From 4b16efe2c48b1b59b2e557ed3d679fff8dd382ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 11 Nov 2024 16:03:53 +0000 Subject: [PATCH 001/163] Started object-orientation based RestAPI querying --- poetry.lock | 31 ++++- pyproject.toml | 2 + simvue/api/__init__.py | 0 simvue/api/objects/__init__.py | 8 ++ simvue/api/objects/alert/__init__.py | 5 + simvue/api/objects/alert/base.py | 93 ++++++++++++++ simvue/api/objects/alert/events.py | 63 +++++++++ simvue/api/objects/alert/fetch.py | 16 +++ simvue/api/objects/alert/metrics.py | 158 +++++++++++++++++++++++ simvue/api/objects/alert/user.py | 0 simvue/api/objects/base.py | 185 +++++++++++++++++++++++++++ simvue/api/objects/folder.py | 128 ++++++++++++++++++ simvue/api/objects/tag.py | 61 +++++++++ simvue/{api.py => api/request.py} | 67 +++++++++- simvue/client.py | 73 +++-------- simvue/config/parameters.py | 2 +- simvue/factory/proxy/remote.py | 2 +- simvue/run.py | 2 +- tests/unit/test_alert.py | 18 +++ tests/unit/test_folder.py | 37 ++++++ tests/unit/test_tag.py | 26 ++++ 21 files changed, 916 insertions(+), 61 deletions(-) create mode 100644 simvue/api/__init__.py create mode 100644 simvue/api/objects/__init__.py create mode 100644 simvue/api/objects/alert/__init__.py create mode 100644 simvue/api/objects/alert/base.py create mode 100644 simvue/api/objects/alert/events.py create mode 100644 simvue/api/objects/alert/fetch.py create mode 100644 simvue/api/objects/alert/metrics.py create mode 100644 simvue/api/objects/alert/user.py create mode 100644 simvue/api/objects/base.py create mode 100644 simvue/api/objects/folder.py create mode 100644 simvue/api/objects/tag.py rename simvue/{api.py => api/request.py} (74%) create mode 100644 tests/unit/test_alert.py create mode 100644 tests/unit/test_folder.py create mode 100644 tests/unit/test_tag.py diff --git a/poetry.lock b/poetry.lock index 8c4bb089..6509e53f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -52,6 +52,17 @@ 
types-python-dateutil = ">=2.8.10" doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] +[[package]] +name = "boltons" +version = "24.1.0" +description = "When they're not builtins, they're boltons." +optional = false +python-versions = ">=3.7" +files = [ + {file = "boltons-24.1.0-py3-none-any.whl", hash = "sha256:a1776d47fdc387fb730fba1fe245f405ee184ee0be2fb447dd289773a84aed3b"}, + {file = "boltons-24.1.0.tar.gz", hash = "sha256:4a49b7d57ee055b83a458c8682a2a6f199d263a8aa517098bda9bab813554b87"}, +] + [[package]] name = "certifi" version = "2024.8.30" @@ -1335,9 +1346,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1585,8 +1596,8 @@ files = [ annotated-types = ">=0.6.0" pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, ] [package.extras] @@ -2250,6 +2261,20 @@ files = [ {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"}, ] +[[package]] +name = "types-requests" +version = "2.32.0.20241016" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = 
"sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.12.2" @@ -2320,4 +2345,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.0" python-versions = "^3.10,<3.14" -content-hash = "6d01a9167c759a202e23065afda61f54231d0d8e9b7150fcdab18c070ba4eb72" +content-hash = "d899f0560742d1f8083a1f8ce5c9e9b906354308dc0d27c9edee9b886f7e3145" diff --git a/pyproject.toml b/pyproject.toml index ddfda1a8..a18750a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,7 @@ tabulate = "^0.9.0" randomname = "^0.2.1" codecarbon = "^2.7.1" numpy = "^2.1.2" +boltons = "^24.1.0" [tool.poetry.extras] plot = ["matplotlib", "plotly"] @@ -71,6 +72,7 @@ pytest-mock = "^3.14.0" pytest-sugar = "^1.0.0" pytest-xdist = "^3.6.1" jinja2 = "^3.1.4" +types-requests = "^2.32.0.20241016" [build-system] requires = ["poetry-core"] diff --git a/simvue/api/__init__.py b/simvue/api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py new file mode 100644 index 00000000..2085eda1 --- /dev/null +++ b/simvue/api/objects/__init__.py @@ -0,0 +1,8 @@ +from .alert import ( + SimvueAlert as SimvueAlert, + EventsAlert as EventsAlert, + MetricsThresholdAlert as MetricsThresholdAlert, + MetricsRangeAlert as MetricsRangeAlert, +) +from .tag import Tag as Tag +from .folder import Folder as Folder diff --git a/simvue/api/objects/alert/__init__.py b/simvue/api/objects/alert/__init__.py new file mode 100644 index 00000000..032694c8 --- /dev/null +++ b/simvue/api/objects/alert/__init__.py @@ -0,0 +1,5 @@ +from .fetch import SimvueAlert +from .metrics import MetricsThresholdAlert, MetricsRangeAlert +from .events import EventsAlert + +__all__ = ["SimvueAlert", "MetricsRangeAlert", "MetricsThresholdAlert", "EventsAlert"] diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py new 
# --- simvue/api/objects/alert/base.py ---
"""
Simvue Alert Base
=================

Common read/write properties shared by every alert type on the Simvue server.
Property reads fetch the live server state; property writes push an update.
"""

import typing

import pydantic

from simvue.api.objects.base import SimvueObject
from simvue.models import NAME_REGEX


class AlertBase(SimvueObject):
    """Base class for alerts; subclasses add source-specific definitions."""

    def get_alert(self) -> dict[str, typing.Any]:
        """Return the nested 'alert' definition block from the server response."""
        try:
            return self._get()["alert"]
        except KeyError as e:
            raise RuntimeError("Expected key 'alert' in alert retrieval") from e

    @property
    def name(self) -> str:
        """Name of this alert as stored on the server."""
        try:
            return self._get()["name"]
        except KeyError as e:
            raise RuntimeError("Expected key 'name' in alert retrieval") from e

    @name.setter
    @pydantic.validate_call
    def name(
        self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)]
    ) -> None:
        self._put(name=name)

    @property
    def description(self) -> str | None:
        """Optional free-text description of the alert."""
        try:
            return self._get()["description"]
        except KeyError as e:
            raise RuntimeError("Expected key 'description' in alert retrieval") from e

    @description.setter
    @pydantic.validate_call
    def description(self, description: str | None) -> None:
        self._put(description=description)

    @property
    def tags(self) -> list[str]:
        """Tags attached to this alert (empty list when server returns null)."""
        try:
            return self._get()["tags"] or []
        except KeyError as e:
            raise RuntimeError("Expected key 'tags' in alert retrieval") from e

    @tags.setter
    @pydantic.validate_call
    def tags(self, tags: list[str]) -> None:
        self._put(tags=tags)

    @property
    def notification(self) -> typing.Literal["none", "email"]:
        """Notification mode for this alert."""
        try:
            return self._get()["notification"]
        except KeyError as e:
            raise RuntimeError("Expected key 'notification' in alert retrieval") from e

    @notification.setter
    @pydantic.validate_call
    def notification(self, notification: typing.Literal["none", "email"]) -> None:
        self._put(notification=notification)

    @property
    def source(self) -> typing.Literal["events", "metrics", "user"]:
        """Alert source category (read-only)."""
        try:
            return self._get()["source"]
        except KeyError as e:
            raise RuntimeError("Expected key 'source' in alert retrieval") from e

    @property
    def enabled(self) -> bool:
        """Whether this alert is active."""
        try:
            return self._get()["enabled"]
        except KeyError as e:
            raise RuntimeError("Expected key 'enabled' in alert retrieval") from e

    @enabled.setter
    @pydantic.validate_call
    def enabled(self, enabled: bool) -> None:
        # FIX: parameter was annotated 'str'; the getter and server field are boolean,
        # so validate_call would have rejected the intended bool arguments.
        self._put(enabled=enabled)

    @property
    def abort(self) -> bool:
        """Whether triggering this alert aborts the run."""
        try:
            return self._get()["abort"]
        except KeyError as e:
            raise RuntimeError("Expected key 'abort' in alert retrieval") from e

    @abort.setter
    @pydantic.validate_call
    def abort(self, abort: bool) -> None:
        # FIX: parameter was annotated 'str'; this is a boolean flag.
        self._put(abort=abort)


# --- simvue/api/objects/alert/events.py ---
class EventsAlert(AlertBase):
    """Alert triggered when a pattern appears in a run's events."""

    def __init__(self, identifier: str | None = None, **kwargs) -> None:
        self._label = "alert"
        # Nested accessor for the event-specific alert definition.
        self.alert = EventAlertDefinition(self)
        super().__init__(identifier, **kwargs)

    @classmethod
    @pydantic.validate_call
    def new(
        cls,
        *,
        name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)],
        notification: typing.Literal["none", "email"],
        pattern: str,
        frequency: pydantic.PositiveInt,
        enabled: bool = True,
        tags: list[str] | None = None,
    ) -> typing.Self:
        """Create a new events-based alert on the Simvue server."""
        _alert = cls()
        _alert._post(
            name=name,
            notification=notification,
            source="events",
            alert={"pattern": pattern, "frequency": frequency},
            enabled=enabled,
            tags=tags or [],
        )
        return _alert


class EventAlertDefinition:
    """Read-mostly view onto an EventsAlert's nested definition block."""

    def __init__(self, alert: EventsAlert) -> None:
        self._alert = alert

    @property
    def pattern(self) -> str:
        """Event string pattern that triggers the alert."""
        try:
            return self._alert.get_alert()["pattern"]
        except KeyError as e:
            raise RuntimeError(
                "Expected key 'pattern' in alert definition retrieval"
            ) from e

    @property
    def frequency(self) -> int:
        """Polling frequency of the alert."""
        try:
            return self._alert.get_alert()["frequency"]
        except KeyError as e:
            raise RuntimeError(
                "Expected key 'frequency' in alert definition retrieval"
            ) from e

    @frequency.setter
    @pydantic.validate_call
    def frequency(self, frequency: int) -> None:
        # NOTE(review): this PUTs 'frequency' at the top level, but the server
        # stores it inside the nested 'alert' block — confirm the endpoint
        # accepts the flat form.
        self._alert._put(frequency=frequency)


# --- simvue/api/objects/alert/fetch.py ---
class SimvueAlert:
    """Factory returning the concrete alert class matching a server alert."""

    def __new__(
        cls, identifier: str | None = None, **kwargs
    ) -> "EventsAlert | MetricsRangeAlert | MetricsThresholdAlert":
        _alert_pre = AlertBase(identifier)
        if _alert_pre.source == "events":
            return EventsAlert(identifier)
        if _alert_pre.source == "metrics":
            # Threshold alerts carry a 'threshold' key; range alerts do not.
            if _alert_pre.get_alert().get("threshold") is not None:
                return MetricsThresholdAlert(identifier)
            return MetricsRangeAlert(identifier)
        # FIX: previously fell off the end and returned None for unrecognised
        # sources (e.g. 'user'); fail loudly instead.
        raise RuntimeError(
            f"Unsupported alert source '{_alert_pre.source}' for alert '{identifier}'"
        )


# --- simvue/api/objects/alert/metrics.py ---
Aggregate = typing.Literal["average", "sum", "at least one", "all"]
Rule = typing.Literal["is above", "is below", "is inside range", "is outside range"]


class MetricsAlert(AlertBase):
    """Shared definition accessors for metric-based alerts."""

    @property
    def aggregation(self) -> Aggregate:
        # FIX: was 'self._alert.get_alert()', but MetricsAlert IS the alert
        # object (AlertBase subclass) — 'self._alert' does not exist here.
        if not (_aggregation := self.get_alert().get("aggregation")):
            raise RuntimeError(
                "Expected key 'aggregation' in alert definition retrieval"
            )
        return _aggregation

    @property
    def rule(self) -> Rule:
        # FIX: same 'self._alert' bug as above.
        if not (_rule := self.get_alert().get("rule")):
            raise RuntimeError("Expected key 'rule' in alert definition retrieval")
        return _rule

    @property
    def window(self) -> int:
        # FIX: same 'self._alert' bug; also check for missing key rather than
        # falsiness so a (theoretical) zero window is not mistaken for absence.
        if (_window := self.get_alert().get("window")) is None:
            raise RuntimeError("Expected key 'window' in alert definition retrieval")
        return _window

    @property
    def frequency(self) -> int:
        """Polling frequency of the alert."""
        try:
            return self.get_alert()["frequency"]
        except KeyError as e:
            raise RuntimeError(
                "Expected key 'frequency' in alert definition retrieval"
            ) from e

    @frequency.setter
    @pydantic.validate_call
    def frequency(self, frequency: int) -> None:
        # NOTE(review): as in EventAlertDefinition, confirm the server accepts
        # 'frequency' PUT at the top level rather than nested under 'alert'.
        self._put(frequency=frequency)


class MetricsThresholdAlert(MetricsAlert):
    """Alert comparing an aggregated metric against a single threshold."""

    def __init__(self, identifier: str | None = None, **kwargs) -> None:
        self._label = "alert"
        self.alert = MetricThresholdAlertDefinition(self)
        super().__init__(identifier, **kwargs)

    @classmethod
    @pydantic.validate_call
    def new(
        cls,
        *,
        name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)],
        notification: typing.Literal["none", "email"],
        aggregation: Aggregate,
        rule: Rule,
        window: pydantic.PositiveInt,
        threshold: float,
        frequency: pydantic.PositiveInt,
        enabled: bool = True,
        tags: list[str] | None = None,
    ) -> typing.Self:
        """Create a new metric threshold alert on the Simvue server."""
        _alert = cls()
        _alert._post(
            name=name,
            notification=notification,
            # FIX: was source="events" — this is a metrics alert.
            source="metrics",
            alert={
                "rule": rule,
                "frequency": frequency,
                "window": window,
                "aggregation": aggregation,
                "threshold": threshold,
            },
            enabled=enabled,
            tags=tags or [],
        )
        return _alert


class MetricsRangeAlert(MetricsAlert):
    """Alert comparing an aggregated metric against a [low, high] range."""

    # FIX: now subclasses MetricsAlert (was AlertBase), gaining the shared
    # aggregation/rule/window/frequency accessors; identifier is optional so
    # new() can construct an unbound instance (it previously required one).
    def __init__(self, identifier: str | None = None, **kwargs) -> None:
        self._label = "alert"
        self.alert = MetricRangeAlertDefinition(self)
        super().__init__(identifier, **kwargs)

    @classmethod
    @pydantic.validate_call
    def new(
        cls,
        *,
        name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)],
        notification: typing.Literal["none", "email"],
        aggregation: Aggregate,
        rule: Rule,
        window: pydantic.PositiveInt,
        range_high: float,
        range_low: float,
        frequency: pydantic.PositiveInt,
        enabled: bool = True,
        tags: list[str] | None = None,
    ) -> typing.Self:
        """Create a new metric range alert on the Simvue server."""
        if range_low >= range_high:
            raise ValueError(f"Invalid arguments for range [{range_low}, {range_high}]")

        # FIX: constructed a MetricsThresholdAlert here by mistake.
        _alert = cls()
        _alert._post(
            name=name,
            notification=notification,
            # FIX: was source="events" — this is a metrics alert.
            source="metrics",
            alert={
                "rule": rule,
                "frequency": frequency,
                "window": window,
                "aggregation": aggregation,
                "range_low": range_low,
                "range_high": range_high,
            },
            enabled=enabled,
            tags=tags or [],
        )
        return _alert


class MetricThresholdAlertDefinition:
    """Read-only view onto a threshold alert's nested definition block."""

    def __init__(self, alert: MetricsThresholdAlert) -> None:
        self._alert = alert

    @property
    def threshold(self) -> float:
        """Threshold value the aggregated metric is compared against."""
        if (threshold_l := self._alert.get_alert().get("threshold")) is None:
            raise RuntimeError("Expected key 'threshold' in alert definition retrieval")
        return threshold_l


class MetricRangeAlertDefinition:
    """Read-only view onto a range alert's nested definition block."""

    def __init__(self, alert: MetricsRangeAlert) -> None:
        self._alert = alert

    @property
    def range_low(self) -> float:
        """Lower bound of the alert range."""
        if (range_l := self._alert.get_alert().get("range_low")) is None:
            raise RuntimeError("Expected key 'range_low' in alert definition retrieval")
        return range_l

    @property
    def range_high(self) -> float:
        """Upper bound of the alert range."""
        if (range_u := self._alert.get_alert().get("range_high")) is None:
            raise RuntimeError(
                "Expected key 'range_high' in alert definition retrieval"
            )
        return range_u
"""
Simvue RestAPI Objects
======================

Contains base class for interacting with objects on the Simvue server.
All property reads issue a GET against the object's endpoint; all writes
issue a PUT, so instances always reflect live server state.
"""

import abc
import http
# FIX: import logging from the standard library directly; it was previously
# pulled in via 'codecarbon.external.logger', an accidental transitive import.
import logging
import pathlib
import typing

import boltons.urlutils as bo_url

from simvue.config.user import SimvueConfiguration
from simvue.version import __version__
from simvue.api.request import (
    get as sv_get,
    post as sv_post,
    put as sv_put,
    delete as sv_delete,
    get_json_from_response,
)


class Visibility:
    """Accessor for a server object's 'visibility' block (users/public/tenant)."""

    def __init__(self, sv_obj: "SimvueObject") -> None:
        self._sv_obj = sv_obj

    def _update_visibility(self, key: str, value: typing.Any) -> None:
        # Merge the change into the current visibility dict and push it back.
        _visibility = self._sv_obj._get_visibility() | {key: value}
        self._sv_obj._put(visibility=_visibility)

    @property
    def users(self) -> list[str]:
        """Users granted access to this object."""
        return self._sv_obj._get_visibility().get("users", [])

    @users.setter
    def users(self, users: list[str]) -> None:
        self._update_visibility("users", users)

    @property
    def public(self) -> bool:
        """Whether this object is publicly visible."""
        return self._sv_obj._get_visibility().get("public", False)

    @public.setter
    def public(self, public: bool) -> None:
        self._update_visibility("public", public)

    @property
    def tenant(self) -> bool:
        """Whether this object is visible to the whole tenant."""
        return self._sv_obj._get_visibility().get("tenant", False)

    @tenant.setter
    def tenant(self, tenant: bool) -> None:
        self._update_visibility("tenant", tenant)


class SimvueObject(abc.ABC):
    """Base class wrapping CRUD access to one object on the Simvue server.

    Subclasses set ``_label`` (the URL path segment, pluralised as
    ``api/{label}s``) before calling ``super().__init__``.
    """

    def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None:
        self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}")
        # Respect a subclass-assigned label; default to the lowercase class name.
        self._label: str = getattr(self, "_label", self.__class__.__name__.lower())
        self._identifier: typing.Optional[str] = identifier
        self._user_config = SimvueConfiguration.fetch(**kwargs)
        self._headers: dict[str, str] = {
            "Authorization": f"Bearer {self._user_config.server.token}",
            "User-Agent": f"Simvue Python client {__version__}",
        }

    def _get_visibility(self) -> dict[str, bool | list[str]]:
        """Return this object's visibility block, raising if absent."""
        if not (visibility := self._get().get("visibility")):
            raise RuntimeError("Expected key 'visibility' in response")
        return visibility

    @classmethod
    def new(cls, **kwargs) -> "SimvueObject":
        """Create a new object of this type on the server from kwargs."""
        # FIX: instantiate 'cls', not the hard-coded base class, so subclasses
        # inheriting this factory get an instance of themselves.
        _obj = cls()
        _obj._post(**kwargs)
        return _obj

    @property
    def id(self) -> typing.Optional[str]:
        """Server-side unique identifier (None until created/fetched)."""
        return self._identifier

    @property
    def _url_path(self) -> pathlib.Path:
        """Relative API path for this object type, e.g. 'api/alerts'."""
        return pathlib.Path(f"api/{self._label}s")

    @property
    def _base_url(self) -> str:
        """Collection endpoint URL for this object type."""
        _url = bo_url.URL(self._user_config.server.url)
        # FIX: boltons expects a string path; pass str(), not a pathlib.Path.
        _url.path = str(self._url_path)
        return f"{_url}"

    @property
    def url(self) -> typing.Optional[str]:
        """Endpoint URL of this specific object, or None if not yet created."""
        if not self._identifier:
            return None
        _url = bo_url.URL(self._user_config.server.url)
        _url.path = f"{self._url_path / self._identifier}"
        return f"{_url}"

    def _post(self, **kwargs) -> dict[str, typing.Any]:
        """Create this object on the server; stores the returned identifier."""
        _response = sv_post(
            url=self._base_url, headers=self._headers, data=kwargs, is_json=True
        )
        _json_response = get_json_from_response(
            response=_response,
            expected_status=[http.HTTPStatus.OK],
            scenario=f"Creation of {self.__class__.__name__.lower()} '{kwargs}'",
        )

        if not isinstance(_json_response, dict):
            raise RuntimeError(
                f"Expected dictionary from JSON response during {self._label} creation "
                f"but got '{type(_json_response)}'"
            )
        self._logger.debug("'%s' created successfully", _json_response["id"])
        self._identifier = _json_response["id"]

        return _json_response

    def _put(self, **kwargs) -> dict[str, typing.Any]:
        """Update this object on the server with the given fields."""
        if not self.url:
            raise RuntimeError(
                f"Identifier for instance of {self.__class__.__name__} Unknown"
            )
        _response = sv_put(
            url=self.url, headers=self._headers, data=kwargs, is_json=True
        )
        _json_response = get_json_from_response(
            response=_response,
            expected_status=[http.HTTPStatus.OK],
            # FIX: message said "Creation of" for a PUT and was missing its
            # closing quote around the identifier.
            scenario=f"Modification of {self.__class__.__name__.lower()} '{self._identifier}'",
        )

        if not isinstance(_json_response, dict):
            raise RuntimeError(
                f"Expected dictionary from JSON response during {self._label} modification "
                f"but got '{type(_json_response)}'"
            )
        self._logger.debug("'%s' modified successfully", self._identifier)

        return _json_response

    def _delete(self) -> dict[str, typing.Any]:
        """Delete this object from the server."""
        if not self.url:
            raise RuntimeError(
                f"Identifier for instance of {self.__class__.__name__} Unknown"
            )
        _response = sv_delete(url=self.url, headers=self._headers)
        _json_response = get_json_from_response(
            response=_response,
            expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT],
            scenario=f"Deletion of {self.__class__.__name__.lower()} '{self._identifier}'",
        )
        self._logger.debug("'%s' deleted successfully", self._identifier)

        if not isinstance(_json_response, dict):
            raise RuntimeError(
                f"Expected dictionary from JSON response during {self._label} deletion "
                f"but got '{type(_json_response)}'"
            )
        return _json_response

    def _get(self) -> dict[str, typing.Any]:
        """Fetch this object's current state from the server."""
        if not self.url:
            raise RuntimeError(
                f"Identifier for instance of {self.__class__.__name__} Unknown"
            )
        _response = sv_get(url=self.url, headers=self._headers)
        _json_response = get_json_from_response(
            response=_response,
            expected_status=[http.HTTPStatus.OK],
            scenario=f"Retrieval of {self.__class__.__name__.lower()} '{self._identifier}'",
        )
        self._logger.debug("'%s' retrieved successfully", self._identifier)

        if not isinstance(_json_response, dict):
            raise RuntimeError(
                f"Expected dictionary from JSON response during {self._label} retrieval "
                f"but got '{type(_json_response)}'"
            )
        return _json_response
# --- simvue/api/objects/folder.py ---
"""
Simvue Server Folder
====================

Contains a class for remotely connecting to a Simvue folder, or defining
a new folder given relevant arguments.
"""

import typing

import pydantic

from .base import SimvueObject, Visibility
from simvue.models import FOLDER_REGEX


class Folder(SimvueObject):
    """
    Simvue Folder
    =============

    Connects to/creates folder objects on the Simvue server; any
    modification of instance attributes is mirrored on the remote object.
    """

    def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None:
        """Initialise a Folder.

        If an identifier is provided a connection will be made to the object
        matching the identifier on the target server, else a new Folder will
        be created using arguments provided in kwargs.

        Parameters
        ----------
        identifier : str, optional
            the remote server unique id for the target folder
        **kwargs : dict
            any additional arguments to be passed to the object initialiser
        """
        self.visibility = Visibility(self)
        super().__init__(identifier, **kwargs)

    @classmethod
    @pydantic.validate_call
    def new(
        cls, *, path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)]
    ) -> typing.Self:
        """Create a new Folder on the Simvue server with the given path."""
        _folder = cls()
        _folder._post(path=path)
        return _folder

    @property
    def tags(self) -> list[str]:
        """Return list of tags assigned to this folder."""
        try:
            return self._get()["tags"]
        except KeyError as e:
            raise RuntimeError(
                f"Expected value for 'tags' for folder '{self._identifier}'"
            ) from e

    @tags.setter
    @pydantic.validate_call
    def tags(self, tags: list[str]) -> None:
        """Set tags assigned to this folder."""
        self._put(tags=tags)

    @property
    def path(self) -> str:
        """Return the path of this folder.

        FIX: annotation was ``pathlib.Path`` but the raw server string is
        returned unchanged.
        """
        try:
            return self._get()["path"]
        except KeyError as e:
            raise RuntimeError(
                f"Expected value for 'path' for folder '{self._identifier}'"
            ) from e

    @property
    def description(self) -> typing.Optional[str]:
        """Return the folder description."""
        return self._get().get("description")

    @description.setter
    @pydantic.validate_call
    def description(self, description: str) -> None:
        """Update the folder description."""
        self._put(description=description)

    @property
    def name(self) -> typing.Optional[str]:
        """Return the folder name."""
        return self._get().get("name")

    @name.setter
    @pydantic.validate_call
    def name(self, name: str) -> None:
        """Update the folder name."""
        self._put(name=name)

    @property
    def star(self) -> bool:
        """Return if this folder is starred."""
        return self._get().get("starred", False)

    @star.setter
    @pydantic.validate_call
    def star(self, is_true: bool = True) -> None:
        """Star this folder as a favourite."""
        self._put(starred=is_true)

    @property
    def ttl(self) -> int:
        """Return the retention period (seconds) for this folder."""
        try:
            return self._get()["ttl"]
        except KeyError as e:
            raise RuntimeError(
                f"Expected value for 'ttl' for folder '{self._identifier}'"
            ) from e

    @ttl.setter
    @pydantic.validate_call
    def ttl(self, time_seconds: int) -> None:
        """Update the retention period for this folder."""
        self._put(ttl=time_seconds)


# --- simvue/api/objects/tag.py ---
import pydantic.color

from .base import SimvueObject


class Tag(SimvueObject):
    """Connects to/creates tag objects on the Simvue server."""

    @classmethod
    @pydantic.validate_call
    def new(
        cls,
        *,
        name: str,
        description: str | None = None,
        color: pydantic.color.Color | None = None,
    ) -> typing.Self:
        """Create a new Tag on the Simvue server."""
        _data: dict[str, typing.Any] = {"name": name}
        if description:
            _data["description"] = description
        if color:
            # FIX: wrote the colour into _data["description"], silently
            # clobbering any description; the server field (per the 'color'
            # property below) is 'colour'.
            _data["colour"] = color.as_hex()
        _tag = cls()
        _tag._post(**_data)
        return _tag

    @property
    def name(self) -> str:
        """Name of this tag."""
        try:
            return self._get()["name"]
        except KeyError as e:
            raise RuntimeError("Expected key 'name' in tag retrieval") from e

    @name.setter
    @pydantic.validate_call
    def name(self, name: str) -> None:
        self._put(name=name)

    @property
    def color(self) -> pydantic.color.RGBA:
        """Display colour of this tag (server stores it under 'colour')."""
        try:
            _color: str = self._get()["colour"]
            return pydantic.color.parse_str(_color)
        except KeyError as e:
            raise RuntimeError("Expected key 'colour' in tag retrieval") from e

    @color.setter
    @pydantic.validate_call
    def color(self, color: pydantic.color.Color) -> None:
        self._put(colour=color.as_hex())

    @property
    def description(self) -> str:
        """Free-text description of this tag."""
        try:
            return self._get()["description"]
        except KeyError as e:
            raise RuntimeError("Expected key 'description' in tag retrieval") from e

    @description.setter
    @pydantic.validate_call
    def description(self, description: str) -> None:
        self._put(description=description)
# --- additions to simvue/api/request.py ---
@retry(
    wait=wait_exponential(multiplier=RETRY_MULTIPLIER, min=RETRY_MIN, max=RETRY_MAX),
    retry=retry_if_exception(is_retryable_exception),
    stop=stop_after_attempt(RETRY_STOP),
    reraise=True,
)
def delete(
    url: str, headers: dict[str, str], timeout: int = DEFAULT_API_TIMEOUT
) -> requests.Response:
    """HTTP DELETE with retry/backoff.

    Parameters
    ----------
    url : str
        URL to send the DELETE request to
    headers : dict[str, str]
        headers for the delete request
    timeout : int, optional
        timeout of request, by default DEFAULT_API_TIMEOUT

    Returns
    -------
    requests.Response
        response from executing DELETE
    """
    response = requests.delete(url, headers=headers, timeout=timeout)
    # Raise so the retry decorator can classify and possibly re-attempt.
    response.raise_for_status()

    return response


def get_json_from_response(
    expected_status: list[int],
    scenario: str,
    response: requests.Response,
) -> typing.Union[dict, list]:
    """Extract the JSON body from a response, or raise a descriptive error.

    Parameters
    ----------
    expected_status : list[int]
        status codes considered successful for this scenario
    scenario : str
        human-readable description used in the error message on failure
    response : requests.Response
        the response to decode

    Returns
    -------
    dict | list
        the decoded JSON body (an empty dict if the body was empty)

    Raises
    ------
    RuntimeError
        if the status code is unexpected or the body is not valid JSON
    """
    try:
        json_response = response.json()
        # Normalise an empty-but-valid body to an empty dict.
        json_response = json_response or {}
    except json.JSONDecodeError:
        json_response = None

    error_str = f"{scenario} failed "

    if (_status_code := response.status_code) in expected_status:
        if json_response is not None:
            return json_response
        # Expected status but undecodable body — still an error.
        details = "could not request JSON response"
    else:
        error_str += f"with status {_status_code}"
        details = (json_response or {}).get("details")

    try:
        txt_response = response.text
    except UnicodeDecodeError:
        txt_response = None

    if details:
        error_str += f": {details}"
    elif txt_response:
        error_str += f": {txt_response}"

    raise RuntimeError(error_str)
txt_response = response.text - except UnicodeDecodeError: - txt_response = None - - if details: - error_str += f": {details}" - elif txt_response: - error_str += f": {txt_response}" - - raise RuntimeError(error_str) - @prettify_pydantic @pydantic.validate_call def get_run_id_from_name( @@ -189,7 +154,7 @@ def get_run_id_from_name( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario="Retrieval of run ID from name", response=response, @@ -239,7 +204,7 @@ def get_run(self, run_id: str) -> typing.Optional[dict[str, typing.Any]]: f"{self._user_config.server.url}/api/runs/{run_id}", headers=self._headers ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Retrieval of run '{run_id}'", response=response, @@ -363,7 +328,7 @@ def get_runs( if output_format not in ("dict", "dataframe"): raise ValueError("Invalid format specified") - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario="Run retrieval", response=response, @@ -408,7 +373,7 @@ def delete_run(self, run_id: str) -> typing.Optional[dict]: headers=self._headers, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Deletion of run '{run_id}'", response=response, @@ -556,7 +521,7 @@ def delete_folder( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Deletion of folder '{folder_path}'", response=response, @@ -623,7 +588,7 @@ def list_artifacts(self, run_id: str) -> list[dict[str, typing.Any]]: params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( 
expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of artifacts for run '{run_id}", response=response, @@ -649,7 +614,7 @@ def _retrieve_artifact_from_server( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", response=response, @@ -691,7 +656,7 @@ def abort_run(self, run_id: str, reason: str) -> typing.Union[dict, list]: json=body, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Abort of run '{run_id}'", response=response, @@ -885,7 +850,7 @@ def get_artifacts_as_files( params=params, ) - self._get_json_from_response( + get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Download of artifacts for run '{run_id}'", response=response, @@ -978,7 +943,7 @@ def get_folders( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario="Retrieval of folders", response=response, @@ -1025,7 +990,7 @@ def get_metrics_names(self, run_id: str) -> list[str]: params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Request for metric names for run '{run_id}'", response=response, @@ -1061,7 +1026,7 @@ def _get_run_metrics_from_server( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of metrics '{metric_names}' in " f"runs '{run_ids}'", response=metrics_response, @@ -1323,7 +1288,7 @@ def get_events( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], 
scenario=f"Retrieval of events for run '{run_id}'", response=response, @@ -1380,7 +1345,7 @@ def get_alerts( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of alerts for run '{run_id}'", response=response, @@ -1392,7 +1357,7 @@ def get_alerts( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[200], scenario=f"Retrieval of alerts for run '{run_id}'", response=response, @@ -1469,7 +1434,7 @@ def get_tags( params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[200], scenario="Retrieval of tags", response=response, @@ -1504,7 +1469,7 @@ def delete_tag(self, tag_id: str) -> None: headers=self._headers, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Deletion of tag '{tag_id}'", response=response, @@ -1545,7 +1510,7 @@ def get_tag(self, tag_id: str) -> typing.Optional[dict[str, typing.Any]]: f"{self._user_config.server.url}/api/tag/{tag_id}", headers=self._headers ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Retrieval of tag '{tag_id}'", response=response, diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index a0c13b70..020d0fad 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -18,7 +18,7 @@ import simvue.models as sv_models from simvue.utilities import get_expiry from simvue.version import __version__ -from simvue.api import get +from simvue.api.request import get logger = logging.getLogger(__file__) diff --git a/simvue/factory/proxy/remote.py b/simvue/factory/proxy/remote.py index 4f3077f2..d6239388 100644 --- a/simvue/factory/proxy/remote.py +++ 
b/simvue/factory/proxy/remote.py @@ -5,7 +5,7 @@ if typing.TYPE_CHECKING: from simvue.config.user import SimvueConfiguration -from simvue.api import get, post, put +from simvue.api.request import get, post, put from simvue.factory.proxy.base import SimvueBaseClass from simvue.utilities import prepare_for_api, skip_if_failed from simvue.version import __version__ diff --git a/simvue/run.py b/simvue/run.py index 1cdb617e..cab03b8f 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -33,7 +33,7 @@ from pydantic import ValidationError from .config.user import SimvueConfiguration -import simvue.api as sv_api +import simvue.api.request as sv_api from .factory.dispatch import Dispatcher from .executor import Executor diff --git a/tests/unit/test_alert.py b/tests/unit/test_alert.py new file mode 100644 index 00000000..30c54419 --- /dev/null +++ b/tests/unit/test_alert.py @@ -0,0 +1,18 @@ +import pytest +import uuid + +from simvue.api.objects import SimvueAlert, MetricsRangeAlert, MetricsThresholdAlert, EventsAlert + +@pytest.mark.api +def test_event_alert_creation() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none" + ) + assert _alert.alert.frequency == 1 + assert _alert.alert.pattern == "completed" + assert _alert.name == f"events_alert_{_uuid}" + assert _alert.notification == "none" diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py new file mode 100644 index 00000000..1dded7a9 --- /dev/null +++ b/tests/unit/test_folder.py @@ -0,0 +1,37 @@ +import typing +import pytest +import uuid +import time + +from simvue.api.objects.folder import Folder + +@pytest.mark.api +def test_folder_creation() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _folder = Folder.new(path=_path) + assert _folder.id + assert _folder.path == _path + assert not _folder.visibility.public + assert 
not _folder.visibility.tenant + assert not _folder.visibility.users + + +@pytest.mark.api(depends=["test_folder_creation"]) +def test_folder_modification() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _description = "Test study" + _tags = ["testing", "api"] + _folder = Folder.new(path=_path) + time.sleep(1) + _folder_new = Folder(identifier=_folder.id) + _folder_new.tags = _tags + _folder_new.description = _description + _folder_new.visibility.tenant = True + assert _folder_new.tags == _tags + assert _folder.tags == _tags + assert _folder_new.description == _description + assert _folder.description == _description + assert _folder_new.visibility.tenant + diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py new file mode 100644 index 00000000..f3d6d617 --- /dev/null +++ b/tests/unit/test_tag.py @@ -0,0 +1,26 @@ +import time +import pytest +import uuid +from simvue.api.objects.tag import Tag + +@pytest.mark.api +def test_tag_creation() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}") + assert _tag.name == f"test_tag_{_uuid}" + assert _tag.color + assert not _tag.description + + +@pytest.mark.api +def test_tag_modification() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}") + time.sleep(1) + _new_tag = Tag(_tag.id) + _new_tag.name = _tag.name.replace("test", "test_modified") + _new_tag.color = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) + _new_tag.description = "modified test tag" + assert _new_tag.name == f"test_modified_tag_{_uuid}" + assert _new_tag.color.r == 250 / 255 + assert _new_tag.description == "modified test tag" From da886d94f4515c31bd3abe4cb111ab42c7b9e659 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 11 Nov 2024 16:11:49 +0000 Subject: [PATCH 002/163] Fix generic alert class --- simvue/api/objects/alert/base.py | 4 ++++ 
simvue/api/objects/alert/events.py | 1 - simvue/api/objects/alert/metrics.py | 22 ++++++++++------------ tests/unit/test_alert.py | 17 +++++++++++++++++ 4 files changed, 31 insertions(+), 13 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 4ac24ea0..208ad84f 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -5,6 +5,10 @@ class AlertBase(SimvueObject): + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + self._label = "alert" + super().__init__(identifier, **kwargs) + def get_alert(self) -> dict[str, typing.Any]: try: return self._get()["alert"] diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 8a2f371a..885021db 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -6,7 +6,6 @@ class EventsAlert(AlertBase): def __init__(self, identifier: str | None = None, **kwargs) -> None: - self._label = "alert" self.alert = EventAlertDefinition(self) super().__init__(identifier, **kwargs) diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index d52e434b..f93ff1c2 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -10,7 +10,7 @@ class MetricsAlert(AlertBase): @property def aggregation(self) -> Aggregate: - if not (_aggregation := self._alert.get_alert().get("aggregation")): + if not (_aggregation := self.alert.get_alert().get("aggregation")): raise RuntimeError( "Expected key 'aggregation' in alert definition retrieval" ) @@ -18,20 +18,20 @@ def aggregation(self) -> Aggregate: @property def rule(self) -> Rule: - if not (_rule := self._alert.get_alert().get("rule")): + if not (_rule := self.alert.get_alert().get("rule")): raise RuntimeError("Expected key 'rule' in alert definition retrieval") return _rule @property def window(self) -> int: - if not (_window := self._alert.get_alert().get("window")): + if not 
(_window := self.alert.get_alert().get("window")): raise RuntimeError("Expected key 'window' in alert definition retrieval") return _window @property def frequency(self) -> int: try: - return self._alert.get_alert()["frequency"] + return self.alert.get_alert()["frequency"] except KeyError as e: raise RuntimeError( "Expected key 'frequency' in alert definition retrieval" @@ -40,12 +40,11 @@ def frequency(self) -> int: @frequency.setter @pydantic.validate_call def frequency(self, frequency: int) -> None: - self._alert._put(frequency=frequency) + self.alert._put(frequency=frequency) class MetricsThresholdAlert(MetricsAlert): def __init__(self, identifier: str | None = None, **kwargs) -> None: - self._label = "alert" self.alert = MetricThresholdAlertDefinition(self) super().__init__(identifier, **kwargs) @@ -85,7 +84,6 @@ def new( class MetricsRangeAlert(AlertBase): def __init__(self, identifier: str, **kwargs) -> None: - self._label = "alert" self.alert = MetricRangeAlertDefinition(self) super().__init__(identifier, **kwargs) @@ -130,28 +128,28 @@ def new( class MetricThresholdAlertDefinition: def __init__(self, alert: MetricsThresholdAlert) -> None: - self._alert = alert + self.alert = alert @property def threshold(self) -> float: - if not (threshold_l := self._alert.get_alert().get("threshold")): + if not (threshold_l := self.alert.get_alert().get("threshold")): raise RuntimeError("Expected key 'threshold' in alert definition retrieval") return threshold_l class MetricRangeAlertDefinition: def __init__(self, alert: MetricsRangeAlert) -> None: - self._alert = alert + self.alert = alert @property def range_low(self) -> float: - if not (range_l := self._alert.get_alert().get("range_low")): + if not (range_l := self.alert.get_alert().get("range_low")): raise RuntimeError("Expected key 'range_low' in alert definition retrieval") return range_l @property def range_high(self) -> float: - if not (range_u := self._alert.get_alert().get("range_high")): + if not (range_u := 
self.alert.get_alert().get("range_high")): raise RuntimeError( "Expected key 'range_high' in alert definition retrieval" ) diff --git a/tests/unit/test_alert.py b/tests/unit/test_alert.py index 30c54419..4fb19a6b 100644 --- a/tests/unit/test_alert.py +++ b/tests/unit/test_alert.py @@ -1,3 +1,4 @@ +import time import pytest import uuid @@ -16,3 +17,19 @@ def test_event_alert_creation() -> None: assert _alert.alert.pattern == "completed" assert _alert.name == f"events_alert_{_uuid}" assert _alert.notification == "none" + + +@pytest.mark.api +def test_event_alert_modification() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none" + ) + time.sleep(1) + _new_alert = SimvueAlert(_alert.id) + assert isinstance(_new_alert, EventsAlert) + _new_alert.description = "updated!" + assert _new_alert.description == "updated!" From 3c06c0c6ba4f1bf1b0406db30546dc04e98b95ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 12 Nov 2024 09:09:03 +0000 Subject: [PATCH 003/163] Add staging to RestAPI calls --- simvue/api/objects/alert/base.py | 24 ++++++++++----------- simvue/api/objects/alert/events.py | 7 +++--- simvue/api/objects/alert/metrics.py | 7 +++--- simvue/api/objects/base.py | 33 +++++++++++++++++++++++++---- simvue/api/objects/folder.py | 21 ++++++++++-------- simvue/api/objects/tag.py | 14 ++++++------ tests/unit/test_alert.py | 2 ++ 7 files changed, 70 insertions(+), 38 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 208ad84f..52a3df4c 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -1,6 +1,6 @@ import pydantic import typing -from simvue.api.objects.base import SimvueObject +from simvue.api.objects.base import SimvueObject, dynamic_property from simvue.models import NAME_REGEX @@ -27,9 +27,9 @@ def name(self) -> str: def name( self, 
name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] ) -> None: - self._put(name=name) + self._staging["name"] = name - @property + @dynamic_property def description(self) -> str | None: try: return self._get()["description"] @@ -39,9 +39,9 @@ def description(self) -> str | None: @description.setter @pydantic.validate_call def description(self, description: str | None) -> None: - self._put(description=description) + self._staging["description"] = description - @property + @dynamic_property def tags(self) -> list[str]: try: return self._get()["tags"] or [] @@ -51,9 +51,9 @@ def tags(self) -> list[str]: @tags.setter @pydantic.validate_call def tags(self, tags: list[str]) -> None: - self._put(tags=tags) + self._staging["tags"] = tags - @property + @dynamic_property def notification(self) -> typing.Literal["none", "email"]: try: return self._get()["notification"] @@ -63,7 +63,7 @@ def notification(self) -> typing.Literal["none", "email"]: @notification.setter @pydantic.validate_call def notification(self, notification: typing.Literal["none", "email"]) -> None: - self._put(notification=notification) + self._staging["notification"] = notification @property def source(self) -> typing.Literal["events", "metrics", "user"]: @@ -72,7 +72,7 @@ def source(self) -> typing.Literal["events", "metrics", "user"]: except KeyError as e: raise RuntimeError("Expected key 'source' in alert retrieval") from e - @property + @dynamic_property def enabled(self) -> bool: try: return self._get()["enabled"] @@ -82,9 +82,9 @@ def enabled(self) -> bool: @enabled.setter @pydantic.validate_call def enabled(self, enabled: str) -> None: - self._put(enabled=enabled) + self._staging["enabled"] = enabled - @property + @dynamic_property def abort(self) -> bool: try: return self._get()["abort"] @@ -94,4 +94,4 @@ def abort(self) -> bool: @abort.setter @pydantic.validate_call def abort(self, abort: str) -> None: - self._put(abort=abort) + self._staging["abort"] = abort diff --git 
a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 885021db..c9fe0e09 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -1,6 +1,6 @@ import typing import pydantic -from .base import AlertBase +from .base import AlertBase, dynamic_property from simvue.models import NAME_REGEX @@ -47,7 +47,7 @@ def pattern(self) -> str: "Expected key 'pattern' in alert definition retrieval" ) from e - @property + @dynamic_property def frequency(self) -> int: try: return self._alert.get_alert()["frequency"] @@ -59,4 +59,5 @@ def frequency(self) -> int: @frequency.setter @pydantic.validate_call def frequency(self, frequency: int) -> None: - self._alert._put(frequency=frequency) + _alert = self._alert.get_alert() | {"frequency": frequency} + self._alert._staging["alert"] = _alert diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index f93ff1c2..bfad442f 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -1,6 +1,6 @@ import pydantic import typing -from .base import AlertBase +from .base import AlertBase, dynamic_property from simvue.models import NAME_REGEX Aggregate = typing.Literal["average", "sum", "at least one", "all"] @@ -28,7 +28,7 @@ def window(self) -> int: raise RuntimeError("Expected key 'window' in alert definition retrieval") return _window - @property + @dynamic_property def frequency(self) -> int: try: return self.alert.get_alert()["frequency"] @@ -40,7 +40,8 @@ def frequency(self) -> int: @frequency.setter @pydantic.validate_call def frequency(self, frequency: int) -> None: - self.alert._put(frequency=frequency) + _alert = self._alert.get_alert() | {"frequency": frequency} + self._alert._staging["alert"] = _alert class MetricsThresholdAlert(MetricsAlert): diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 6db47dfa..d5508fe4 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ 
-24,15 +24,34 @@ ) +def dynamic_property(member_func: typing.Callable): + def _wrapper(self: typing.Union["SimvueObject", typing.Any]) -> typing.Any: + if isinstance(self, SimvueObject): + _sv_obj = self + elif hasattr(self, "_sv_obj"): + _sv_obj = self._sv_obj + else: + raise RuntimeError( + f"Cannot use 'dynamic_property' decorator on type '{self.__name__}'" + ) + if member_func.__name__ in _sv_obj._staging: + _sv_obj._logger.warning( + f"Uncommitted change found for attribute '{member_func.__name__}'" + ) + return member_func(self) + + return property(_wrapper) + + class Visibility: def __init__(self, sv_obj: "SimvueObject") -> None: self._sv_obj = sv_obj def _update_visibility(self, key: str, value: typing.Any) -> None: _visibility = self._sv_obj._get_visibility() | {key: value} - self._sv_obj._put(visibility=_visibility) + self._sv_obj._staging["visibility"] = _visibility - @property + @dynamic_property def users(self) -> list[str]: return self._sv_obj._get_visibility().get("users", []) @@ -40,7 +59,7 @@ def users(self) -> list[str]: def users(self, users: list[str]) -> None: self._update_visibility("users", users) - @property + @dynamic_property def public(self) -> bool: return self._sv_obj._get_visibility().get("public", False) @@ -48,7 +67,7 @@ def public(self) -> bool: def public(self, public: bool) -> None: self._update_visibility("public", public) - @property + @dynamic_property def tenant(self) -> bool: return self._sv_obj._get_visibility().get("tenant", False) @@ -63,6 +82,7 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) self._identifier: typing.Optional[str] = identifier self._user_config = SimvueConfiguration.fetch(**kwargs) + self._staging: dict[str, typing.Any] = {} self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token}", "User-Agent": f"Simvue Python client {__version__}", @@ -79,6 +99,11 @@ def 
new(cls, **kwargs) -> "SimvueObject": _obj._post(**kwargs) return _obj + def commit(self) -> None: + if not self._staging: + return + self._put(**self._staging) + @property def id(self) -> typing.Optional[str]: return self._identifier diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 70f8ad4e..bb250b65 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -11,7 +11,7 @@ import typing import pydantic -from .base import SimvueObject, Visibility +from .base import SimvueObject, Visibility, dynamic_property from simvue.models import FOLDER_REGEX @@ -52,9 +52,12 @@ def new( _folder._post(path=path) return _folder - @property + @dynamic_property def tags(self) -> list[str]: """Return list of tags assigned to this folder""" + if self._staging.get("tags"): + self._logger.warning("Uncommitted changes found for attribute 'tags'") + return self._staging["tags"] try: return self._get()["tags"] except KeyError as e: @@ -66,7 +69,7 @@ def tags(self) -> list[str]: @pydantic.validate_call def tags(self, tags: list[str]) -> None: """Set tags assigned to this folder""" - self._put(tags=tags) + self._staging["tags"] = tags @property def path(self) -> pathlib.Path: @@ -78,7 +81,7 @@ def path(self) -> pathlib.Path: f"Expected value for 'path' for folder '{self._identifier}'" ) from e - @property + @dynamic_property def description(self) -> typing.Optional[str]: """Return the folder description""" return self._get().get("description") @@ -87,9 +90,9 @@ def description(self) -> typing.Optional[str]: @pydantic.validate_call def description(self, description: str) -> None: """Update the folder description""" - self._put(description=description) + self._staging["description"] = description - @property + @dynamic_property def name(self) -> typing.Optional[str]: """Return the folder name""" return self._get().get("name") @@ -98,9 +101,9 @@ def name(self) -> typing.Optional[str]: @pydantic.validate_call def name(self, name: str) -> None: 
"""Update the folder name""" - self._put(name=name) + self._staging["name"] = name - @property + @dynamic_property def star(self) -> bool: """Return if this folder is starred""" return self._get().get("starred", False) @@ -109,7 +112,7 @@ def star(self) -> bool: @pydantic.validate_call def star(self, is_true: bool = True) -> None: """Star this folder as a favourite""" - self._put(starred=is_true) + self._staging["starred"] = is_true @property def ttl(self) -> int: diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 02fe9d8a..1f20a13e 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -1,6 +1,6 @@ import pydantic.color import typing -from .base import SimvueObject +from .base import SimvueObject, dynamic_property class Tag(SimvueObject): @@ -23,7 +23,7 @@ def new( _tag._post(**_data) return _tag - @property + @dynamic_property def name(self) -> str: try: return self._get()["name"] @@ -33,9 +33,9 @@ def name(self) -> str: @name.setter @pydantic.validate_call def name(self, name: str) -> None: - self._put(name=name) + self._staging["name"] = name - @property + @dynamic_property def color(self) -> pydantic.color.RGBA: try: _color: str = self._get()["colour"] @@ -46,9 +46,9 @@ def color(self) -> pydantic.color.RGBA: @color.setter @pydantic.validate_call def color(self, color: pydantic.color.Color) -> None: - self._put(colour=color.as_hex()) + self._staging["colour"] = color.as_hex() - @property + @dynamic_property def description(self) -> str: try: return self._get()["description"] @@ -58,4 +58,4 @@ def description(self) -> str: @description.setter @pydantic.validate_call def description(self, description: str) -> None: - self._put(description=description) + self._staging["description"] = description diff --git a/tests/unit/test_alert.py b/tests/unit/test_alert.py index 4fb19a6b..4b3538a7 100644 --- a/tests/unit/test_alert.py +++ b/tests/unit/test_alert.py @@ -32,4 +32,6 @@ def test_event_alert_modification() -> None: 
_new_alert = SimvueAlert(_alert.id) assert isinstance(_new_alert, EventsAlert) _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() assert _new_alert.description == "updated!" From 3b0b00c1ebeffdb7ca8a7e87f12959ea89dbb417 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 12 Nov 2024 13:56:36 +0000 Subject: [PATCH 004/163] Added Run object and simplified decorator --- simvue/api/objects/__init__.py | 3 +- simvue/api/objects/alert/__init__.py | 4 +- simvue/api/objects/alert/base.py | 17 +- simvue/api/objects/alert/events.py | 5 +- simvue/api/objects/alert/fetch.py | 2 +- simvue/api/objects/alert/metrics.py | 5 +- simvue/api/objects/base.py | 23 +-- simvue/api/objects/folder.py | 21 +-- simvue/api/objects/run.py | 234 +++++++++++++++++++++++++++ simvue/api/objects/tag.py | 13 +- simvue/models.py | 1 + tests/unit/test_alert.py | 4 +- tests/unit/test_folder.py | 1 + tests/unit/test_run.py | 43 +++++ 14 files changed, 337 insertions(+), 39 deletions(-) create mode 100644 simvue/api/objects/run.py create mode 100644 tests/unit/test_run.py diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 2085eda1..e2032e23 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -1,8 +1,9 @@ from .alert import ( - SimvueAlert as SimvueAlert, + Alert as Alert, EventsAlert as EventsAlert, MetricsThresholdAlert as MetricsThresholdAlert, MetricsRangeAlert as MetricsRangeAlert, ) +from .run import Run as Run from .tag import Tag as Tag from .folder import Folder as Folder diff --git a/simvue/api/objects/alert/__init__.py b/simvue/api/objects/alert/__init__.py index 032694c8..e0911967 100644 --- a/simvue/api/objects/alert/__init__.py +++ b/simvue/api/objects/alert/__init__.py @@ -1,5 +1,5 @@ -from .fetch import SimvueAlert +from .fetch import Alert from .metrics import MetricsThresholdAlert, MetricsRangeAlert from .events import EventsAlert -__all__ = 
["SimvueAlert", "MetricsRangeAlert", "MetricsThresholdAlert", "EventsAlert"] +__all__ = ["Alert", "MetricsRangeAlert", "MetricsThresholdAlert", "EventsAlert"] diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 52a3df4c..dcced11d 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -1,6 +1,6 @@ import pydantic import typing -from simvue.api.objects.base import SimvueObject, dynamic_property +from simvue.api.objects.base import SimvueObject, staging_check from simvue.models import NAME_REGEX @@ -29,7 +29,8 @@ def name( ) -> None: self._staging["name"] = name - @dynamic_property + @property + @staging_check def description(self) -> str | None: try: return self._get()["description"] @@ -41,7 +42,8 @@ def description(self) -> str | None: def description(self, description: str | None) -> None: self._staging["description"] = description - @dynamic_property + @property + @staging_check def tags(self) -> list[str]: try: return self._get()["tags"] or [] @@ -53,7 +55,8 @@ def tags(self) -> list[str]: def tags(self, tags: list[str]) -> None: self._staging["tags"] = tags - @dynamic_property + @property + @staging_check def notification(self) -> typing.Literal["none", "email"]: try: return self._get()["notification"] @@ -72,7 +75,8 @@ def source(self) -> typing.Literal["events", "metrics", "user"]: except KeyError as e: raise RuntimeError("Expected key 'source' in alert retrieval") from e - @dynamic_property + @property + @staging_check def enabled(self) -> bool: try: return self._get()["enabled"] @@ -84,7 +88,8 @@ def enabled(self) -> bool: def enabled(self, enabled: str) -> None: self._staging["enabled"] = enabled - @dynamic_property + @property + @staging_check def abort(self) -> bool: try: return self._get()["abort"] diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index c9fe0e09..443e51dd 100644 --- a/simvue/api/objects/alert/events.py +++ 
b/simvue/api/objects/alert/events.py @@ -1,6 +1,6 @@ import typing import pydantic -from .base import AlertBase, dynamic_property +from .base import AlertBase, staging_check from simvue.models import NAME_REGEX @@ -47,7 +47,8 @@ def pattern(self) -> str: "Expected key 'pattern' in alert definition retrieval" ) from e - @dynamic_property + @property + @staging_check def frequency(self) -> int: try: return self._alert.get_alert()["frequency"] diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 98c0774c..9accc519 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -3,7 +3,7 @@ from .base import AlertBase -class SimvueAlert: +class Alert: def __new__( cls, identifier: str | None = None, **kwargs ) -> EventsAlert | MetricsRangeAlert | MetricsThresholdAlert: diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index bfad442f..5678ce6d 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -1,6 +1,6 @@ import pydantic import typing -from .base import AlertBase, dynamic_property +from .base import AlertBase, staging_check from simvue.models import NAME_REGEX Aggregate = typing.Literal["average", "sum", "at least one", "all"] @@ -28,7 +28,8 @@ def window(self) -> int: raise RuntimeError("Expected key 'window' in alert definition retrieval") return _window - @dynamic_property + @property + @staging_check def frequency(self) -> int: try: return self.alert.get_alert()["frequency"] diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index d5508fe4..fdb007c5 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -24,15 +24,17 @@ ) -def dynamic_property(member_func: typing.Callable): - def _wrapper(self: typing.Union["SimvueObject", typing.Any]) -> typing.Any: +def staging_check(member_func: typing.Callable) -> typing.Callable: + """Decorator for checking if requested attribute has uncommitted 
changes""" + + def _wrapper(self) -> typing.Any: if isinstance(self, SimvueObject): _sv_obj = self elif hasattr(self, "_sv_obj"): _sv_obj = self._sv_obj else: raise RuntimeError( - f"Cannot use 'dynamic_property' decorator on type '{self.__name__}'" + f"Cannot use 'staging_check' decorator on type '{type(self).__name__}'" ) if member_func.__name__ in _sv_obj._staging: _sv_obj._logger.warning( @@ -40,7 +42,7 @@ def _wrapper(self: typing.Union["SimvueObject", typing.Any]) -> typing.Any: ) return member_func(self) - return property(_wrapper) + return _wrapper class Visibility: @@ -51,7 +53,8 @@ def _update_visibility(self, key: str, value: typing.Any) -> None: _visibility = self._sv_obj._get_visibility() | {key: value} self._sv_obj._staging["visibility"] = _visibility - @dynamic_property + @property + @staging_check def users(self) -> list[str]: return self._sv_obj._get_visibility().get("users", []) @@ -59,7 +62,8 @@ def users(self) -> list[str]: def users(self, users: list[str]) -> None: self._update_visibility("users", users) - @dynamic_property + @property + @staging_check def public(self) -> bool: return self._sv_obj._get_visibility().get("public", False) @@ -67,7 +71,8 @@ def public(self) -> bool: def public(self, public: bool) -> None: self._update_visibility("public", public) - @dynamic_property + @property + @staging_check def tenant(self) -> bool: return self._sv_obj._get_visibility().get("tenant", False) @@ -94,7 +99,7 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: return visibility @classmethod - def new(cls, **kwargs) -> "SimvueObject": + def new(cls, **kwargs): _obj = SimvueObject() _obj._post(**kwargs) return _obj @@ -169,7 +174,7 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: return _json_response - def _delete(self) -> dict[str, typing.Any]: + def delete(self) -> dict[str, typing.Any]: if not self.url: raise RuntimeError( f"Identifier for instance of {self.__class__.__name__} Unknown" diff --git a/simvue/api/objects/folder.py 
b/simvue/api/objects/folder.py index bb250b65..b0819e4f 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -11,7 +11,7 @@ import typing import pydantic -from .base import SimvueObject, Visibility, dynamic_property +from .base import SimvueObject, Visibility, staging_check from simvue.models import FOLDER_REGEX @@ -44,15 +44,14 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: @classmethod @pydantic.validate_call - def new( - cls, *, path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] - ) -> typing.Self: + def new(cls, *, path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)]): """Create a new Folder on the Simvue server with the given path""" _folder = Folder() _folder._post(path=path) return _folder - @dynamic_property + @property + @staging_check def tags(self) -> list[str]: """Return list of tags assigned to this folder""" if self._staging.get("tags"): @@ -81,7 +80,8 @@ def path(self) -> pathlib.Path: f"Expected value for 'path' for folder '{self._identifier}'" ) from e - @dynamic_property + @property + @staging_check def description(self) -> typing.Optional[str]: """Return the folder description""" return self._get().get("description") @@ -92,7 +92,8 @@ def description(self, description: str) -> None: """Update the folder description""" self._staging["description"] = description - @dynamic_property + @property + @staging_check def name(self) -> typing.Optional[str]: """Return the folder name""" return self._get().get("name") @@ -103,7 +104,8 @@ def name(self, name: str) -> None: """Update the folder name""" self._staging["name"] = name - @dynamic_property + @property + @staging_check def star(self) -> bool: """Return if this folder is starred""" return self._get().get("starred", False) @@ -115,6 +117,7 @@ def star(self, is_true: bool = True) -> None: self._staging["starred"] = is_true @property + @staging_check def ttl(self) -> int: """Return the retention period for this 
folder""" try: @@ -128,4 +131,4 @@ def ttl(self) -> int: @pydantic.validate_call def ttl(self, time_seconds: int) -> None: """Update the retention period for this folder""" - self._put(ttl=time_seconds) + self._staging["ttl"] = time_seconds diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py new file mode 100644 index 00000000..6a80493b --- /dev/null +++ b/simvue/api/objects/run.py @@ -0,0 +1,234 @@ +import typing +import pydantic +import datetime +from .base import SimvueObject, staging_check, Visibility +from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT + +Status = typing.Literal[ + "lost", "failed", "completed", "terminated", "running", "created" +] + + +class Run(SimvueObject): + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + """Initialise a Run + + If an identifier is provided a connection will be made to the + object matching the identifier on the target server. + Else a new Run will be created using arguments provided in kwargs. 
+ + Parameters + ---------- + identifier : str, optional + the remote server unique id for the target run + **kwargs : dict + any additional arguments to be passed to the object initialiser + """ + self.visibility = Visibility(self) + super().__init__(identifier, **kwargs) + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], + ): + """Create a new Folder on the Simvue server with the given path""" + _run = Run() + _run._post(folder=folder, system=None, status="created") + return _run + + @property + @staging_check + def name(self) -> str: + try: + return self._get()["name"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'name' for run '{self._identifier}'" + ) from e + + @name.setter + @pydantic.validate_call + def name( + self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] + ) -> None: + self._staging["name"] = name + + @property + @staging_check + def tags(self) -> list[str]: + try: + return self._get()["tags"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'tags' for run '{self._identifier}'" + ) from e + + @tags.setter + @pydantic.validate_call + def tags(self, tags: list[str]) -> None: + self._staging["tags"] = tags + + @property + @staging_check + def status(self) -> Status: + if not (_status := self._get().get("status")): + raise RuntimeError( + f"Expected value for 'status' for run '{self._identifier}'" + ) + return _status + + @status.setter + @pydantic.validate_call + def status(self, status: Status) -> None: + self._staging["status"] = status + + @property + @staging_check + def ttl(self) -> int: + """Return the retention period for this run""" + try: + return self._get()["ttl"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'ttl' for run '{self._identifier}'" + ) from e + + @ttl.setter + @pydantic.validate_call + def ttl(self, time_seconds: int) -> None: + """Update the retention period 
for this run""" + self._staging["ttl"] = time_seconds + + @property + @staging_check + def folder(self) -> str: + if not (_folder := self._get().get("folder")): + raise RuntimeError( + f"Expected value for 'folder' for run '{self._identifier}'" + ) + return _folder + + @folder.setter + @pydantic.validate_call + def folder( + self, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] + ) -> None: + self._staging["folder"] = folder + + @property + @staging_check + def metadata(self) -> dict[str, typing.Any]: + if not (_metadata := self._get().get("metadata")): + raise RuntimeError( + f"Expected value for 'metadata' for run '{self._identifier}'" + ) + return _metadata + + @metadata.setter + @pydantic.validate_call + def metadata(self, metadata: dict[str, typing.Any]) -> None: + self._staging["metadata"] = metadata + + @property + @staging_check + def description(self) -> str: + if not (_description := self._get().get("description")): + raise RuntimeError( + f"Expected value for 'description' for run '{self._identifier}'" + ) + return _description + + @description.setter + @pydantic.validate_call + def description(self, description: str) -> None: + self._staging["description"] = description + + @property + def system(self) -> dict[str, typing.Any]: + if not (_system := self._get().get("system")): + raise RuntimeError( + f"Expected value for 'descriptio' for run '{self._identifier}'" + ) + return _system + + @property + @staging_check + def heartbeat_timeout(self) -> int: + if not (_heartbeat_timeout := self._get().get("heartbeat_timeout")): + raise RuntimeError( + f"Expected value for 'heartbeat_timeout' for run '{self._identifier}'" + ) + return _heartbeat_timeout + + @heartbeat_timeout.setter + @pydantic.validate_call + def heartbeat_timeout(self, time_seconds: int) -> None: + self._staging["heartbeat_timeout"] = time_seconds + + @property + @staging_check + def notifications(self) -> typing.Literal["none", "email"]: + try: + return 
self._get()["notifications"] + except KeyError as e: + raise RuntimeError("Expected key 'notifications' in alert retrieval") from e + + @notifications.setter + @pydantic.validate_call + def notifications(self, notifications: typing.Literal["none", "email"]) -> None: + self._staging["notifications"] = notifications + + @property + @staging_check + def alerts(self) -> list[str]: + try: + return self._get()["alerts"] + except KeyError as e: + raise RuntimeError("Expected key 'alerts' in alert retrieval") from e + + @alerts.setter + @pydantic.validate_call + def alerts(self, alerts: list[str]) -> None: + self._staging["alerts"] = alerts + + @property + @staging_check + def created(self) -> datetime.datetime: + try: + return datetime.datetime.strptime(self._get()["created"], DATETIME_FORMAT) + except KeyError as e: + raise RuntimeError("Expected key 'created' in alert retrieval") from e + + @created.setter + @pydantic.validate_call + def created(self, created: datetime.datetime) -> None: + self._staging["created"] = created.strftime(DATETIME_FORMAT) + + @property + @staging_check + def started(self) -> datetime.datetime: + try: + return datetime.datetime.strptime(self._get()["started"], DATETIME_FORMAT) + except KeyError as e: + raise RuntimeError("Expected key 'started' in alert retrieval") from e + + @started.setter + @pydantic.validate_call + def started(self, started: datetime.datetime) -> None: + self._staging["started"] = started.strftime(DATETIME_FORMAT) + + @property + @staging_check + def endtime(self) -> datetime.datetime: + try: + return datetime.datetime.strptime(self._get()["endtime"], DATETIME_FORMAT) + except KeyError as e: + raise RuntimeError("Expected key 'endtime' in alert retrieval") from e + + @endtime.setter + @pydantic.validate_call + def endtime(self, endtime: datetime.datetime) -> None: + self._staging["endtime"] = endtime.strftime(DATETIME_FORMAT) diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 1f20a13e..0381230c 100644 
--- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -1,6 +1,6 @@ import pydantic.color import typing -from .base import SimvueObject, dynamic_property +from .base import SimvueObject, staging_check class Tag(SimvueObject): @@ -12,7 +12,7 @@ def new( name: str, description: str | None = None, color: pydantic.color.Color | None = None, - ) -> typing.Self: + ): """Create a new Tag on the Simvue server""" _data: dict[str, typing.Any] = {"name": name} if description: @@ -23,7 +23,8 @@ def new( _tag._post(**_data) return _tag - @dynamic_property + @property + @staging_check def name(self) -> str: try: return self._get()["name"] @@ -35,7 +36,8 @@ def name(self) -> str: def name(self, name: str) -> None: self._staging["name"] = name - @dynamic_property + @property + @staging_check def color(self) -> pydantic.color.RGBA: try: _color: str = self._get()["colour"] @@ -48,7 +50,8 @@ def color(self) -> pydantic.color.RGBA: def color(self, color: pydantic.color.Color) -> None: self._staging["colour"] = color.as_hex() - @dynamic_property + @property + @staging_check def description(self) -> str: try: return self._get()["description"] diff --git a/simvue/models.py b/simvue/models.py index 235acd04..2e47b440 100644 --- a/simvue/models.py +++ b/simvue/models.py @@ -4,6 +4,7 @@ FOLDER_REGEX: str = r"^/.*" NAME_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:]+$" METRIC_KEY_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:=><]+$" +DATETIME_FORMAT: str = "%Y-%m-%d %H:%M:%S.%f" MetadataKeyString = Annotated[str, StringConstraints(pattern=r"^[\w\-\s\.]+$")] TagString = Annotated[str, StringConstraints(pattern=r"^[\w\-\s\.]+$")] diff --git a/tests/unit/test_alert.py b/tests/unit/test_alert.py index 4b3538a7..dde990a1 100644 --- a/tests/unit/test_alert.py +++ b/tests/unit/test_alert.py @@ -2,7 +2,7 @@ import pytest import uuid -from simvue.api.objects import SimvueAlert, MetricsRangeAlert, MetricsThresholdAlert, EventsAlert +from simvue.api.objects import Alert, MetricsRangeAlert, 
MetricsThresholdAlert, EventsAlert @pytest.mark.api def test_event_alert_creation() -> None: @@ -29,7 +29,7 @@ def test_event_alert_modification() -> None: notification="none" ) time.sleep(1) - _new_alert = SimvueAlert(_alert.id) + _new_alert = Alert(_alert.id) assert isinstance(_new_alert, EventsAlert) _new_alert.description = "updated!" assert _new_alert.description != "updated!" diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 1dded7a9..50ca9008 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -29,6 +29,7 @@ def test_folder_modification() -> None: _folder_new.tags = _tags _folder_new.description = _description _folder_new.visibility.tenant = True + _folder_new.commit() assert _folder_new.tags == _tags assert _folder.tags == _tags assert _folder_new.description == _description diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py new file mode 100644 index 00000000..49bac8d0 --- /dev/null +++ b/tests/unit/test_run.py @@ -0,0 +1,43 @@ +import pytest +import time +import datetime +import uuid + +from simvue.api.objects import Run, Folder + +@pytest.mark.api +def test_run_creation() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + assert _run.folder == _folder_name + _run.delete() + _folder.delete() + + +@pytest.mark.api +def test_run_modification() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + assert _run.folder == _folder_name + time.sleep(1) + _now = datetime.datetime.now() + _new_run = Run(identifier=_run.id) + _new_run.name = "simvue_test_run" + _new_run.description = "Simvue test run" + _new_run.created = _now + _new_run.tags = ["simvue", "test", "tag"] + _new_run.ttl = 120 + assert _new_run.ttl != 120 + _new_run.commit() + assert 
_new_run.ttl == 120 + assert _new_run.description == "Simvue test run" + assert _new_run.created == _now + assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) + assert _new_run.name == "simvue_test_run" + _run.delete() + _folder.delete() + From 9cd7f90ec28582d2623cbb3b6baf86fee66c16d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 13 Nov 2024 08:34:03 +0000 Subject: [PATCH 005/163] Added artifact object and committing to creation --- simvue/api/objects/__init__.py | 1 + simvue/api/objects/artifact.py | 98 ++++++++++++++++++++++++++++++++++ simvue/api/objects/base.py | 31 ++++++++--- simvue/api/objects/folder.py | 3 -- simvue/utilities.py | 18 ++++++- tests/unit/test_alert.py | 2 + tests/unit/test_artifact.py | 34 ++++++++++++ tests/unit/test_folder.py | 2 + tests/unit/test_run.py | 4 ++ tests/unit/test_tag.py | 3 ++ 10 files changed, 186 insertions(+), 10 deletions(-) create mode 100644 simvue/api/objects/artifact.py create mode 100644 tests/unit/test_artifact.py diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index e2032e23..1deb7774 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -4,6 +4,7 @@ MetricsThresholdAlert as MetricsThresholdAlert, MetricsRangeAlert as MetricsRangeAlert, ) +from .artifact import Artifact as Artifact from .run import Run as Run from .tag import Tag as Tag from .folder import Folder as Folder diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py new file mode 100644 index 00000000..30cba2ba --- /dev/null +++ b/simvue/api/objects/artifact.py @@ -0,0 +1,98 @@ +import typing +import os.path +import pydantic + +from simvue.models import NAME_REGEX +from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 +from .base import SimvueObject + +Category = typing.Literal["code", "input", "output"] + + +class Artifact(SimvueObject): + @classmethod + @pydantic.validate_call + def new( + cls, + 
*, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + run: str, + storage: str | None, + category: Category, + file_path: pydantic.FilePath, + file_type: str | None, + ): + _file_type = file_type or get_mimetype_for_file(file_path) + + if _file_type not in get_mimetypes(): + raise ValueError(f"Invalid MIME type '{file_type}' specified") + + _file_size = file_path.stat().st_size + _file_orig_path = file_path.expanduser().absolute() + _file_checksum = calculate_sha256(f"{file_path}", is_file=True) + + _artifact = Artifact() + _artifact._post( + name=name, + run=run, + storage=storage, + category=category, + originalPath=os.path.expandvars(_file_orig_path), + size=_file_size, + type=_file_type, + checksum=_file_checksum, + ) + + @property + def name(self) -> str: + try: + return self._get()["name"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'name' for artifact '{self._identifier}'" + ) from e + + @property + def checksum(self) -> str: + try: + return self._get()["checksum"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'checksum' for artifact '{self._identifier}'" + ) from e + + @property + def category(self) -> Category: + try: + return self._get()["category"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'category' for artifact '{self._identifier}'" + ) from e + + @property + def original_path(self) -> str: + try: + return self._get()["originalPath"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'originalPath' for artifact '{self._identifier}'" + ) from e + + @property + def storage(self) -> str: + try: + return self._get()["storage"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'storage' for artifact '{self._identifier}'" + ) from e + + @property + def type(self) -> str: + try: + return self._get()["type"] + except KeyError as e: + raise RuntimeError( + f"Expected value for 'type' for artifact '{self._identifier}'" + ) from e diff 
--git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index fdb007c5..b33ec04f 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -12,6 +12,7 @@ import http from codecarbon.external.logger import logging +from codecarbon.output_methods.emissions_data import json from simvue.config.user import SimvueConfiguration from simvue.version import __version__ @@ -86,8 +87,15 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) self._identifier: typing.Optional[str] = identifier - self._user_config = SimvueConfiguration.fetch(**kwargs) - self._staging: dict[str, typing.Any] = {} + + _config_args = { + "server_url": kwargs.pop("server_url", None), + "server_token": kwargs.pop("server_token", None), + } + + self._user_config = SimvueConfiguration.fetch(**_config_args) + + self._staging: dict[str, typing.Any] = kwargs self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token}", "User-Agent": f"Simvue Python client {__version__}", @@ -100,14 +108,17 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: @classmethod def new(cls, **kwargs): - _obj = SimvueObject() - _obj._post(**kwargs) - return _obj + return SimvueObject(**kwargs) def commit(self) -> None: if not self._staging: return - self._put(**self._staging) + + # Initial commit is creation of object + if not self._identifier: + self._post(**self._staging) + else: + self._put(**self._staging) @property def id(self) -> typing.Optional[str]: @@ -213,3 +224,11 @@ def _get(self) -> dict[str, typing.Any]: f"but got '{type(_json_response)}'" ) return _json_response + + def cache(self, output_file: pathlib.Path) -> None: + if not (_dir := output_file.parent).exists(): + raise FileNotFoundError( + f"Cannot write {self._label} to '{_dir}', not a directory." 
+ ) + with output_file.open("w", encoding="utf-8") as out_f: + json.dump(self._staging, out_f, indent=2) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index b0819e4f..01f52e76 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -54,9 +54,6 @@ def new(cls, *, path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX) @staging_check def tags(self) -> list[str]: """Return list of tags assigned to this folder""" - if self._staging.get("tags"): - self._logger.warning("Uncommitted changes found for attribute 'tags'") - return self._staging["tags"] try: return self._get()["tags"] except KeyError as e: diff --git a/simvue/utilities.py b/simvue/utilities.py index 2159a666..d87e8d67 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -3,6 +3,7 @@ import logging import json import sys +import mimetypes import tabulate import pydantic import importlib.util @@ -307,7 +308,7 @@ def prepare_for_api(data_in, all=True): return data -def calculate_sha256(filename: str, is_file: bool) -> typing.Optional[str]: +def calculate_sha256(filename: str | typing.Any, is_file: bool) -> typing.Optional[str]: """ Calculate sha256 checksum of the specified file """ @@ -378,3 +379,18 @@ def simvue_timestamp(date_time: typing.Optional[datetime.datetime] = None) -> st if not date_time: date_time = datetime.datetime.now(timezone.utc) return date_time.strftime("%Y-%m-%d %H:%M:%S.%f") + + +@functools.lru_cache +def get_mimetypes() -> list[str]: + """Returns a list of allowed MIME types""" + mimetypes.init() + _valid_mimetypes = ["application/vnd.plotly.v1+json"] + _valid_mimetypes += list(mimetypes.types_map.values()) + return _valid_mimetypes + + +def get_mimetype_for_file(file_path: pathlib.Path) -> str: + """Return MIME type for the given file""" + _guess, *_ = mimetypes.guess_type(file_path) + return _guess or "application/octet-stream" diff --git a/tests/unit/test_alert.py b/tests/unit/test_alert.py index dde990a1..abdbe4dd 
100644 --- a/tests/unit/test_alert.py +++ b/tests/unit/test_alert.py @@ -13,6 +13,7 @@ def test_event_alert_creation() -> None: pattern="completed", notification="none" ) + _alert.commit() assert _alert.alert.frequency == 1 assert _alert.alert.pattern == "completed" assert _alert.name == f"events_alert_{_uuid}" @@ -28,6 +29,7 @@ def test_event_alert_modification() -> None: pattern="completed", notification="none" ) + _alert.commit() time.sleep(1) _new_alert = Alert(_alert.id) assert isinstance(_new_alert, EventsAlert) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py new file mode 100644 index 00000000..0bf475e2 --- /dev/null +++ b/tests/unit/test_artifact.py @@ -0,0 +1,34 @@ +import pytest +import uuid +import time +import pathlib +import tempfile + +from simvue.api.objects import Artifact, Run +from simvue.api.objects.folder import Folder + +@pytest.mark.api +def test_artifact_creation() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + + with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: + _path = pathlib.Path(temp_f.name) + with _path.open("w") as out_f: + out_f.write("Hello World!") + _artifact = Artifact.new( + name=f"test_artifact_{_uuid}", + run=_run.id, + file_path=_path, + category="input", + storage=None, + file_type=None + ) + _artifact.commit() + time.sleep(1) + assert _artifact.name == f"test_artifact_{_uuid}" + _artifact.delete() + _run.delete() + _folder.delete() diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 50ca9008..3492d71e 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -10,6 +10,7 @@ def test_folder_creation() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _path = f"/simvue_unit_testing/objects/folder/{_uuid}" _folder = Folder.new(path=_path) + _folder.commit() assert _folder.id assert _folder.path == _path assert not 
_folder.visibility.public @@ -24,6 +25,7 @@ def test_folder_modification() -> None: _description = "Test study" _tags = ["testing", "api"] _folder = Folder.new(path=_path) + _folder.commit() time.sleep(1) _folder_new = Folder(identifier=_folder.id) _folder_new.tags = _tags diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 49bac8d0..8cd7296d 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -11,6 +11,8 @@ def test_run_creation() -> None: _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name) _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() assert _run.folder == _folder_name _run.delete() _folder.delete() @@ -22,6 +24,8 @@ def test_run_modification() -> None: _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name) _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() assert _run.folder == _folder_name time.sleep(1) _now = datetime.datetime.now() diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index f3d6d617..a67e9130 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -7,6 +7,7 @@ def test_tag_creation() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}") + _tag.commit() assert _tag.name == f"test_tag_{_uuid}" assert _tag.color assert not _tag.description @@ -16,11 +17,13 @@ def test_tag_creation() -> None: def test_tag_modification() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}") + _tag.commit() time.sleep(1) _new_tag = Tag(_tag.id) _new_tag.name = _tag.name.replace("test", "test_modified") _new_tag.color = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) _new_tag.description = "modified test tag" + _new_tag.commit() assert _new_tag.name == f"test_modified_tag_{_uuid}" assert _new_tag.color.r == 250 / 255 assert _new_tag.description == "modified test tag" From 52ca992e1c8f140644ac9284c280617faeb6a9f1 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 13 Nov 2024 15:23:55 +0000 Subject: [PATCH 006/163] Improved alert tests and added user alert --- simvue/api/objects/__init__.py | 2 + simvue/api/objects/alert/__init__.py | 9 +- simvue/api/objects/alert/base.py | 47 ++++------- simvue/api/objects/alert/events.py | 15 ++-- simvue/api/objects/alert/metrics.py | 119 +++++++++++++------------- simvue/api/objects/alert/user.py | 27 ++++++ simvue/api/objects/artifact.py | 50 +++-------- simvue/api/objects/base.py | 121 +++++++++++++++++++++++---- simvue/api/objects/folder.py | 32 +++---- simvue/api/objects/run.py | 95 ++++++--------------- simvue/api/objects/tag.py | 23 ++--- simvue/config/parameters.py | 5 -- simvue/config/user.py | 4 +- simvue/factory/proxy/offline.py | 7 +- simvue/run.py | 10 +-- simvue/sender.py | 17 ++-- tests/functional/test_config.py | 2 +- tests/unit/test_alert.py | 39 --------- tests/unit/test_artifact.py | 33 +++++++- tests/unit/test_folder.py | 50 ++++++++++- tests/unit/test_run.py | 58 ++++++++++++- tests/unit/test_tag.py | 41 ++++++++- 22 files changed, 475 insertions(+), 331 deletions(-) delete mode 100644 tests/unit/test_alert.py diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 1deb7774..438e26e5 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -3,7 +3,9 @@ EventsAlert as EventsAlert, MetricsThresholdAlert as MetricsThresholdAlert, MetricsRangeAlert as MetricsRangeAlert, + UserAlert as UserAlert, ) +from .stats import Stats as Stats from .artifact import Artifact as Artifact from .run import Run as Run from .tag import Tag as Tag diff --git a/simvue/api/objects/alert/__init__.py b/simvue/api/objects/alert/__init__.py index e0911967..fb1c7349 100644 --- a/simvue/api/objects/alert/__init__.py +++ b/simvue/api/objects/alert/__init__.py @@ -1,5 +1,12 @@ from .fetch import Alert from .metrics import MetricsThresholdAlert, MetricsRangeAlert from .events import 
EventsAlert +from .user import UserAlert -__all__ = ["Alert", "MetricsRangeAlert", "MetricsThresholdAlert", "EventsAlert"] +__all__ = [ + "Alert", + "MetricsRangeAlert", + "MetricsThresholdAlert", + "EventsAlert", + "UserAlert", +] diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index dcced11d..988334bd 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -5,22 +5,25 @@ class AlertBase(SimvueObject): + @classmethod + def new(cls, offline: bool = False, **kwargs): + _alert = AlertBase(**kwargs) + _alert.offline_mode(offline) + return _alert + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self._label = "alert" super().__init__(identifier, **kwargs) def get_alert(self) -> dict[str, typing.Any]: try: - return self._get()["alert"] - except KeyError as e: - raise RuntimeError("Expected key 'alert' in alert retrieval") from e + return self._get_attribute("alert") + except AttributeError: + return {} @property def name(self) -> str: - try: - return self._get()["name"] - except KeyError as e: - raise RuntimeError("Expected key 'name' in alert retrieval") from e + return self._get_attribute("name") @name.setter @pydantic.validate_call @@ -32,10 +35,7 @@ def name( @property @staging_check def description(self) -> str | None: - try: - return self._get()["description"] - except KeyError as e: - raise RuntimeError("Expected key 'description' in alert retrieval") from e + return self._get_attribute("description") @description.setter @pydantic.validate_call @@ -45,10 +45,7 @@ def description(self, description: str | None) -> None: @property @staging_check def tags(self) -> list[str]: - try: - return self._get()["tags"] or [] - except KeyError as e: - raise RuntimeError("Expected key 'tags' in alert retrieval") from e + return self._get_attribute("tags") @tags.setter @pydantic.validate_call @@ -58,10 +55,7 @@ def tags(self, tags: list[str]) -> None: @property @staging_check def 
notification(self) -> typing.Literal["none", "email"]: - try: - return self._get()["notification"] - except KeyError as e: - raise RuntimeError("Expected key 'notification' in alert retrieval") from e + return self._get_attribute("notification") @notification.setter @pydantic.validate_call @@ -70,18 +64,12 @@ def notification(self, notification: typing.Literal["none", "email"]) -> None: @property def source(self) -> typing.Literal["events", "metrics", "user"]: - try: - return self._get()["source"] - except KeyError as e: - raise RuntimeError("Expected key 'source' in alert retrieval") from e + return self._get_attribute("source") @property @staging_check def enabled(self) -> bool: - try: - return self._get()["enabled"] - except KeyError as e: - raise RuntimeError("Expected key 'enabled' in alert retrieval") from e + return self._get_attribute("enabled") @enabled.setter @pydantic.validate_call @@ -91,10 +79,7 @@ def enabled(self, enabled: str) -> None: @property @staging_check def abort(self) -> bool: - try: - return self._get()["abort"] - except KeyError as e: - raise RuntimeError("Expected key 'abort' in alert retrieval") from e + return self._get_attribute("abort") @abort.setter @pydantic.validate_call diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 443e51dd..0401864c 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -20,10 +20,10 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, tags: list[str] | None = None, + offline: bool = False, ) -> typing.Self: - _alert = EventsAlert() _alert_definition = {"pattern": pattern, "frequency": frequency} - _alert._post( + _alert = EventsAlert( name=name, notification=notification, source="events", @@ -31,17 +31,18 @@ def new( enabled=enabled, tags=tags or [], ) + _alert.offline_mode(offline) return _alert class EventAlertDefinition: def __init__(self, alert: EventsAlert) -> None: - self._alert = alert + self._sv_obj = alert 
@property def pattern(self) -> str: try: - return self._alert.get_alert()["pattern"] + return self._sv_obj.get_alert()["pattern"] except KeyError as e: raise RuntimeError( "Expected key 'pattern' in alert definition retrieval" @@ -51,7 +52,7 @@ def pattern(self) -> str: @staging_check def frequency(self) -> int: try: - return self._alert.get_alert()["frequency"] + return self._sv_obj.get_alert()["frequency"] except KeyError as e: raise RuntimeError( "Expected key 'frequency' in alert definition retrieval" @@ -60,5 +61,5 @@ def frequency(self) -> int: @frequency.setter @pydantic.validate_call def frequency(self, frequency: int) -> None: - _alert = self._alert.get_alert() | {"frequency": frequency} - self._alert._staging["alert"] = _alert + _alert = self._sv_obj.get_alert() | {"frequency": frequency} + self._sv_obj._staging["alert"] = _alert diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 5678ce6d..c84d48e5 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -7,45 +7,7 @@ Rule = typing.Literal["is above", "is below", "is inside range", "is outside range"] -class MetricsAlert(AlertBase): - @property - def aggregation(self) -> Aggregate: - if not (_aggregation := self.alert.get_alert().get("aggregation")): - raise RuntimeError( - "Expected key 'aggregation' in alert definition retrieval" - ) - return _aggregation - - @property - def rule(self) -> Rule: - if not (_rule := self.alert.get_alert().get("rule")): - raise RuntimeError("Expected key 'rule' in alert definition retrieval") - return _rule - - @property - def window(self) -> int: - if not (_window := self.alert.get_alert().get("window")): - raise RuntimeError("Expected key 'window' in alert definition retrieval") - return _window - - @property - @staging_check - def frequency(self) -> int: - try: - return self.alert.get_alert()["frequency"] - except KeyError as e: - raise RuntimeError( - "Expected key 'frequency' in alert 
definition retrieval" - ) from e - - @frequency.setter - @pydantic.validate_call - def frequency(self, frequency: int) -> None: - _alert = self._alert.get_alert() | {"frequency": frequency} - self._alert._staging["alert"] = _alert - - -class MetricsThresholdAlert(MetricsAlert): +class MetricsThresholdAlert(AlertBase): def __init__(self, identifier: str | None = None, **kwargs) -> None: self.alert = MetricThresholdAlertDefinition(self) super().__init__(identifier, **kwargs) @@ -56,6 +18,7 @@ def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + metric: str, notification: typing.Literal["none", "email"], aggregation: Aggregate, rule: Rule, @@ -64,28 +27,30 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, tags: list[str] | None = None, + offline: bool = False, ) -> typing.Self: - _alert = MetricsThresholdAlert() _alert_definition = { "rule": rule, "frequency": frequency, "window": window, + "metric": metric, "aggregation": aggregation, "threshold": threshold, } - _alert._post( + _alert = MetricsThresholdAlert( name=name, notification=notification, - source="events", + source="metrics", alert=_alert_definition, enabled=enabled, tags=tags or [], ) + _alert.offline_mode(offline) return _alert class MetricsRangeAlert(AlertBase): - def __init__(self, identifier: str, **kwargs) -> None: + def __init__(self, identifier: str | None = None, **kwargs) -> None: self.alert = MetricRangeAlertDefinition(self) super().__init__(identifier, **kwargs) @@ -95,6 +60,7 @@ def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + metric: str, notification: typing.Literal["none", "email"], aggregation: Aggregate, rule: Rule, @@ -104,54 +70,91 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, tags: list[str] | None = None, + offline: bool = False, ) -> typing.Self: if range_low >= range_high: raise ValueError(f"Invalid arguments for range [{range_low}, {range_high}]") - _alert = MetricsThresholdAlert() 
_alert_definition = { "rule": rule, "frequency": frequency, "window": window, + "metric": metric, "aggregation": aggregation, "range_low": range_low, "range_high": range_high, } - _alert._post( + _alert = MetricsThresholdAlert( name=name, notification=notification, - source="events", - alert=_alert_definition, - enabled=enabled, + source="metrics", tags=tags or [], + enabled=enabled, + alert=_alert_definition, ) + _alert.offline_mode(offline) return _alert -class MetricThresholdAlertDefinition: - def __init__(self, alert: MetricsThresholdAlert) -> None: - self.alert = alert +class MetricsAlertDefinition: + def __init__(self, alert: MetricsRangeAlert) -> None: + self._sv_obj = alert + + @property + def aggregation(self) -> Aggregate: + if not (_aggregation := self._sv_obj.get_alert().get("aggregation")): + raise RuntimeError( + "Expected key 'aggregation' in alert definition retrieval" + ) + return _aggregation + + @property + def rule(self) -> Rule: + if not (_rule := self._sv_obj.get_alert().get("rule")): + raise RuntimeError("Expected key 'rule' in alert definition retrieval") + return _rule + @property + def window(self) -> int: + if not (_window := self._sv_obj.get_alert().get("window")): + raise RuntimeError("Expected key 'window' in alert definition retrieval") + return _window + + @property + @staging_check + def frequency(self) -> int: + try: + return self._sv_obj.get_alert()["frequency"] + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @frequency.setter + @pydantic.validate_call + def frequency(self, frequency: int) -> None: + _alert = self._sv_obj.get_alert() | {"frequency": frequency} + self._sv_obj._staging["alert"] = _alert + + +class MetricThresholdAlertDefinition(MetricsAlertDefinition): @property def threshold(self) -> float: - if not (threshold_l := self.alert.get_alert().get("threshold")): + if not (threshold_l := self._sv_obj.get_alert().get("threshold")): raise 
RuntimeError("Expected key 'threshold' in alert definition retrieval") return threshold_l -class MetricRangeAlertDefinition: - def __init__(self, alert: MetricsRangeAlert) -> None: - self.alert = alert - +class MetricRangeAlertDefinition(MetricsAlertDefinition): @property def range_low(self) -> float: - if not (range_l := self.alert.get_alert().get("range_low")): + if not (range_l := self._sv_obj.get_alert().get("range_low")): raise RuntimeError("Expected key 'range_low' in alert definition retrieval") return range_l @property def range_high(self) -> float: - if not (range_u := self.alert.get_alert().get("range_high")): + if not (range_u := self._sv_obj.get_alert().get("range_high")): raise RuntimeError( "Expected key 'range_high' in alert definition retrieval" ) diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index e69de29b..95f2399a 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -0,0 +1,27 @@ +import pydantic +import typing +from .base import AlertBase +from simvue.models import NAME_REGEX + + +class UserAlert(AlertBase): + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + notification: typing.Literal["none", "email"], + enabled: bool = True, + tags: list[str] | None = None, + offline: bool = False, + ) -> typing.Self: + _alert = UserAlert( + name=name, + notification=notification, + source="user", + enabled=enabled, + tags=tags or [], + ) + _alert.offline_mode(offline) + return _alert diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 30cba2ba..839139f0 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -8,6 +8,8 @@ Category = typing.Literal["code", "input", "output"] +__all__ = ["Artifact"] + class Artifact(SimvueObject): @classmethod @@ -21,6 +23,7 @@ def new( category: Category, file_path: pydantic.FilePath, file_type: str | None, + offline: 
bool = False, ): _file_type = file_type or get_mimetype_for_file(file_path) @@ -31,8 +34,7 @@ def new( _file_orig_path = file_path.expanduser().absolute() _file_checksum = calculate_sha256(f"{file_path}", is_file=True) - _artifact = Artifact() - _artifact._post( + _artifact = Artifact( name=name, run=run, storage=storage, @@ -42,57 +44,29 @@ def new( type=_file_type, checksum=_file_checksum, ) + _artifact.offline_mode(offline) + return _artifact @property def name(self) -> str: - try: - return self._get()["name"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'name' for artifact '{self._identifier}'" - ) from e + return self._get_attribute("name") @property def checksum(self) -> str: - try: - return self._get()["checksum"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'checksum' for artifact '{self._identifier}'" - ) from e + return self._get_attribute("checksum") @property def category(self) -> Category: - try: - return self._get()["category"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'category' for artifact '{self._identifier}'" - ) from e + return self._get_attribute("category") @property def original_path(self) -> str: - try: - return self._get()["originalPath"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'originalPath' for artifact '{self._identifier}'" - ) from e + return self._get_attribute("originalPath") @property def storage(self) -> str: - try: - return self._get()["storage"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'storage' for artifact '{self._identifier}'" - ) from e + return self._get_attribute("storage") @property def type(self) -> str: - try: - return self._get()["type"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'type' for artifact '{self._identifier}'" - ) from e + return self._get_attribute("type") diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index b33ec04f..2a2e472d 
100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -8,6 +8,7 @@ import abc import pathlib import typing +import uuid import boltons.urlutils as bo_url import http @@ -86,7 +87,12 @@ class SimvueObject(abc.ABC): def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) - self._identifier: typing.Optional[str] = identifier + self._identifier: typing.Optional[str] = ( + identifier if identifier is not None else f"offline_{uuid.uuid1()}" + ) + self._offline: bool = identifier is not None and identifier.startswith( + "offline_" + ) _config_args = { "server_url": kwargs.pop("server_url", None), @@ -94,32 +100,88 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: } self._user_config = SimvueConfiguration.fetch(**_config_args) + self._local_staging_file: pathlib.Path = ( + self._user_config.offline.cache.joinpath("staging.json") + ) + + # Recover any locally staged changes + self._staging: dict[str, typing.Any] = self._get_local_staged() | kwargs - self._staging: dict[str, typing.Any] = kwargs self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token}", "User-Agent": f"Simvue Python client {__version__}", } + def _get_local_staged(self) -> dict[str, typing.Any]: + """Retrieve any locally staged data for this identifier""" + if not self._local_staging_file.exists() or not self._identifier: + return {} + + with self._local_staging_file.open() as in_f: + _staged_data = json.load(in_f) + + return _staged_data.get(self._label, {}).get(self._identifier, {}) + + def _get_attribute(self, attribute: str) -> typing.Any: + try: + return self._get()[attribute] + except KeyError as e: + if self._offline: + raise AttributeError( + f"A value for attribute '{attribute}' has " + f"not yet been committed for offline {self._label} 
'{self._identifier}'" + ) from e + raise RuntimeError( + f"Expected key '{attribute}' for {self._label} '{self._identifier}'" + ) from e + + def _clear_staging(self) -> None: + self._staging = {} + + if not self._local_staging_file.exists(): + return + + with self._local_staging_file.open() as in_f: + _staged_data = json.load(in_f) + + if _staged_data.get(self._label): + _staged_data[self._label].pop(self._identifier, None) + + with self._local_staging_file.open("w") as out_f: + json.dump(_staged_data, out_f, indent=2) + + def offline_mode(self, is_true: bool) -> None: + self._offline = is_true + def _get_visibility(self) -> dict[str, bool | list[str]]: - if not (visibility := self._get().get("visibility")): - raise RuntimeError("Expected key 'visibility' in response") - return visibility + try: + return self._get_attribute("visibility") + except AttributeError: + return {} - @classmethod - def new(cls, **kwargs): - return SimvueObject(**kwargs) + @abc.abstractclassmethod + def new(cls, offline: bool = False, **kwargs): + pass def commit(self) -> None: if not self._staging: return + if self._offline: + _offline_dir: pathlib.Path = self._user_config.offline.cache + _offline_file = _offline_dir.joinpath("staging.json") + self._cache() + return + # Initial commit is creation of object - if not self._identifier: + if not self._identifier or self._identifier.startswith("offline_"): self._post(**self._staging) else: self._put(**self._staging) + # Clear staged changes + self._clear_staging() + @property def id(self) -> typing.Optional[str]: return self._identifier @@ -136,7 +198,7 @@ def _base_url(self) -> str: @property def url(self) -> typing.Optional[str]: - if not self._identifier: + if self._identifier is None: return None _url = bo_url.URL(self._user_config.server.url) _url.path = f"{self._url_path / self._identifier}" @@ -186,6 +248,18 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: return _json_response def delete(self) -> dict[str, typing.Any]: + if 
self._get_local_staged(): + with self._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + _local_data[self._label].pop(self._identifier, None) + + with self._local_staging_file.open("w") as out_f: + json.dump(_local_data, out_f, indent=2) + + if self._offline: + return {"id": self._identifier} + if not self.url: raise RuntimeError( f"Identifier for instance of {self.__class__.__name__} Unknown" @@ -206,6 +280,9 @@ def delete(self) -> dict[str, typing.Any]: return _json_response def _get(self) -> dict[str, typing.Any]: + if self._offline: + return self._get_local_staged() + if not self.url: raise RuntimeError( f"Identifier for instance of {self.__class__.__name__} Unknown" @@ -225,10 +302,20 @@ def _get(self) -> dict[str, typing.Any]: ) return _json_response - def cache(self, output_file: pathlib.Path) -> None: - if not (_dir := output_file.parent).exists(): - raise FileNotFoundError( - f"Cannot write {self._label} to '{_dir}', not a directory." - ) - with output_file.open("w", encoding="utf-8") as out_f: - json.dump(self._staging, out_f, indent=2) + def _cache(self) -> None: + if not (_dir := self._local_staging_file.parent).exists(): + _dir.mkdir() + + _local_data: dict[str, typing.Any] = {} + + if self._local_staging_file.exists(): + with self._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + if not _local_data.get(self._label): + _local_data[self._label] = {} + + _local_data[self._label][self._identifier] = self._staging + + with self._local_staging_file.open("w", encoding="utf-8") as out_f: + json.dump(_local_data, out_f, indent=2) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 01f52e76..9611c875 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -44,22 +44,22 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: @classmethod @pydantic.validate_call - def new(cls, *, path: typing.Annotated[str, 
pydantic.Field(pattern=FOLDER_REGEX)]): + def new( + cls, + *, + path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], + offline: bool = False, + ): """Create a new Folder on the Simvue server with the given path""" - _folder = Folder() - _folder._post(path=path) + _folder = Folder(path=path) + _folder.offline_mode(offline) return _folder @property @staging_check def tags(self) -> list[str]: """Return list of tags assigned to this folder""" - try: - return self._get()["tags"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'tags' for folder '{self._identifier}'" - ) from e + return self._get_attribute("tags") @tags.setter @pydantic.validate_call @@ -70,12 +70,7 @@ def tags(self, tags: list[str]) -> None: @property def path(self) -> pathlib.Path: """Return the path of this folder""" - try: - return self._get()["path"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'path' for folder '{self._identifier}'" - ) from e + return self._get_attribute("path") @property @staging_check @@ -117,12 +112,7 @@ def star(self, is_true: bool = True) -> None: @staging_check def ttl(self) -> int: """Return the retention period for this folder""" - try: - return self._get()["ttl"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'ttl' for folder '{self._identifier}'" - ) from e + return self._get_attribute("ttl") @ttl.setter @pydantic.validate_call diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 6a80493b..fdec72d3 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -8,6 +8,8 @@ "lost", "failed", "completed", "terminated", "running", "created" ] +__all__ = ["Run"] + class Run(SimvueObject): def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: @@ -33,21 +35,17 @@ def new( cls, *, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], + offline: bool = False, ): """Create a new Folder on the Simvue server with the given path""" - 
_run = Run() - _run._post(folder=folder, system=None, status="created") + _run = Run(folder=folder, system=None, status="created") + _run.offline_mode(offline) return _run @property @staging_check def name(self) -> str: - try: - return self._get()["name"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'name' for run '{self._identifier}'" - ) from e + return self._get_attribute("name") @name.setter @pydantic.validate_call @@ -59,12 +57,7 @@ def name( @property @staging_check def tags(self) -> list[str]: - try: - return self._get()["tags"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'tags' for run '{self._identifier}'" - ) from e + return self._get_attribute("tags") @tags.setter @pydantic.validate_call @@ -74,11 +67,7 @@ def tags(self, tags: list[str]) -> None: @property @staging_check def status(self) -> Status: - if not (_status := self._get().get("status")): - raise RuntimeError( - f"Expected value for 'status' for run '{self._identifier}'" - ) - return _status + return self._get_attribute("status") @status.setter @pydantic.validate_call @@ -89,12 +78,7 @@ def status(self, status: Status) -> None: @staging_check def ttl(self) -> int: """Return the retention period for this run""" - try: - return self._get()["ttl"] - except KeyError as e: - raise RuntimeError( - f"Expected value for 'ttl' for run '{self._identifier}'" - ) from e + return self._get_attribute("ttl") @ttl.setter @pydantic.validate_call @@ -105,11 +89,7 @@ def ttl(self, time_seconds: int) -> None: @property @staging_check def folder(self) -> str: - if not (_folder := self._get().get("folder")): - raise RuntimeError( - f"Expected value for 'folder' for run '{self._identifier}'" - ) - return _folder + return self._get_attribute("folder") @folder.setter @pydantic.validate_call @@ -121,11 +101,7 @@ def folder( @property @staging_check def metadata(self) -> dict[str, typing.Any]: - if not (_metadata := self._get().get("metadata")): - raise RuntimeError( - 
f"Expected value for 'metadata' for run '{self._identifier}'" - ) - return _metadata + return self._get_attribute("metadata") @metadata.setter @pydantic.validate_call @@ -135,11 +111,7 @@ def metadata(self, metadata: dict[str, typing.Any]) -> None: @property @staging_check def description(self) -> str: - if not (_description := self._get().get("description")): - raise RuntimeError( - f"Expected value for 'description' for run '{self._identifier}'" - ) - return _description + return self._get_attribute("description") @description.setter @pydantic.validate_call @@ -148,20 +120,12 @@ def description(self, description: str) -> None: @property def system(self) -> dict[str, typing.Any]: - if not (_system := self._get().get("system")): - raise RuntimeError( - f"Expected value for 'descriptio' for run '{self._identifier}'" - ) - return _system + return self._get_attribute("system") @property @staging_check def heartbeat_timeout(self) -> int: - if not (_heartbeat_timeout := self._get().get("heartbeat_timeout")): - raise RuntimeError( - f"Expected value for 'heartbeat_timeout' for run '{self._identifier}'" - ) - return _heartbeat_timeout + return self._get_attribute("heartbeat_timeout") @heartbeat_timeout.setter @pydantic.validate_call @@ -171,10 +135,7 @@ def heartbeat_timeout(self, time_seconds: int) -> None: @property @staging_check def notifications(self) -> typing.Literal["none", "email"]: - try: - return self._get()["notifications"] - except KeyError as e: - raise RuntimeError("Expected key 'notifications' in alert retrieval") from e + return self._get_attribute("notifications") @notifications.setter @pydantic.validate_call @@ -184,10 +145,7 @@ def notifications(self, notifications: typing.Literal["none", "email"]) -> None: @property @staging_check def alerts(self) -> list[str]: - try: - return self._get()["alerts"] - except KeyError as e: - raise RuntimeError("Expected key 'alerts' in alert retrieval") from e + return self._get_attribute("alerts") @alerts.setter 
@pydantic.validate_call @@ -197,10 +155,9 @@ def alerts(self, alerts: list[str]) -> None: @property @staging_check def created(self) -> datetime.datetime: - try: - return datetime.datetime.strptime(self._get()["created"], DATETIME_FORMAT) - except KeyError as e: - raise RuntimeError("Expected key 'created' in alert retrieval") from e + return datetime.datetime.strptime( + self._get_attribute("created"), DATETIME_FORMAT + ) @created.setter @pydantic.validate_call @@ -210,10 +167,9 @@ def created(self, created: datetime.datetime) -> None: @property @staging_check def started(self) -> datetime.datetime: - try: - return datetime.datetime.strptime(self._get()["started"], DATETIME_FORMAT) - except KeyError as e: - raise RuntimeError("Expected key 'started' in alert retrieval") from e + return datetime.datetime.strptime( + self._get_attribute("started"), DATETIME_FORMAT + ) @started.setter @pydantic.validate_call @@ -223,10 +179,9 @@ def started(self, started: datetime.datetime) -> None: @property @staging_check def endtime(self) -> datetime.datetime: - try: - return datetime.datetime.strptime(self._get()["endtime"], DATETIME_FORMAT) - except KeyError as e: - raise RuntimeError("Expected key 'endtime' in alert retrieval") from e + return datetime.datetime.strptime( + self._get_attribute("endtime"), DATETIME_FORMAT + ) @endtime.setter @pydantic.validate_call diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 0381230c..c98051cb 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -2,6 +2,8 @@ import typing from .base import SimvueObject, staging_check +__all__ = ["Tag"] + class Tag(SimvueObject): @classmethod @@ -12,6 +14,7 @@ def new( name: str, description: str | None = None, color: pydantic.color.Color | None = None, + offline: bool = False, ): """Create a new Tag on the Simvue server""" _data: dict[str, typing.Any] = {"name": name} @@ -19,17 +22,14 @@ def new( _data["description"] = description if color: _data["description"] = 
color.as_hex() - _tag = Tag() - _tag._post(**_data) + _tag = Tag(**_data) + _tag.offline_mode(offline) return _tag @property @staging_check def name(self) -> str: - try: - return self._get()["name"] - except KeyError as e: - raise RuntimeError("Expected key 'name' in tag retrieval") from e + return self._get_attribute("name") @name.setter @pydantic.validate_call @@ -39,11 +39,7 @@ def name(self, name: str) -> None: @property @staging_check def color(self) -> pydantic.color.RGBA: - try: - _color: str = self._get()["colour"] - return pydantic.color.parse_str(_color) - except KeyError as e: - raise RuntimeError("Expected key 'colour' in tag retrieval") from e + return pydantic.color.parse_str(self._get_attribute("colour")) @color.setter @pydantic.validate_call @@ -53,10 +49,7 @@ def color(self, color: pydantic.color.Color) -> None: @property @staging_check def description(self) -> str: - try: - return self._get()["description"] - except KeyError as e: - raise RuntimeError("Expected key 'description' in tag retrieval") from e + return self._get_attribute("description") @description.setter @pydantic.validate_call diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 020d0fad..43d72adf 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -77,11 +77,6 @@ def check_valid_server(cls, values: "ServerSpecifications") -> bool: class OfflineSpecifications(pydantic.BaseModel): cache: typing.Optional[pathlib.Path] = None - @pydantic.field_validator("cache") - @classmethod - def cache_to_str(cls, v: typing.Any) -> str: - return f"{v}" - class DefaultRunSpecifications(pydantic.BaseModel): name: typing.Optional[str] = None diff --git a/simvue/config/user.py b/simvue/config/user.py index c827a016..eacbacd3 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -50,12 +50,12 @@ def _load_pyproject_configs(cls) -> typing.Optional[dict]: ) if not _pyproject_toml: - return + return None _project_data = toml.load(_pyproject_toml) 
if not (_simvue_setup := _project_data.get("tool", {}).get("simvue")): - return + return None # Do not allow reading of authentication credentials within a project file _server_credentials = _simvue_setup.get("server", {}) diff --git a/simvue/factory/proxy/offline.py b/simvue/factory/proxy/offline.py index cda666c7..f6302639 100644 --- a/simvue/factory/proxy/offline.py +++ b/simvue/factory/proxy/offline.py @@ -16,9 +16,6 @@ skip_if_failed, ) -if typing.TYPE_CHECKING: - pass - logger = logging.getLogger(__name__) @@ -32,8 +29,8 @@ def __init__( ) -> None: super().__init__(name, uniq_id, suppress_errors) - _offline_dir = SimvueConfiguration.fetch().offline.cache - self._directory: str = os.path.join(_offline_dir, self._uuid) + _offline_dir: pathlib.Path = SimvueConfiguration.fetch().offline.cache + self._directory = f"{_offline_dir.joinpath(self._uuid)}" os.makedirs(self._directory, exist_ok=True) diff --git a/simvue/run.py b/simvue/run.py index cab03b8f..9400d94f 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -405,19 +405,19 @@ def _offline_dispatch_callback( run_id: typing.Optional[str] = self._id, uuid: str = self._uuid, ) -> None: - _offline_directory = self._user_config.offline.cache - if not os.path.exists(_offline_directory): + _offline_directory: pathlib.Path = self._user_config.offline.cache + if not _offline_directory.exists(): logger.error( f"Cannot write to offline directory '{_offline_directory}', directory not found." 
) return - _directory = os.path.join(_offline_directory, uuid) + _directory = _offline_directory.joinpath(uuid) unique_id = time.time() - filename = f"{_directory}/{category}-{unique_id}" + filename = _directory.joinpath(f"{category}-{unique_id}") _data = {category: buffer, "run": run_id} try: - with open(filename, "w") as fh: + with filename.open("w") as fh: json.dump(_data, fh) except Exception as err: if self._suppress_errors: diff --git a/simvue/sender.py b/simvue/sender.py index 264a183c..10daeae3 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -1,6 +1,7 @@ from concurrent.futures import ThreadPoolExecutor import glob import json +import pathlib import typing import logging import os @@ -88,10 +89,10 @@ def sender() -> str: """ Asynchronous upload of runs to Simvue server """ - directory = SimvueConfiguration.fetch().offline.cache + directory: pathlib.Path = SimvueConfiguration.fetch().offline.cache # Clean up old runs after waiting 5 mins - runs = glob.glob(f"{directory}/*/sent") + runs = directory.glob("*/sent") for run in runs: id = run.split("/")[len(run.split("/")) - 2] @@ -99,17 +100,17 @@ def sender() -> str: if time.time() - os.path.getmtime(run) > 300: try: - shutil.rmtree(f"{directory}/{id}") + shutil.rmtree(f"{directory.joinpath(id)}") except Exception: logger.error("Got exception trying to cleanup run in directory %s", id) # Deal with runs in the created, running or a terminal state runs = ( - glob.glob(f"{directory}/*/created") - + glob.glob(f"{directory}/*/running") - + glob.glob(f"{directory}/*/completed") - + glob.glob(f"{directory}/*/failed") - + glob.glob(f"{directory}/*/terminated") + directory.glob("*/created") + + directory.glob("*/running") + + directory.glob("*/completed") + + directory.glob("*/failed") + + directory.glob("*/terminated") ) if len(runs) > MAX_RUNS: diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py index ce889500..d4afa0c0 100644 --- a/tests/functional/test_config.py +++ 
b/tests/functional/test_config.py @@ -125,7 +125,7 @@ def _mocked_find(file_names: list[str], *_, ppt_file=_ppt_file, conf_file=_confi elif use_file and use_file != "pyproject.toml": assert _config.server.url == _url assert _config.server.token == _token - assert _config.offline.cache == temp_d + assert f"{_config.offline.cache}" == temp_d if use_file == "extended": assert _config.run.description == _description diff --git a/tests/unit/test_alert.py b/tests/unit/test_alert.py deleted file mode 100644 index abdbe4dd..00000000 --- a/tests/unit/test_alert.py +++ /dev/null @@ -1,39 +0,0 @@ -import time -import pytest -import uuid - -from simvue.api.objects import Alert, MetricsRangeAlert, MetricsThresholdAlert, EventsAlert - -@pytest.mark.api -def test_event_alert_creation() -> None: - _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _alert = EventsAlert.new( - name=f"events_alert_{_uuid}", - frequency=1, - pattern="completed", - notification="none" - ) - _alert.commit() - assert _alert.alert.frequency == 1 - assert _alert.alert.pattern == "completed" - assert _alert.name == f"events_alert_{_uuid}" - assert _alert.notification == "none" - - -@pytest.mark.api -def test_event_alert_modification() -> None: - _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _alert = EventsAlert.new( - name=f"events_alert_{_uuid}", - frequency=1, - pattern="completed", - notification="none" - ) - _alert.commit() - time.sleep(1) - _new_alert = Alert(_alert.id) - assert isinstance(_new_alert, EventsAlert) - _new_alert.description = "updated!" - assert _new_alert.description != "updated!" - _new_alert.commit() - assert _new_alert.description == "updated!" 
diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 0bf475e2..669944f8 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -8,7 +8,7 @@ from simvue.api.objects.folder import Folder @pytest.mark.api -def test_artifact_creation() -> None: +def test_artifact_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name) @@ -32,3 +32,34 @@ def test_artifact_creation() -> None: _artifact.delete() _run.delete() _folder.delete() + + +@pytest.mark.api +def test_artifact_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(folder=_folder_name, offline=True) + + with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: + _path = pathlib.Path(temp_f.name) + with _path.open("w") as out_f: + out_f.write("Hello World!") + _artifact = Artifact.new( + name=f"test_artifact_{_uuid}", + run=_run.id, + file_path=_path, + category="input", + storage=None, + file_type=None, + offline=True + ) + _folder.commit() + _run.commit() + _artifact.commit() + time.sleep(1) + assert _artifact.name == f"test_artifact_{_uuid}" + _artifact.delete() + _run.delete() + _folder.delete() + diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 3492d71e..7ccca943 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -1,12 +1,13 @@ import typing import pytest import uuid +import json import time from simvue.api.objects.folder import Folder @pytest.mark.api -def test_folder_creation() -> None: +def test_folder_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _path = f"/simvue_unit_testing/objects/folder/{_uuid}" _folder = Folder.new(path=_path) @@ -18,8 +19,28 @@ def test_folder_creation() -> None: assert not _folder.visibility.users 
-@pytest.mark.api(depends=["test_folder_creation"])
-def test_folder_modification() -> None:
+@pytest.mark.api
+def test_folder_creation_offline() -> None:
+    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
+    _path = f"/simvue_unit_testing/objects/folder/{_uuid}"
+    _folder = Folder.new(path=_path, offline=True)
+    _folder.commit()
+    assert _folder.id
+    assert _folder.path == _path
+
+    with pytest.raises(AttributeError):
+        _folder.visibility.public
+
+    _folder.delete()
+
+    with _folder._local_staging_file.open() as in_f:
+        _local_data = json.load(in_f)
+
+    assert not _local_data.get(_folder._label, {}).get(_folder.id)
+
+
+@pytest.mark.api
+def test_folder_modification_online() -> None:
     _uuid: str = f"{uuid.uuid4()}".split("-")[0]
     _path = f"/simvue_unit_testing/objects/folder/{_uuid}"
     _description = "Test study"
@@ -37,4 +58,27 @@
     assert _folder_new.description == _description
     assert _folder.description == _description
     assert _folder_new.visibility.tenant
+    _folder.delete()
+
+
+@pytest.mark.api
+def test_folder_modification_offline() -> None:
+    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
+    _path = f"/simvue_unit_testing/objects/folder/{_uuid}"
+    _description = "Test study"
+    _tags = ["testing", "api"]
+    _folder = Folder.new(path=_path, offline=True)
+    _folder.commit()
+    time.sleep(1)
+    _folder_new = Folder(identifier=_folder.id)
+    _folder_new.tags = _tags
+    _folder_new.description = _description
+    _folder_new.visibility.tenant = True
+    _folder_new.commit()
+    assert _folder_new.tags == _tags
+    assert _folder.tags == _tags
+    assert _folder_new.description == _description
+    assert _folder.description == _description
+    assert _folder_new.visibility.tenant
+    _folder_new.delete()
diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py
index 8cd7296d..ea5c1b0f 100644
--- a/tests/unit/test_run.py
+++ b/tests/unit/test_run.py
@@ -1,3 +1,4 @@
+import json
 import pytest
 import time
 import 
datetime @@ -6,7 +7,7 @@ from simvue.api.objects import Run, Folder @pytest.mark.api -def test_run_creation() -> None: +def test_run_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name) @@ -19,7 +20,26 @@ def test_run_creation() -> None: @pytest.mark.api -def test_run_modification() -> None: +def test_run_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(folder=_folder_name, offline=True) + _folder.commit() + _run.commit() + assert _run.folder == _folder_name + _run.delete() + _folder.delete() + + with _run._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert not _local_data.get(_run._label, {}).get(_run.id) + assert not _local_data.get(_folder._label, {}).get(_folder.id) + + +@pytest.mark.api +def test_run_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name) @@ -45,3 +65,37 @@ def test_run_modification() -> None: _run.delete() _folder.delete() + +@pytest.mark.api +def test_run_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(folder=_folder_name, offline=True) + _folder.commit() + _run.commit() + assert _run.folder == _folder_name + time.sleep(1) + _now = datetime.datetime.now() + _new_run = Run(identifier=_run.id) + _new_run.name = "simvue_test_run" + _new_run.description = "Simvue test run" + _new_run.created = _now + _new_run.tags = ["simvue", "test", "tag"] + _new_run.ttl = 120 + + # Property has not been committed to offline + # object so not yet available + with pytest.raises(AttributeError): + _new_run.ttl + + _new_run.commit() 
+ + assert _new_run.ttl == 120 + assert _new_run.description == "Simvue test run" + assert _new_run.created == _now + assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) + assert _new_run.name == "simvue_test_run" + _run.delete() + _folder.delete() + diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index a67e9130..c05d9395 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -1,20 +1,39 @@ import time import pytest import uuid +import json from simvue.api.objects.tag import Tag @pytest.mark.api -def test_tag_creation() -> None: +def test_tag_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}") _tag.commit() assert _tag.name == f"test_tag_{_uuid}" assert _tag.color assert not _tag.description + _tag.delete() @pytest.mark.api -def test_tag_modification() -> None: +def test_tag_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}", offline=True) + _tag.commit() + assert _tag.name == f"test_tag_{_uuid}" + + with pytest.raises(AttributeError): + _tag.color + + _tag.delete() + + with _tag._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert not _local_data.get(_tag._label, {}).get(_tag.id) + +@pytest.mark.api +def test_tag_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}") _tag.commit() @@ -27,3 +46,21 @@ def test_tag_modification() -> None: assert _new_tag.name == f"test_modified_tag_{_uuid}" assert _new_tag.color.r == 250 / 255 assert _new_tag.description == "modified test tag" + + +@pytest.mark.api +def test_tag_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}", offline=True) + _tag.commit() + time.sleep(1) + _new_tag = Tag(_tag.id) + _new_tag.name = _tag.name.replace("test", "test_modified") + _new_tag.color = "rgb({r}, {g}, {b})".format(r=250, 
g=0, b=0) + _new_tag.description = "modified test tag" + _new_tag.commit() + assert _new_tag.name == f"test_modified_tag_{_uuid}" + assert _new_tag.color.r == 250 / 255 + assert _new_tag.description == "modified test tag" + _tag.delete() + From 2daa8b4c5f63aad1edecf1fe81ccec8a4f4064fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 13 Nov 2024 15:24:25 +0000 Subject: [PATCH 007/163] Added stats and sorted tests --- simvue/api/objects/stats.py | 62 +++++++++++++ tests/unit/test_event_alert.py | 92 ++++++++++++++++++ tests/unit/test_metric_range_alert.py | 108 ++++++++++++++++++++++ tests/unit/test_metric_threshold_alert.py | 104 +++++++++++++++++++++ tests/unit/test_stats.py | 15 +++ tests/unit/test_user_alert.py | 80 ++++++++++++++++ 6 files changed, 461 insertions(+) create mode 100644 simvue/api/objects/stats.py create mode 100644 tests/unit/test_event_alert.py create mode 100644 tests/unit/test_metric_range_alert.py create mode 100644 tests/unit/test_metric_threshold_alert.py create mode 100644 tests/unit/test_stats.py create mode 100644 tests/unit/test_user_alert.py diff --git a/simvue/api/objects/stats.py b/simvue/api/objects/stats.py new file mode 100644 index 00000000..3664be79 --- /dev/null +++ b/simvue/api/objects/stats.py @@ -0,0 +1,62 @@ +import typing + +from .base import SimvueObject + +__all__ = ["Stats"] + + +class Stats(SimvueObject): + def __init__(self) -> None: + self.runs = RunStatistics(self) + super().__init__() + self._label = "stat" + + # Stats is a singular object (i.e. 
identifier is not applicable) + # set it to empty string so not None + self._identifier = "" + + @classmethod + def new(cls, **kwargs) -> None: + raise AttributeError("Creation of statistics objects is not supported") + + def offline_mode(self, is_true: bool) -> None: + if is_true: + raise AttributeError("Statistics only available online") + + def _get_run_stats(self) -> dict[str, int]: + return self._get_attribute("runs") + + def _get_local_staged(self) -> dict[str, typing.Any]: + return {} + + def _get_visibility(self) -> dict[str, bool | list[str]]: + return {} + + +class RunStatistics: + def __init__(self, sv_obj: Stats) -> None: + self._sv_obj = sv_obj + + @property + def created(self) -> int: + if (_created := self._sv_obj._get_run_stats().get("created")) is None: + raise RuntimeError("Expected key 'created' in run statistics retrieval") + return _created + + @property + def running(self) -> int: + if (_running := self._sv_obj._get_run_stats().get("running")) is None: + raise RuntimeError("Expected key 'running' in run statistics retrieval") + return _running + + @property + def completed(self) -> int: + if (_completed := self._sv_obj._get_run_stats().get("running")) is None: + raise RuntimeError("Expected key 'completed' in run statistics retrieval") + return _completed + + @property + def data(self) -> int: + if (_data := self._sv_obj._get_run_stats().get("running")) is None: + raise RuntimeError("Expected key 'data' in run statistics retrieval") + return _data diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py new file mode 100644 index 00000000..4783ddea --- /dev/null +++ b/tests/unit/test_event_alert.py @@ -0,0 +1,92 @@ +import time +import pytest +import uuid + +from simvue.api.objects import Alert, EventsAlert + +@pytest.mark.api +def test_event_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + 
notification="none" + ) + _alert.commit() + assert _alert.source == "events" + assert _alert.alert.frequency == 1 + assert _alert.alert.pattern == "completed" + assert _alert.name == f"events_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + +@pytest.mark.api +def test_event_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none", + offline=True + ) + + _alert.commit() + assert _alert.source == "events" + assert _alert.alert.frequency == 1 + assert _alert.alert.pattern == "completed" + assert _alert.name == f"events_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert not _local_data.get(_alert._label, {}).get(_alert.id) + + +@pytest.mark.api +def test_event_alert_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none" + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, EventsAlert) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + + +@pytest.mark.api +def test_event_alert_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none", + offline=True + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, EventsAlert) + _new_alert.description = "updated!" + + with pytest.raises(AttributeError): + assert _new_alert.description + + _new_alert.commit() + assert _new_alert.description == "updated!" 
+ _new_alert.delete() + diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py new file mode 100644 index 00000000..9867c3fb --- /dev/null +++ b/tests/unit/test_metric_range_alert.py @@ -0,0 +1,108 @@ + +import time +import pytest +import json +import uuid + +from simvue.api.objects import MetricsRangeAlert, Alert + +@pytest.mark.api +def test_metric_range_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range" + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_threshold_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + +@pytest.mark.api +def test_metric_range_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range", + offline=True + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_threshold_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert not _local_data.get(_alert._label, {}).get(_alert.id) + + +@pytest.mark.api +def test_metric_threshold_alert_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range", + ) + 
_alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, MetricsRangeAlert) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + + +@pytest.mark.api +def test_metric_threshold_alert_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range", + offline=True + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, MetricsRangeAlert) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py new file mode 100644 index 00000000..e0520859 --- /dev/null +++ b/tests/unit/test_metric_threshold_alert.py @@ -0,0 +1,104 @@ +import time +import pytest +import json +import uuid + +from simvue.api.objects import MetricsThresholdAlert, Alert + + +@pytest.mark.api +def test_metric_threshold_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsThresholdAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + metric="x", + threshold=10, + window=1, + rule="is above", + aggregation="average" + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_threshold_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + +@pytest.mark.api +def test_metric_threshold_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = 
MetricsThresholdAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + threshold=10, + window=1, + metric="x", + rule="is above", + aggregation="average", + offline=True + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_threshold_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert not _local_data.get(_alert._label, {}).get(_alert.id) + + +@pytest.mark.api +def test_metric_threshold_alert_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsThresholdAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + threshold=10, + window=1, + metric="x", + rule="is above", + aggregation="average", + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, MetricsThresholdAlert) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + + +@pytest.mark.api +def test_metric_threshold_alert_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsThresholdAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + threshold=10, + window=1, + metric="x", + rule="is above", + aggregation="average", + offline=True + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, MetricsThresholdAlert) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" 
+ _new_alert.delete() + diff --git a/tests/unit/test_stats.py b/tests/unit/test_stats.py new file mode 100644 index 00000000..b11f1853 --- /dev/null +++ b/tests/unit/test_stats.py @@ -0,0 +1,15 @@ +import pytest + +from simvue.api.objects import Stats + +@pytest.mark.api +def test_stats() -> None: + _statistics = Stats() + assert isinstance(_statistics.runs.created, int) + assert isinstance(_statistics.runs.running, int) + assert isinstance(_statistics.runs.completed, int) + assert isinstance(_statistics.runs.data, int) + + with pytest.raises(AttributeError): + Stats.new() + diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py new file mode 100644 index 00000000..207a4bd2 --- /dev/null +++ b/tests/unit/test_user_alert.py @@ -0,0 +1,80 @@ +import time +import json +import pytest +import uuid + +from simvue.api.objects import Alert, UserAlert + +@pytest.mark.api +def test_user_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none" + ) + _alert.commit() + assert _alert.source == "user" + assert _alert.name == f"users_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + +@pytest.mark.api +def test_user_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + offline=True + ) + _alert.commit() + assert _alert.source == "user" + assert _alert.name == f"users_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert not _local_data.get(_alert._label, {}).get(_alert.id) + + +@pytest.mark.api +def test_user_alert_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + ) + _alert.commit() + time.sleep(1) + 
_new_alert = Alert(_alert.id) + assert isinstance(_new_alert, UserAlert) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + + +@pytest.mark.api +def test_user_alert_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + offline=True + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, UserAlert) + _new_alert.description = "updated!" + + with pytest.raises(AttributeError): + assert _new_alert.description + + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + From f56e32fda33184c1cee01e58d3dc12649c8d8739 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 14 Nov 2024 15:06:03 +0000 Subject: [PATCH 008/163] Add storage objects --- simvue/api/objects/__init__.py | 1 + simvue/api/objects/administrator/__init__.py | 1 + simvue/api/objects/administrator/tenant.py | 32 ++++ simvue/api/objects/alert/base.py | 37 +++- simvue/api/objects/alert/events.py | 40 ++++- simvue/api/objects/alert/fetch.py | 17 +- simvue/api/objects/alert/metrics.py | 98 +++++++++- simvue/api/objects/alert/user.py | 28 ++- simvue/api/objects/artifact.py | 40 ++++- simvue/api/objects/base.py | 25 ++- simvue/api/objects/storage/__init__.py | 2 + simvue/api/objects/storage/base.py | 58 ++++++ simvue/api/objects/storage/file.py | 21 +++ simvue/api/objects/storage/s3.py | 178 +++++++++++++++++++ simvue/api/objects/tag.py | 8 +- simvue/api/request.py | 17 -- tests/unit/test_file_storage.py | 22 +++ tests/unit/test_s3_storage.py | 52 ++++++ tests/unit/test_tenant.py | 34 ++++ 19 files changed, 666 insertions(+), 45 deletions(-) create mode 100644 simvue/api/objects/administrator/__init__.py create mode 100644 simvue/api/objects/administrator/tenant.py create mode 
100644 simvue/api/objects/storage/__init__.py create mode 100644 simvue/api/objects/storage/base.py create mode 100644 simvue/api/objects/storage/file.py create mode 100644 simvue/api/objects/storage/s3.py create mode 100644 tests/unit/test_file_storage.py create mode 100644 tests/unit/test_s3_storage.py create mode 100644 tests/unit/test_tenant.py diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 438e26e5..4da60f72 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -5,6 +5,7 @@ MetricsRangeAlert as MetricsRangeAlert, UserAlert as UserAlert, ) +from .storage import S3Storage as S3Storage, FileStorage as FileStorage from .stats import Stats as Stats from .artifact import Artifact as Artifact from .run import Run as Run diff --git a/simvue/api/objects/administrator/__init__.py b/simvue/api/objects/administrator/__init__.py new file mode 100644 index 00000000..a23f023e --- /dev/null +++ b/simvue/api/objects/administrator/__init__.py @@ -0,0 +1 @@ +from .tenant import Tenant as Tenant diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py new file mode 100644 index 00000000..9c45156e --- /dev/null +++ b/simvue/api/objects/administrator/tenant.py @@ -0,0 +1,32 @@ +import typing + +import pydantic +from .base import SimvueObject, staging_check + + +class Tenant(SimvueObject): + @classmethod + @pydantic.validate_call + def new( + cls, *, name: str, enabled: bool = True, offline: bool = False + ) -> typing.Self: + _tenant = Tenant(name=name, enabled=enabled, offline=offline) + _tenant.offline_mode(offline) + return _tenant # type: ignore + + @property + def name(self) -> str: + """Retrieve the name of the tenant""" + return self._get_attribute("name") + + @property + @staging_check + def enabled(self) -> bool: + """Retrieve if alert is enabled""" + return self._get_attribute("enabled") + + @enabled.setter + @pydantic.validate_call + def enabled(self, enabled: 
str) -> None: + """Enable/disable alert""" + self._staging["enabled"] = enabled diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 988334bd..4d8c79a6 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -1,3 +1,12 @@ +""" +Alert Object Base +================= + +Contains general definitions for Simvue Alert objects. + +""" + +import abc import pydantic import typing from simvue.api.objects.base import SimvueObject, staging_check @@ -5,17 +14,22 @@ class AlertBase(SimvueObject): - @classmethod - def new(cls, offline: bool = False, **kwargs): - _alert = AlertBase(**kwargs) - _alert.offline_mode(offline) - return _alert + """Class for interfacing with Simvue alerts + + Contains properties common to all alert types. + """ + + @abc.abstractclassmethod + def new(cls, **kwargs): + pass def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" super().__init__(identifier, **kwargs) def get_alert(self) -> dict[str, typing.Any]: + """Retrieve alert definition""" try: return self._get_attribute("alert") except AttributeError: @@ -23,6 +37,7 @@ def get_alert(self) -> dict[str, typing.Any]: @property def name(self) -> str: + """Retrieve alert name""" return self._get_attribute("name") @name.setter @@ -30,58 +45,70 @@ def name(self) -> str: def name( self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] ) -> None: + """Set alert name""" self._staging["name"] = name @property @staging_check def description(self) -> str | None: + """Retrieve alert description""" return self._get_attribute("description") @description.setter @pydantic.validate_call def description(self, description: str | None) -> None: + """Set alert description""" self._staging["description"] = description @property @staging_check def tags(self) -> list[str]: + """Retrieve alert tags""" return self._get_attribute("tags") 
@tags.setter @pydantic.validate_call def tags(self, tags: list[str]) -> None: + """Set alert tags""" self._staging["tags"] = tags @property @staging_check def notification(self) -> typing.Literal["none", "email"]: + """Retrieve alert notification setting""" return self._get_attribute("notification") @notification.setter @pydantic.validate_call def notification(self, notification: typing.Literal["none", "email"]) -> None: + """Configure alert notification setting""" self._staging["notification"] = notification @property def source(self) -> typing.Literal["events", "metrics", "user"]: + """Retrieve alert source""" return self._get_attribute("source") @property @staging_check def enabled(self) -> bool: + """Retrieve if alert is enabled""" return self._get_attribute("enabled") @enabled.setter @pydantic.validate_call def enabled(self, enabled: str) -> None: + """Enable/disable alert""" self._staging["enabled"] = enabled @property @staging_check def abort(self) -> bool: + """Retrieve if alert can abort simulations""" return self._get_attribute("abort") @abort.setter @pydantic.validate_call def abort(self, abort: str) -> None: + """Configure alert to trigger aborts""" self._staging["abort"] = abort diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 0401864c..ec16c5bb 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -1,3 +1,11 @@ +""" +Simvue Event Alerts +=================== + +Interface to event-based Simvue alerts. 
+ +""" + import typing import pydantic from .base import AlertBase, staging_check @@ -5,7 +13,10 @@ class EventsAlert(AlertBase): + """Connect to an event-based alert either locally or on a server""" + def __init__(self, identifier: str | None = None, **kwargs) -> None: + """Initialise a connection to an event alert by identifier""" self.alert = EventAlertDefinition(self) super().__init__(identifier, **kwargs) @@ -19,9 +30,29 @@ def new( pattern: str, frequency: pydantic.PositiveInt, enabled: bool = True, - tags: list[str] | None = None, offline: bool = False, ) -> typing.Self: + """Create a new event-based alert + + Note parameters are keyword arguments only. + + Parameters + ---------- + name : str + name of the alert + notification : "none" | "email" + configure notifications sent by this alert + pattern : str + pattern to monitor in event logs + frequency : int + how often to check for updates + enabled : bool, optional + enable this alert upon creation, default is True + offline : bool, optional + create alert locally, default is False + + """ + _alert_definition = {"pattern": pattern, "frequency": frequency} _alert = EventsAlert( name=name, @@ -29,18 +60,21 @@ def new( source="events", alert=_alert_definition, enabled=enabled, - tags=tags or [], ) _alert.offline_mode(offline) return _alert class EventAlertDefinition: + """Event alert definition sub-class""" + def __init__(self, alert: EventsAlert) -> None: + """Initialise an alert definition with its parent alert""" self._sv_obj = alert @property def pattern(self) -> str: + """Retrieve the event log pattern monitored by this alert""" try: return self._sv_obj.get_alert()["pattern"] except KeyError as e: @@ -51,6 +85,7 @@ def pattern(self) -> str: @property @staging_check def frequency(self) -> int: + """Retrieve the update frequency for this alert""" try: return self._sv_obj.get_alert()["frequency"] except KeyError as e: @@ -61,5 +96,6 @@ def frequency(self) -> int: @frequency.setter @pydantic.validate_call 
def frequency(self, frequency: int) -> None: + """Set the update frequency for this alert""" _alert = self._sv_obj.get_alert() | {"frequency": frequency} self._sv_obj._staging["alert"] = _alert diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 9accc519..05f87732 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -1,12 +1,21 @@ +""" +Simvue Alert Retrieval +====================== + +To simplify case whereby user does not know the alert type associated +with an identifier, use a generic alert object. +""" + from .events import EventsAlert from .metrics import MetricsThresholdAlert, MetricsRangeAlert from .base import AlertBase class Alert: - def __new__( - cls, identifier: str | None = None, **kwargs - ) -> EventsAlert | MetricsRangeAlert | MetricsThresholdAlert: + """Generic Simvue alert retrieval class""" + + def __new__(cls, identifier: str | None = None, **kwargs): + """Retrieve an object representing an alert either locally or on the server by id""" _alert_pre = AlertBase(identifier) if _alert_pre.source == "events": return EventsAlert(identifier) @@ -14,3 +23,5 @@ def __new__( return MetricsThresholdAlert(identifier) elif _alert_pre.source == "metrics": return MetricsRangeAlert(identifier) + + raise RuntimeError(f"Unknown source type '{_alert_pre.source}'") diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index c84d48e5..1eabffbd 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -1,3 +1,12 @@ +""" +Simvue Metric Alerts +==================== + +Classes for interacting with metric-based alerts either defined +locally or on a Simvue server + +""" + import pydantic import typing from .base import AlertBase, staging_check @@ -8,7 +17,10 @@ class MetricsThresholdAlert(AlertBase): + """Class for connecting to/creating a local or remotely defined metric threshold alert""" + def __init__(self, identifier: str | 
None = None, **kwargs) -> None: + """Connect to a local or remote threshold alert by identifier""" self.alert = MetricThresholdAlertDefinition(self) super().__init__(identifier, **kwargs) @@ -21,14 +33,41 @@ def new( metric: str, notification: typing.Literal["none", "email"], aggregation: Aggregate, - rule: Rule, + rule: typing.Literal["is above", "is below"], window: pydantic.PositiveInt, - threshold: float, + threshold: float | int, frequency: pydantic.PositiveInt, enabled: bool = True, - tags: list[str] | None = None, offline: bool = False, ) -> typing.Self: + """Create a new metric threshold alert either locally or on the server + + Note all arguments are keyword arguments. + + Parameters + ---------- + name : str + name to assign to this alert + metric : str + the metric to monitor + notification : "none" | "email" + the notification settings for this alert + aggregation : "average" | "sum" | "at least one" | "all" + how to aggregate metric values to deduce if alert is triggered + rule : "is above" | "is below" + threshold condition + window : int + window over which to calculate aggregation + threshold : float | int + the value defining the threshold + frequency : int + how often to monitor the metric + enabled : bool, optional + whether this alert is enabled upon creation, default is True + offline : bool, optional + whether to create the alert locally, default is False + + """ _alert_definition = { "rule": rule, "frequency": frequency, @@ -43,14 +82,16 @@ def new( source="metrics", alert=_alert_definition, enabled=enabled, - tags=tags or [], ) _alert.offline_mode(offline) return _alert class MetricsRangeAlert(AlertBase): + """Class for connecting to/creating a local or remotely defined metric range alert""" + def __init__(self, identifier: str | None = None, **kwargs) -> None: + """Connect to a local or remote threshold alert by identifier""" self.alert = MetricRangeAlertDefinition(self) super().__init__(identifier, **kwargs) @@ -63,15 +104,44 @@ def new( 
metric: str, notification: typing.Literal["none", "email"], aggregation: Aggregate, - rule: Rule, + rule: typing.Literal["is inside range", "is outside range"], window: pydantic.PositiveInt, range_high: float, range_low: float, frequency: pydantic.PositiveInt, enabled: bool = True, - tags: list[str] | None = None, offline: bool = False, ) -> typing.Self: + """Create a new metric range alert either locally or on the server + + Note all arguments are keyword arguments. + + Parameters + ---------- + name : str + name to assign to this alert + metric : str + the metric to monitor + notification : "none" | "email" + the notification settings for this alert + aggregation : "average" | "sum" | "at least one" | "all" + how to aggregate metric values to deduce if alert is triggered + rule : "is inside range" | "is outside range" + threshold condition + window : int + window over which to calculate aggregation + range_high : float | int + the value defining the upper limit + range_low : float | int + the value defining the lower limit + frequency : int + how often to monitor the metric + enabled : bool, optional + whether this alert is enabled upon creation, default is True + offline : bool, optional + whether to create the alert locally, default is False + + """ if range_low >= range_high: raise ValueError(f"Invalid arguments for range [{range_low}, {range_high}]") @@ -88,7 +158,6 @@ def new( name=name, notification=notification, source="metrics", - tags=tags or [], enabled=enabled, alert=_alert_definition, ) @@ -97,11 +166,15 @@ def new( class MetricsAlertDefinition: + """General alert definition for a metric alert""" + def __init__(self, alert: MetricsRangeAlert) -> None: + """Initialise definition with target alert""" self._sv_obj = alert @property def aggregation(self) -> Aggregate: + """Retrieve the aggregation strategy for this alert""" if not (_aggregation := self._sv_obj.get_alert().get("aggregation")): raise RuntimeError( "Expected key 'aggregation' in alert 
definition retrieval" @@ -110,12 +183,14 @@ def aggregation(self) -> Aggregate: @property def rule(self) -> Rule: + """Retrieve the rule for this alert""" if not (_rule := self._sv_obj.get_alert().get("rule")): raise RuntimeError("Expected key 'rule' in alert definition retrieval") return _rule @property def window(self) -> int: + """Retrieve the aggregation window for this alert""" if not (_window := self._sv_obj.get_alert().get("window")): raise RuntimeError("Expected key 'window' in alert definition retrieval") return _window @@ -123,6 +198,7 @@ def window(self) -> int: @property @staging_check def frequency(self) -> int: + """Retrieve the monitor frequency for this alert""" try: return self._sv_obj.get_alert()["frequency"] except KeyError as e: @@ -133,27 +209,35 @@ def frequency(self) -> int: @frequency.setter @pydantic.validate_call def frequency(self, frequency: int) -> None: + """Set the monitor frequency for this alert""" _alert = self._sv_obj.get_alert() | {"frequency": frequency} self._sv_obj._staging["alert"] = _alert class MetricThresholdAlertDefinition(MetricsAlertDefinition): + """Alert definition for metric threshold alerts""" + @property def threshold(self) -> float: + """Retrieve the threshold value for this alert""" if not (threshold_l := self._sv_obj.get_alert().get("threshold")): raise RuntimeError("Expected key 'threshold' in alert definition retrieval") return threshold_l class MetricRangeAlertDefinition(MetricsAlertDefinition): + """Alert definition for metric range alerts""" + @property def range_low(self) -> float: + """Retrieve the lower limit for metric range""" if not (range_l := self._sv_obj.get_alert().get("range_low")): raise RuntimeError("Expected key 'range_low' in alert definition retrieval") return range_l @property def range_high(self) -> float: + """Retrieve upper limit for metric range""" if not (range_u := self._sv_obj.get_alert().get("range_high")): raise RuntimeError( "Expected key 'range_high' in alert definition 
retrieval" diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 95f2399a..e5e630fa 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -1,3 +1,11 @@ +""" +Simvue User Alert +================= + +Class for connecting with a local/remote user defined alert. + +""" + import pydantic import typing from .base import AlertBase @@ -5,6 +13,8 @@ class UserAlert(AlertBase): + """Connect to/create a user defined alert either locally or on server""" + @classmethod @pydantic.validate_call def new( @@ -13,15 +23,29 @@ def new( name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], notification: typing.Literal["none", "email"], enabled: bool = True, - tags: list[str] | None = None, offline: bool = False, ) -> typing.Self: + """Create a new user-defined alert + + Note all arguments are keyword arguments. + + Parameters + ---------- + name : str + the name to assign to this alert + notification : "none" | "email" + configure notification settings for this alert + enabled : bool, optional + whether this alert is enabled upon creation, default is True + offline : bool, optional + whether this alert should be created locally, default is False + + """ _alert = UserAlert( name=name, notification=notification, source="user", enabled=enabled, - tags=tags or [], ) _alert.offline_mode(offline) return _alert diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 839139f0..03be8abe 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -1,3 +1,11 @@ +""" +Simvue Artifact +=============== + +Class for defining and interacting with artifact objects. 
+ +""" + import typing import os.path import pydantic @@ -12,6 +20,8 @@ class Artifact(SimvueObject): + """Connect to/create an artifact locally or on the server""" + @classmethod @pydantic.validate_call def new( @@ -24,7 +34,29 @@ def new( file_path: pydantic.FilePath, file_type: str | None, offline: bool = False, - ): + ) -> typing.Self: + """Create a new artifact either locally or on the server + + Note all arguments are keyword arguments + + Parameters + ---------- + name : str + the name for this artifact + run : str + the identifier with which this artifact is associated + storage : str | None + the identifier for the storage location for this object + category : "code" | "input" | "output" + the category of this artifact + file_path : pathlib.Path | str + path to the file this artifact represents + file_type : str | None + the mime type for this file, else this is determined + offline : bool, optional + whether to define this artifact locally, default is False + + """ _file_type = file_type or get_mimetype_for_file(file_path) if _file_type not in get_mimetypes(): @@ -49,24 +81,30 @@ def new( @property def name(self) -> str: + """Retrieve the name for this artifact""" return self._get_attribute("name") @property def checksum(self) -> str: + """Retrieve the checksum for this artifact""" return self._get_attribute("checksum") @property def category(self) -> Category: + """Retrieve the category for this artifact""" return self._get_attribute("category") @property def original_path(self) -> str: + """Retrieve the original path of the file associated with this artifact""" return self._get_attribute("originalPath") @property def storage(self) -> str: + """Retrieve the storage identifier for this artifact""" return self._get_attribute("storage") @property def type(self) -> str: + """Retrieve the MIME type for this artifact""" return self._get_attribute("type") diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 2a2e472d..804d2220 100644 --- 
a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -48,38 +48,48 @@ def _wrapper(self) -> typing.Any: class Visibility: + """Interface for object visibility definition""" + def __init__(self, sv_obj: "SimvueObject") -> None: + """Initialise visibility with target object""" self._sv_obj = sv_obj def _update_visibility(self, key: str, value: typing.Any) -> None: + """Update the visibility configuration for this object""" _visibility = self._sv_obj._get_visibility() | {key: value} self._sv_obj._staging["visibility"] = _visibility @property @staging_check def users(self) -> list[str]: + """Retrieve the list of users able to see this object""" return self._sv_obj._get_visibility().get("users", []) @users.setter def users(self, users: list[str]) -> None: + """Set the list of users able to see this object""" self._update_visibility("users", users) @property @staging_check def public(self) -> bool: + """Retrieve if this object is publically visible""" return self._sv_obj._get_visibility().get("public", False) @public.setter def public(self, public: bool) -> None: + """Set if this object is publically visible""" self._update_visibility("public", public) @property @staging_check def tenant(self) -> bool: + """Retrieve the tenant group this object is visible to""" return self._sv_obj._get_visibility().get("tenant", False) @tenant.setter def tenant(self, tenant: bool) -> None: + """Set the tenant group this object is visible to""" self._update_visibility("tenant", tenant) @@ -87,6 +97,7 @@ class SimvueObject(abc.ABC): def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) + self._endpoint: str = f"{self._label}s" self._identifier: typing.Optional[str] = ( identifier if identifier is not None else f"offline_{uuid.uuid1()}" ) @@ -188,7 +199,7 @@ def id(self) -> typing.Optional[str]: @property def 
_url_path(self) -> pathlib.Path: - return pathlib.Path(f"api/{self._label}s") + return pathlib.Path(f"api/{self._endpoint}") @property def _base_url(self) -> str: @@ -208,6 +219,12 @@ def _post(self, **kwargs) -> dict[str, typing.Any]: _response = sv_post( url=self._base_url, headers=self._headers, data=kwargs, is_json=True ) + + if _response.status_code == http.HTTPStatus.FORBIDDEN: + raise RuntimeError( + f"Forbidden: You do not have permission to create object of type '{self._label}'" + ) + _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], @@ -232,6 +249,12 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: _response = sv_put( url=self.url, headers=self._headers, data=kwargs, is_json=True ) + + if _response.status_code == http.HTTPStatus.FORBIDDEN: + raise RuntimeError( + f"Forbidden: You do not have permission to create object of type '{self._label}'" + ) + _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], diff --git a/simvue/api/objects/storage/__init__.py b/simvue/api/objects/storage/__init__.py new file mode 100644 index 00000000..742d86dc --- /dev/null +++ b/simvue/api/objects/storage/__init__.py @@ -0,0 +1,2 @@ +from .file import FileStorage as FileStorage +from .s3 import S3Storage as S3Storage diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py new file mode 100644 index 00000000..c78ae2a2 --- /dev/null +++ b/simvue/api/objects/storage/base.py @@ -0,0 +1,58 @@ +import typing +import abc +from simvue.api.objects.base import SimvueObject, staging_check + + +class Storage(SimvueObject): + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + """Retrieve an alert from the Simvue server by identifier""" + self._label = "storage" + self._endpoint = self._label + super().__init__(identifier, **kwargs) + + @abc.abstractclassmethod + def new(cls, **_): + pass + + @property + @staging_check + def 
name(self) -> str: + """Retrieve the name for this storage""" + return self._get_attribute("name") + + @name.setter + def name(self, name: list[str]) -> None: + """Set name assigned to this folder""" + self._staging["name"] = name + + @property + def type(self) -> str: + """Retrieve the type of storage""" + return self._get_attribute("type") + + @property + @staging_check + def default(self) -> bool: + """Retrieve if this is the default storage for the user""" + return self._get_attribute("default") + + @default.setter + def default(self, is_default: bool) -> None: + """Set this storage to be the default""" + self._staging["default"] = is_default + + @property + @staging_check + def tenant_usable(self) -> bool: + """Retrieve if this is usable by the current user tenant""" + return self._get_attribute("tenant_usable") + + @tenant_usable.setter + def tenant_usable(self, is_tenant_usable: bool) -> None: + """Set this storage to be usable by the current user tenant""" + self._staging["tenant_usable"] = is_tenant_usable + + @property + def disable_check(self) -> bool: + """Retrieve if checks are disabled for this storage""" + return self._get_attribute("disable_check") diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py new file mode 100644 index 00000000..8c4eb434 --- /dev/null +++ b/simvue/api/objects/storage/file.py @@ -0,0 +1,21 @@ +import typing +import pydantic + +from .base import Storage +from simvue.models import NAME_REGEX + + +class FileStorage(Storage): + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + disable_check: bool, + offline: bool = False, + ) -> typing.Self: + """Create a new file storage object""" + _storage = FileStorage(name=name, type="file", disable_check=disable_check) + _storage.offline_mode(offline) + return _storage diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py new file mode 100644 index 
00000000..fb073a57 --- /dev/null +++ b/simvue/api/objects/storage/s3.py @@ -0,0 +1,178 @@ +import typing +import pydantic + +from .base import Storage, staging_check +from simvue.models import NAME_REGEX + + +class S3Storage(Storage): + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + self.config = Config(self) + super().__init__(identifier, **kwargs) + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + disable_check: bool, + endpoint_url: str, + region_name: str, + access_key_id: str, + secret_access_key: pydantic.SecretStr, + bucket: str, + offline: bool = False, + ) -> typing.Self: + """Create a new S3 storage object""" + _config: dict[str, str] = { + "endpoint_url": endpoint_url, + "region_name": region_name, + "access_key_id": access_key_id, + "secret_access_key": secret_access_key.get_secret_value(), + "bucket": bucket, + } + _storage = S3Storage( + name=name, type="S3", config=_config, disable_check=disable_check + ) + _storage.offline_mode(offline) + return _storage + + def get_config(self) -> dict[str, typing.Any]: + """Retrieve configuration""" + try: + return self._get_attribute("config") + except AttributeError: + return {} + + +class Config: + def __init__(self, storage: S3Storage) -> None: + self._sv_obj = storage + + @property + @staging_check + def endpoint_url(self) -> str: + if self._sv_obj.type == "file": + raise ValueError( + f"Storage type '{self._sv_obj.type}' has no attribute 'endpoint_url'" + ) + + try: + return self._sv_obj.get_config()["endpoint_url"] + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @endpoint_url.setter + def endpoint_url(self, endpoint_url: str) -> None: + if self._sv_obj.type == "file": + raise ValueError( + f"Cannot set attribute 'endpoint_url' for storage type '{self._sv_obj.type}'" + ) + + _config = self._sv_obj.get_config() | 
{"endpoint_url": endpoint_url} + self._sv_obj._staging["config"] = _config + + @property + @staging_check + def region_name(self) -> str: + if self._sv_obj.type == "file": + raise ValueError( + f"Storage type '{self._sv_obj.type}' has no attribute 'region_name'" + ) + + try: + return self._sv_obj.get_config()["region_name"] + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @region_name.setter + def region_name(self, region_name: str) -> None: + if self._sv_obj.type == "file": + raise ValueError( + f"Cannot set attribute 'region_name' for storage type '{self._sv_obj.type}'" + ) + + _config = self._sv_obj.get_config() | {"region_name": region_name} + self._sv_obj._staging["config"] = _config + + @property + @staging_check + def access_key_id(self) -> str: + if self._sv_obj.type == "file": + raise ValueError( + f"Storage type '{self._sv_obj.type}' has no attribute 'access_key_id'" + ) + + try: + return self._sv_obj.get_config()["access_key_id"] + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @access_key_id.setter + def access_key_id(self, access_key_id: str) -> None: + if self._sv_obj.type == "file": + raise ValueError( + f"Cannot set attribute 'access_key_id' for storage type '{self._sv_obj.type}'" + ) + + _config = self._sv_obj.get_config() | {"access_key_id": access_key_id} + self._sv_obj._staging["config"] = _config + + @property + @staging_check + def secret_access_key(self) -> pydantic.SecretStr: + if self._sv_obj.type == "file": + raise ValueError( + f"Storage type '{self._sv_obj.type}' has no attribute 'secret_access_key'" + ) + + try: + return pydantic.SecretStr(self._sv_obj.get_config()["secret_access_key"]) + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @secret_access_key.setter + def secret_access_key(self, secret_access_key: pydantic.SecretStr) 
-> None: + if self._sv_obj.type == "file": + raise ValueError( + f"Cannot set attribute 'secret_access_key' for storage type '{self._sv_obj.type}'" + ) + + _config = self._sv_obj.get_config() | { + "secret_access_key": secret_access_key.get_secret_value() + } + self._sv_obj._staging["config"] = _config + + @property + @staging_check + def bucket(self) -> str: + if self._sv_obj.type == "file": + raise ValueError( + f"Storage type '{self._sv_obj.type}' has no attribute 'bucket'" + ) + + try: + return self._sv_obj.get_config()["bucket"] + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @bucket.setter + def bucket(self, bucket: str) -> None: + if self._sv_obj.type == "file": + raise ValueError( + f"Cannot set attribute 'bucket' for storage type '{self._sv_obj.type}'" + ) + + _config = self._sv_obj.get_config() | {"bucket": bucket} + self._sv_obj._staging["config"] = _config diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index c98051cb..5f3aad85 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -12,17 +12,11 @@ def new( cls, *, name: str, - description: str | None = None, - color: pydantic.color.Color | None = None, offline: bool = False, ): """Create a new Tag on the Simvue server""" _data: dict[str, typing.Any] = {"name": name} - if description: - _data["description"] = description - if color: - _data["description"] = color.as_hex() - _tag = Tag(**_data) + _tag = Tag(name=name) _tag.offline_mode(offline) return _tag diff --git a/simvue/api/request.py b/simvue/api/request.py index 97c80df2..524714f9 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -92,29 +92,12 @@ def post( url, headers=headers, data=data_sent, timeout=DEFAULT_API_TIMEOUT ) - if response.status_code in ( - http.HTTPStatus.UNAUTHORIZED, - http.HTTPStatus.FORBIDDEN, - ): - raise RuntimeError( - f"Authorization error [{response.status_code}]: {response.text}" - ) - if 
response.status_code == http.HTTPStatus.UNPROCESSABLE_ENTITY: _parsed_response = parse_validation_response(response.json()) raise ValueError( f"Validation error for '{url}' [{response.status_code}]:\n{_parsed_response}" ) - if response.status_code not in ( - http.HTTPStatus.OK, - http.HTTPStatus.CREATED, - http.HTTPStatus.CONFLICT, - ): - raise RuntimeError( - f"HTTP error for '{url}' [{response.status_code}]: {response.text}" - ) - return response diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py new file mode 100644 index 00000000..65627a53 --- /dev/null +++ b/tests/unit/test_file_storage.py @@ -0,0 +1,22 @@ +import pytest +import time +import json +import uuid + +from simvue.api.objects import FileStorage + +@pytest.mark.api +def test_create_file_storage_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _storage = FileStorage.new(name=_uuid, disable_check=False) + _storage.commit() + assert _storage.name == _uuid + + +@pytest.mark.api +def test_create_file_storage_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _storage = FileStorage.new(name=_uuid, disable_check=False, offline=True) + _storage.commit() + assert _storage.name == _uuid + _storage.delete() diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py new file mode 100644 index 00000000..f7f5abe9 --- /dev/null +++ b/tests/unit/test_s3_storage.py @@ -0,0 +1,52 @@ +import pytest +import time +import json +import uuid + +from simvue.api.objects import S3Storage + +@pytest.mark.api +def test_create_s3_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _storage = S3Storage.new( + name=_uuid, + endpoint_url="https://not_a_real_url.io", + disable_check=False, + region_name="fictionsville", + access_key_id="dummy_key", + secret_access_key="not_a_key", + bucket="dummy_bucket" + ) + _storage.commit() + assert _storage.name == _uuid + assert not _storage.disable_check + assert _storage.config.endpoint_url == 
"https://not_a_real_url.io" + assert _storage.config.region_name == "fictionsville" + assert _storage.config.access_key_id == "dummy_key" + assert _storage.config.secret_access_key.get_secret_value() == "not_a_key" + assert _storage.config.bucket == "dummy_bucket" + _storage.delete() + + +@pytest.mark.api +def test_create_s3_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _storage = S3Storage.new( + name=_uuid, + endpoint_url="https://not_a_real_url.io", + disable_check=False, + region_name="fictionsville", + access_key_id="dummy_key", + secret_access_key="not_a_key", + bucket="dummy_bucket", + offline=True + ) + _storage.commit() + assert _storage.name == _uuid + assert not _storage.disable_check + assert _storage.config.endpoint_url == "https://not_a_real_url.io" + assert _storage.config.region_name == "fictionsville" + assert _storage.config.access_key_id == "dummy_key" + assert _storage.config.secret_access_key.get_secret_value() == "not_a_key" + assert _storage.config.bucket == "dummy_bucket" + _storage.delete() diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py new file mode 100644 index 00000000..63d07e49 --- /dev/null +++ b/tests/unit/test_tenant.py @@ -0,0 +1,34 @@ +import pytest +import time +import json +import uuid + +from simvue.api.objects.administrator import Tenant + + +@pytest.mark.api +def test_create_tenant_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + time.sleep(1) + _new_tenant = Tenant(_tenant.id) + assert _new_tenant.name == _uuid + assert _new_tenant.enabled + _new_tenant.delete() + + +@pytest.mark.api +def test_create_tenant_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid, offline=True) + _tenant.commit() + time.sleep(1) + _new_tenant = Tenant(_tenant.id) + assert _new_tenant.name == _uuid 
+ assert _new_tenant.enabled + _new_tenant.delete() From f40b7f145973f5b713c6afb98b98284e8c88e155 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 15 Nov 2024 13:47:16 +0000 Subject: [PATCH 009/163] Added read-only mode for multi object retrieval --- simvue/api/objects/__init__.py | 6 +- simvue/api/objects/administrator/tenant.py | 3 + simvue/api/objects/administrator/user.py | 100 ++++++++++++++++++ simvue/api/objects/alert/base.py | 10 +- simvue/api/objects/alert/events.py | 15 ++- simvue/api/objects/alert/fetch.py | 62 ++++++++++- simvue/api/objects/alert/metrics.py | 15 ++- simvue/api/objects/alert/user.py | 6 ++ simvue/api/objects/base.py | 79 +++++++++++++- simvue/api/objects/folder.py | 15 ++- simvue/api/objects/run.py | 23 ++++- simvue/api/objects/storage/__init__.py | 1 + simvue/api/objects/storage/base.py | 60 +++++++++-- simvue/api/objects/storage/fetch.py | 64 ++++++++++++ simvue/api/objects/storage/file.py | 14 ++- simvue/api/objects/storage/s3.py | 113 +++++---------------- simvue/api/objects/tag.py | 5 +- simvue/api/request.py | 7 +- tests/unit/test_file_storage.py | 6 +- tests/unit/test_folder.py | 7 ++ tests/unit/test_s3_storage.py | 23 +++-- tests/unit/test_user_alert.py | 1 + 22 files changed, 499 insertions(+), 136 deletions(-) create mode 100644 simvue/api/objects/administrator/user.py create mode 100644 simvue/api/objects/storage/fetch.py diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 4da60f72..7add4fbb 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -5,7 +5,11 @@ MetricsRangeAlert as MetricsRangeAlert, UserAlert as UserAlert, ) -from .storage import S3Storage as S3Storage, FileStorage as FileStorage +from .storage import ( + S3Storage as S3Storage, + FileStorage as FileStorage, + Storage as Storage, +) from .stats import Stats as Stats from .artifact import Artifact as Artifact from .run import Run as Run diff --git 
a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 9c45156e..68c64eca 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -1,6 +1,8 @@ import typing import pydantic + +from simvue.api.objects.base import write_only from .base import SimvueObject, staging_check @@ -26,6 +28,7 @@ def enabled(self) -> bool: return self._get_attribute("enabled") @enabled.setter + @write_only @pydantic.validate_call def enabled(self, enabled: str) -> None: """Enable/disable alert""" diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py new file mode 100644 index 00000000..f6106c6a --- /dev/null +++ b/simvue/api/objects/administrator/user.py @@ -0,0 +1,100 @@ +import pydantic +import typing +from simvue.api.objects.base import SimvueObject, staging_check, write_only + + +class User(SimvueObject): + @classmethod + @pydantic.validate_call + def new( + cls, + *, + username: str, + fullname: str, + email: pydantic.EmailStr, + manager: bool, + admin: bool, + readonly: bool, + welcome: bool, + tenant: str, + enabled: bool = True, + offline: bool = False, + ) -> typing.Self: + _user_info: dict[str, str | bool] = { + "username": username, + "fullname": fullname, + "email": email, + "manager": manager, + "readonly": readonly, + "welcome": welcome, + "admin": admin, + "enabled": enabled, + } + _user = User(user=_user_info, tenant=tenant, offline=offline) + _user.offline_mode(offline) + return _user # type: ignore + + @property + @staging_check + def username(self) -> str: + if self.id and self.id.startswith("offline_"): + return self._get_attribute("user")["username"] + return self._get_attribute("username") + + @username.setter + @write_only + @pydantic.validate_call + def username(self, username: str) -> None: + self._staging["username"] = username + + @property + @staging_check + def fullname(self) -> str: + if self.id and 
self.id.startswith("offline_"): + return self._get_attribute("user")["fullname"] + return self._get_attribute("fullname") + + @fullname.setter + @write_only + @pydantic.validate_call + def fullname(self, fullname: str) -> None: + self._staging["fullname"] = fullname + + @property + @staging_check + def manager(self) -> bool: + if self.id and self.id.startswith("offline_"): + return self._get_attribute("user")["manager"] + return self._get_attribute("manager") + + @manager.setter + @write_only + @pydantic.validate_call + def manager(self, manager: bool) -> None: + self._staging["manager"] = manager + + @property + @staging_check + def admin(self) -> bool: + if self.id and self.id.startswith("offline_"): + return self._get_attribute("user")["admin"] + return self._get_attribute("admin") + + @admin.setter + @write_only + @pydantic.validate_call + def admin(self, admin: bool) -> None: + self._staging["admin"] = admin + + @property + @staging_check + def readonly(self) -> bool: + if self.id and self.id.startswith("offline_"): + return self._get_attribute("user")["readonly"] + return self._get_attribute("readonly") + + @readonly.setter + @write_only + @pydantic.validate_call + def readonly(self, readonly: bool) -> None: + self._staging["readonly"] = readonly diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 4d8c79a6..dcb18fce 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -6,7 +6,6 @@ """ -import abc import pydantic import typing from simvue.api.objects.base import SimvueObject, staging_check @@ -19,15 +18,18 @@ class AlertBase(SimvueObject): Contains properties common to all alert types. 
""" - @abc.abstractclassmethod + @classmethod def new(cls, **kwargs): pass - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__( + self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs + ) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" - super().__init__(identifier, **kwargs) + super().__init__(identifier, read_only, **kwargs) + @staging_check def get_alert(self) -> dict[str, typing.Any]: """Retrieve alert definition""" try: diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index ec16c5bb..f78b3699 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -8,6 +8,8 @@ import typing import pydantic + +from simvue.api.objects.base import write_only from .base import AlertBase, staging_check from simvue.models import NAME_REGEX @@ -15,10 +17,18 @@ class EventsAlert(AlertBase): """Connect to an event-based alert either locally or on a server""" - def __init__(self, identifier: str | None = None, **kwargs) -> None: + def __init__( + self, identifier: str | None = None, read_only: bool = False, **kwargs + ) -> None: """Initialise a connection to an event alert by identifier""" self.alert = EventAlertDefinition(self) - super().__init__(identifier, **kwargs) + super().__init__(identifier, read_only, **kwargs) + + @classmethod + def get_all( + cls, count: int | None = None, offset: int | None = None + ) -> dict[str, typing.Any]: + raise NotImplementedError("Retrieve of only event alerts is not yet supported") @classmethod @pydantic.validate_call @@ -94,6 +104,7 @@ def frequency(self) -> int: ) from e @frequency.setter + @write_only @pydantic.validate_call def frequency(self, frequency: int) -> None: """Set the update frequency for this alert""" diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 05f87732..8e10161b 100644 --- 
a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -6,22 +6,76 @@ with an identifier, use a generic alert object. """ +import typing +import http + +from simvue.api.objects.alert.user import UserAlert +from simvue.api.request import get_json_from_response +from simvue.api.request import get as sv_get from .events import EventsAlert from .metrics import MetricsThresholdAlert, MetricsRangeAlert from .base import AlertBase +AlertType = EventsAlert | UserAlert | MetricsThresholdAlert | MetricsRangeAlert + class Alert: """Generic Simvue alert retrieval class""" def __new__(cls, identifier: str | None = None, **kwargs): """Retrieve an object representing an alert either locally or on the server by id""" - _alert_pre = AlertBase(identifier) + _alert_pre = AlertBase(identifier=identifier, **kwargs) if _alert_pre.source == "events": - return EventsAlert(identifier) + return EventsAlert(identifier=identifier, **kwargs) elif _alert_pre.source == "metrics" and _alert_pre.get_alert().get("threshold"): - return MetricsThresholdAlert(identifier) + return MetricsThresholdAlert(identifier=identifier, **kwargs) elif _alert_pre.source == "metrics": - return MetricsRangeAlert(identifier) + return MetricsRangeAlert(identifier=identifier, **kwargs) raise RuntimeError(f"Unknown source type '{_alert_pre.source}'") + + @classmethod + def get_all( + cls, count: int | None = None, offset: int | None = None + ) -> typing.Generator[tuple[str, AlertType], None, None]: + _class_instance = AlertBase(read_only=True) + _url = f"{_class_instance._base_url}" + _response = sv_get( + _url, + headers=_class_instance._headers, + params={"start": offset, "count": count}, + ) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + ) + + if not isinstance(_json_response, dict): + raise RuntimeError( + f"Expected dict from JSON response during 
{_class_instance.__class__.__name__.lower()}s retrieval " + f"but got '{type(_json_response)}'" + ) + + if not (_data := _json_response.get("data")): + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + _out_dict: dict[str, AlertType] = {} + + for _entry in _json_response["data"]: + if _entry["source"] == "events": + yield _entry["id"], EventsAlert(read_only=True, **_entry) + elif _entry["source"] == "user": + yield _entry["id"], UserAlert(read_only=True, **_entry) + elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( + "threshold" + ): + yield _entry["id"], MetricsThresholdAlert(read_only=True, **_entry) + elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( + "range_low" + ): + yield _entry["id"], MetricsRangeAlert(read_only=True, **_entry) + else: + raise RuntimeError(f"Unrecognised alert source '{_entry['source']}'") diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 1eabffbd..ea940502 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -9,6 +9,8 @@ import pydantic import typing + +from simvue.api.objects.base import write_only from .base import AlertBase, staging_check from simvue.models import NAME_REGEX @@ -19,10 +21,18 @@ class MetricsThresholdAlert(AlertBase): """Class for connecting to/creating a local or remotely defined metric threshold alert""" - def __init__(self, identifier: str | None = None, **kwargs) -> None: + def __init__( + self, identifier: str | None = None, read_only: bool = False, **kwargs + ) -> None: """Connect to a local or remote threshold alert by identifier""" self.alert = MetricThresholdAlertDefinition(self) - super().__init__(identifier, **kwargs) + super().__init__(identifier, read_only, **kwargs) + + @classmethod + def get_all( + cls, count: int | None = None, offset: int | None = None + ) -> dict[str, typing.Any]: + raise NotImplementedError("Retrieve 
of only metric alerts is not yet supported") @classmethod @pydantic.validate_call @@ -207,6 +217,7 @@ def frequency(self) -> int: ) from e @frequency.setter + @write_only @pydantic.validate_call def frequency(self, frequency: int) -> None: """Set the monitor frequency for this alert""" diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index e5e630fa..4009de69 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -49,3 +49,9 @@ def new( ) _alert.offline_mode(offline) return _alert + + @classmethod + def get_all( + cls, count: int | None = None, offset: int | None = None + ) -> dict[str, typing.Any]: + raise NotImplementedError("Retrieve of only user alerts is not yet supported") diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 804d2220..93baee7f 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -44,6 +44,21 @@ def _wrapper(self) -> typing.Any: ) return member_func(self) + _wrapper.__name__ = member_func.__name__ + return _wrapper + + +def write_only(attribute_func: typing.Callable) -> typing.Callable: + def _wrapper(self: "SimvueObject", *args, **kwargs) -> typing.Any: + _sv_obj = getattr(self, "_sv_obj", self) + if _sv_obj._read_only: + raise AssertionError( + f"Cannot set property '{attribute_func.__name__}' " + f"on read-only object of type '{self._label}'" + ) + return attribute_func(self, *args, **kwargs) + + _wrapper.__name__ = attribute_func.__name__ return _wrapper @@ -66,6 +81,7 @@ def users(self) -> list[str]: return self._sv_obj._get_visibility().get("users", []) @users.setter + @write_only def users(self, users: list[str]) -> None: """Set the list of users able to see this object""" self._update_visibility("users", users) @@ -77,6 +93,7 @@ def public(self) -> bool: return self._sv_obj._get_visibility().get("public", False) @public.setter + @write_only def public(self, public: bool) -> None: """Set if this object is publically visible""" 
self._update_visibility("public", public) @@ -88,15 +105,19 @@ def tenant(self) -> bool: return self._sv_obj._get_visibility().get("tenant", False) @tenant.setter + @write_only def tenant(self, tenant: bool) -> None: """Set the tenant group this object is visible to""" self._update_visibility("tenant", tenant) class SimvueObject(abc.ABC): - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__( + self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs + ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) + self._read_only: bool = read_only self._endpoint: str = f"{self._label}s" self._identifier: typing.Optional[str] = ( identifier if identifier is not None else f"offline_{uuid.uuid1()}" @@ -115,8 +136,12 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self._user_config.offline.cache.joinpath("staging.json") ) - # Recover any locally staged changes - self._staging: dict[str, typing.Any] = self._get_local_staged() | kwargs + # Recover any locally staged changes if not read-only + self._staging: dict[str, typing.Any] = ( + self._get_local_staged() if read_only else {} + ) + + self._staging |= kwargs self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token}", @@ -134,6 +159,13 @@ def _get_local_staged(self) -> dict[str, typing.Any]: return _staged_data.get(self._label, {}).get(self._identifier, {}) def _get_attribute(self, attribute: str) -> typing.Any: + # In the case where the object is read-only, staging is the data + # already retrieved from the server + if (_attr := getattr(self, "_read_only", None)) and isinstance( + type(_attr), staging_check + ): + return self._staging[attribute] + try: return self._get()[attribute] except KeyError as e: @@ -174,6 +206,47 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: def 
new(cls, offline: bool = False, **kwargs): pass + @classmethod + def get_all( + cls, count: int | None = None, offset: int | None = None + ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: + _class_instance = cls(read_only=True) + _url = f"{_class_instance._base_url}" + _response = sv_get( + _url, + headers=_class_instance._headers, + params={"start": offset, "count": count}, + ) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + ) + + if not isinstance(_json_response, dict): + raise RuntimeError( + f"Expected dict from JSON response during {_class_instance.__class__.__name__.lower()}s retrieval " + f"but got '{type(_json_response)}'" + ) + + if not (_data := _json_response.get("data")): + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _json_response["data"]: + yield _entry["id"], cls(read_only=True, **_entry) + + def read_only(self, is_read_only: bool) -> None: + self._read_only = is_read_only + + # If using writable mode, clear the staging dictionary as + # in this context it contains existing data retrieved + # from the server/local entry which we dont want token + # repush unnecessarily then read any locally staged changes + if not self._read_only: + self._staging = self._get_local_staged() + def commit(self) -> None: if not self._staging: return diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 9611c875..75aa74fd 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -11,7 +11,7 @@ import typing import pydantic -from .base import SimvueObject, Visibility, staging_check +from .base import SimvueObject, Visibility, staging_check, write_only from simvue.models import FOLDER_REGEX @@ -25,7 +25,9 @@ class Folder(SimvueObject): """ - def __init__(self, identifier: typing.Optional[str] = 
None, **kwargs) -> None: + def __init__( + self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs + ) -> None: """Initialise a Folder If an identifier is provided a connection will be made to the @@ -36,11 +38,13 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: ---------- identifier : str, optional the remote server unique id for the target folder + read_only : bool, optional + create object in read-only mode **kwargs : dict any additional arguments to be passed to the object initialiser """ self.visibility = Visibility(self) - super().__init__(identifier, **kwargs) + super().__init__(identifier, read_only, **kwargs) @classmethod @pydantic.validate_call @@ -62,6 +66,7 @@ def tags(self) -> list[str]: return self._get_attribute("tags") @tags.setter + @write_only @pydantic.validate_call def tags(self, tags: list[str]) -> None: """Set tags assigned to this folder""" @@ -79,6 +84,7 @@ def description(self) -> typing.Optional[str]: return self._get().get("description") @description.setter + @write_only @pydantic.validate_call def description(self, description: str) -> None: """Update the folder description""" @@ -91,6 +97,7 @@ def name(self) -> typing.Optional[str]: return self._get().get("name") @name.setter + @write_only @pydantic.validate_call def name(self, name: str) -> None: """Update the folder name""" @@ -103,6 +110,7 @@ def star(self) -> bool: return self._get().get("starred", False) @star.setter + @write_only @pydantic.validate_call def star(self, is_true: bool = True) -> None: """Star this folder as a favourite""" @@ -115,6 +123,7 @@ def ttl(self) -> int: return self._get_attribute("ttl") @ttl.setter + @write_only @pydantic.validate_call def ttl(self, time_seconds: int) -> None: """Update the retention period for this folder""" diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index fdec72d3..f1e8e626 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -1,7 +1,7 
@@ import typing import pydantic import datetime -from .base import SimvueObject, staging_check, Visibility +from .base import SimvueObject, staging_check, Visibility, write_only from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT Status = typing.Literal[ @@ -12,7 +12,9 @@ class Run(SimvueObject): - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__( + self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs + ) -> None: """Initialise a Run If an identifier is provided a connection will be made to the @@ -23,11 +25,13 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: ---------- identifier : str, optional the remote server unique id for the target run + read_only : bool, optional + create object in read-only mode **kwargs : dict any additional arguments to be passed to the object initialiser """ self.visibility = Visibility(self) - super().__init__(identifier, **kwargs) + super().__init__(identifier, read_only, **kwargs) @classmethod @pydantic.validate_call @@ -48,6 +52,7 @@ def name(self) -> str: return self._get_attribute("name") @name.setter + @write_only @pydantic.validate_call def name( self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] @@ -60,6 +65,7 @@ def tags(self) -> list[str]: return self._get_attribute("tags") @tags.setter + @write_only @pydantic.validate_call def tags(self, tags: list[str]) -> None: self._staging["tags"] = tags @@ -70,6 +76,7 @@ def status(self) -> Status: return self._get_attribute("status") @status.setter + @write_only @pydantic.validate_call def status(self, status: Status) -> None: self._staging["status"] = status @@ -81,6 +88,7 @@ def ttl(self) -> int: return self._get_attribute("ttl") @ttl.setter + @write_only @pydantic.validate_call def ttl(self, time_seconds: int) -> None: """Update the retention period for this run""" @@ -92,6 +100,7 @@ def folder(self) -> str: return 
self._get_attribute("folder") @folder.setter + @write_only @pydantic.validate_call def folder( self, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] @@ -104,6 +113,7 @@ def metadata(self) -> dict[str, typing.Any]: return self._get_attribute("metadata") @metadata.setter + @write_only @pydantic.validate_call def metadata(self, metadata: dict[str, typing.Any]) -> None: self._staging["metadata"] = metadata @@ -114,6 +124,7 @@ def description(self) -> str: return self._get_attribute("description") @description.setter + @write_only @pydantic.validate_call def description(self, description: str) -> None: self._staging["description"] = description @@ -128,6 +139,7 @@ def heartbeat_timeout(self) -> int: return self._get_attribute("heartbeat_timeout") @heartbeat_timeout.setter + @write_only @pydantic.validate_call def heartbeat_timeout(self, time_seconds: int) -> None: self._staging["heartbeat_timeout"] = time_seconds @@ -138,6 +150,7 @@ def notifications(self) -> typing.Literal["none", "email"]: return self._get_attribute("notifications") @notifications.setter + @write_only @pydantic.validate_call def notifications(self, notifications: typing.Literal["none", "email"]) -> None: self._staging["notifications"] = notifications @@ -148,6 +161,7 @@ def alerts(self) -> list[str]: return self._get_attribute("alerts") @alerts.setter + @write_only @pydantic.validate_call def alerts(self, alerts: list[str]) -> None: self._staging["alerts"] = alerts @@ -160,6 +174,7 @@ def created(self) -> datetime.datetime: ) @created.setter + @write_only @pydantic.validate_call def created(self, created: datetime.datetime) -> None: self._staging["created"] = created.strftime(DATETIME_FORMAT) @@ -172,6 +187,7 @@ def started(self) -> datetime.datetime: ) @started.setter + @write_only @pydantic.validate_call def started(self, started: datetime.datetime) -> None: self._staging["started"] = started.strftime(DATETIME_FORMAT) @@ -184,6 +200,7 @@ def endtime(self) -> datetime.datetime: ) 
@endtime.setter + @write_only @pydantic.validate_call def endtime(self, endtime: datetime.datetime) -> None: self._staging["endtime"] = endtime.strftime(DATETIME_FORMAT) diff --git a/simvue/api/objects/storage/__init__.py b/simvue/api/objects/storage/__init__.py index 742d86dc..a89d5690 100644 --- a/simvue/api/objects/storage/__init__.py +++ b/simvue/api/objects/storage/__init__.py @@ -1,2 +1,3 @@ from .file import FileStorage as FileStorage from .s3 import S3Storage as S3Storage +from .fetch import Storage as Storage diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index c78ae2a2..60202fe9 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -1,19 +1,27 @@ import typing -import abc -from simvue.api.objects.base import SimvueObject, staging_check +import pydantic +from simvue.api.objects.base import SimvueObject, staging_check, write_only +from simvue.models import NAME_REGEX -class Storage(SimvueObject): - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + +class StorageBase(SimvueObject): + def __init__( + self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs + ) -> None: """Retrieve an alert from the Simvue server by identifier""" + super().__init__(identifier, read_only, **kwargs) + self.status = Status(self) self._label = "storage" self._endpoint = self._label - super().__init__(identifier, **kwargs) - @abc.abstractclassmethod - def new(cls, **_): + @classmethod + def new(cls, **kwargs): pass + def get_status(self) -> dict[str, typing.Any]: + return {} if self._offline else self._get_attribute("status") + @property @staging_check def name(self) -> str: @@ -21,7 +29,11 @@ def name(self) -> str: return self._get_attribute("name") @name.setter - def name(self, name: list[str]) -> None: + @write_only + @pydantic.validate_call + def name( + self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] + ) -> None: """Set name 
assigned to this folder""" self._staging["name"] = name @@ -37,6 +49,8 @@ def default(self) -> bool: return self._get_attribute("default") @default.setter + @write_only + @pydantic.validate_call def default(self, is_default: bool) -> None: """Set this storage to be the default""" self._staging["default"] = is_default @@ -48,11 +62,35 @@ def tenant_usable(self) -> bool: return self._get_attribute("tenant_usable") @tenant_usable.setter + @write_only + @pydantic.validate_call def tenant_usable(self, is_tenant_usable: bool) -> None: """Set this storage to be usable by the current user tenant""" self._staging["tenant_usable"] = is_tenant_usable @property - def disable_check(self) -> bool: - """Retrieve if checks are disabled for this storage""" - return self._get_attribute("disable_check") + def usage(self) -> int | None: + return None if self._offline else self._get_attribute("usage") + + @property + def user(self) -> str | None: + return None if self._offline else self._get_attribute("user") + + +class Status: + def __init__(self, storage: StorageBase) -> None: + self._sv_obj = storage + + @property + def status(self) -> str: + try: + return self._sv_obj.get_status()["status"] + except KeyError as e: + raise RuntimeError("Expected key 'status' in status retrieval") from e + + @property + def timestamp(self) -> str: + try: + return self._sv_obj.get_status()["timestamp"] + except KeyError as e: + raise RuntimeError("Expected key 'timestamp' in status retrieval") from e diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py new file mode 100644 index 00000000..22632a07 --- /dev/null +++ b/simvue/api/objects/storage/fetch.py @@ -0,0 +1,64 @@ +""" +Simvue Storage Retrieval +==============--======== + +To simplify case whereby user does not know the storage type associated +with an identifier, use a generic storage object. 
+""" + +import typing +import http + +from simvue.api.request import get_json_from_response +from simvue.api.request import get as sv_get + +from .s3 import S3Storage +from .file import FileStorage +from .base import StorageBase + + +class Storage: + """Generic Simvue storage retrieval class""" + + def __new__(cls, identifier: str | None = None, **kwargs): + """Retrieve an object representing an storage either locally or on the server by id""" + _storage_pre = StorageBase(identifier=identifier, **kwargs) + if _storage_pre.type == "S3": + return S3Storage(identifier=identifier, **kwargs) + elif _storage_pre.type == "File": + return FileStorage(identifier=identifier, **kwargs) + + raise RuntimeError(f"Unknown type '{_storage_pre.type}'") + + @classmethod + def get_all( + cls, count: int | None = None, offset: int | None = None + ) -> typing.Generator[tuple[str, FileStorage | S3Storage], None, None]: + _class_instance = StorageBase(read_only=True) + _url = f"{_class_instance._base_url}" + _response = sv_get( + _url, + headers=_class_instance._headers, + params={"start": offset, "count": count}, + ) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + ) + + if not isinstance(_json_response, list): + raise RuntimeError( + f"Expected list from JSON response during {_class_instance.__class__.__name__.lower()}s retrieval " + f"but got '{type(_json_response)}'" + ) + + _out_dict: dict[str, FileStorage | S3Storage] = {} + + for _entry in _json_response: + if _entry["type"] == "S3": + yield _entry["id"], S3Storage(read_only=True, **_entry) + elif _entry["type"] == "File": + yield _entry["id"], FileStorage(read_only=True, **_entry) + else: + raise RuntimeError(f"Unrecognised storage type '{_entry['type']}'") diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index 8c4eb434..254b3dd0 100644 --- 
a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -1,11 +1,11 @@ import typing import pydantic -from .base import Storage +from .base import StorageBase from simvue.models import NAME_REGEX -class FileStorage(Storage): +class FileStorage(StorageBase): @classmethod @pydantic.validate_call def new( @@ -13,9 +13,17 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], disable_check: bool, + tenant_usable: bool, + default: bool, offline: bool = False, ) -> typing.Self: """Create a new file storage object""" - _storage = FileStorage(name=name, type="file", disable_check=disable_check) + _storage = FileStorage( + name=name, + type="File", + disable_check=disable_check, + tenant_useable=tenant_usable, + default=default, + ) _storage.offline_mode(offline) return _storage diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index fb073a57..176e89ad 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -1,11 +1,13 @@ import typing import pydantic -from .base import Storage, staging_check +from simvue.api.objects.base import write_only + +from .base import StorageBase, staging_check from simvue.models import NAME_REGEX -class S3Storage(Storage): +class S3Storage(StorageBase): def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self.config = Config(self) super().__init__(identifier, **kwargs) @@ -17,27 +19,35 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], disable_check: bool, - endpoint_url: str, + endpoint_url: pydantic.HttpUrl, region_name: str, access_key_id: str, secret_access_key: pydantic.SecretStr, bucket: str, + tenant_usable: bool, + default: bool, offline: bool = False, ) -> typing.Self: """Create a new S3 storage object""" _config: dict[str, str] = { - "endpoint_url": endpoint_url, + "endpoint_url": endpoint_url.__str__(), "region_name": region_name, "access_key_id": access_key_id, 
"secret_access_key": secret_access_key.get_secret_value(), "bucket": bucket, } _storage = S3Storage( - name=name, type="S3", config=_config, disable_check=disable_check + name=name, + type="S3", + config=_config, + disable_check=disable_check, + tenant_useable=tenant_usable, + default=default, ) _storage.offline_mode(offline) return _storage + @staging_check def get_config(self) -> dict[str, typing.Any]: """Retrieve configuration""" try: @@ -53,121 +63,50 @@ def __init__(self, storage: S3Storage) -> None: @property @staging_check def endpoint_url(self) -> str: - if self._sv_obj.type == "file": - raise ValueError( - f"Storage type '{self._sv_obj.type}' has no attribute 'endpoint_url'" - ) - try: return self._sv_obj.get_config()["endpoint_url"] except KeyError as e: raise RuntimeError( - "Expected key 'frequency' in alert definition retrieval" + "Expected key 'endpoint_url' in alert definition retrieval" ) from e @endpoint_url.setter - def endpoint_url(self, endpoint_url: str) -> None: - if self._sv_obj.type == "file": - raise ValueError( - f"Cannot set attribute 'endpoint_url' for storage type '{self._sv_obj.type}'" - ) - - _config = self._sv_obj.get_config() | {"endpoint_url": endpoint_url} + @write_only + @pydantic.validate_call + def endpoint_url(self, endpoint_url: pydantic.HttpUrl) -> None: + _config = self._sv_obj.get_config() | {"endpoint_url": endpoint_url.__str__()} self._sv_obj._staging["config"] = _config @property @staging_check def region_name(self) -> str: - if self._sv_obj.type == "file": - raise ValueError( - f"Storage type '{self._sv_obj.type}' has no attribute 'region_name'" - ) - try: return self._sv_obj.get_config()["region_name"] except KeyError as e: raise RuntimeError( - "Expected key 'frequency' in alert definition retrieval" + "Expected key 'region_name' in alert definition retrieval" ) from e @region_name.setter + @write_only + @pydantic.validate_call def region_name(self, region_name: str) -> None: - if self._sv_obj.type == "file": - raise 
ValueError( - f"Cannot set attribute 'region_name' for storage type '{self._sv_obj.type}'" - ) - _config = self._sv_obj.get_config() | {"region_name": region_name} self._sv_obj._staging["config"] = _config - @property - @staging_check - def access_key_id(self) -> str: - if self._sv_obj.type == "file": - raise ValueError( - f"Storage type '{self._sv_obj.type}' has no attribute 'access_key_id'" - ) - - try: - return self._sv_obj.get_config()["access_key_id"] - except KeyError as e: - raise RuntimeError( - "Expected key 'frequency' in alert definition retrieval" - ) from e - - @access_key_id.setter - def access_key_id(self, access_key_id: str) -> None: - if self._sv_obj.type == "file": - raise ValueError( - f"Cannot set attribute 'access_key_id' for storage type '{self._sv_obj.type}'" - ) - - _config = self._sv_obj.get_config() | {"access_key_id": access_key_id} - self._sv_obj._staging["config"] = _config - - @property - @staging_check - def secret_access_key(self) -> pydantic.SecretStr: - if self._sv_obj.type == "file": - raise ValueError( - f"Storage type '{self._sv_obj.type}' has no attribute 'secret_access_key'" - ) - - try: - return pydantic.SecretStr(self._sv_obj.get_config()["secret_access_key"]) - except KeyError as e: - raise RuntimeError( - "Expected key 'frequency' in alert definition retrieval" - ) from e - - @secret_access_key.setter - def secret_access_key(self, secret_access_key: pydantic.SecretStr) -> None: - if self._sv_obj.type == "file": - raise ValueError( - f"Cannot set attribute 'secret_access_key' for storage type '{self._sv_obj.type}'" - ) - - _config = self._sv_obj.get_config() | { - "secret_access_key": secret_access_key.get_secret_value() - } - self._sv_obj._staging["config"] = _config - @property @staging_check def bucket(self) -> str: - if self._sv_obj.type == "file": - raise ValueError( - f"Storage type '{self._sv_obj.type}' has no attribute 'bucket'" - ) - try: return self._sv_obj.get_config()["bucket"] except KeyError as e: raise 
RuntimeError( - "Expected key 'frequency' in alert definition retrieval" + "Expected key 'bucket' in alert definition retrieval" ) from e @bucket.setter + @write_only + @pydantic.validate_call def bucket(self, bucket: str) -> None: if self._sv_obj.type == "file": raise ValueError( diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 5f3aad85..a5ff441f 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -1,6 +1,6 @@ import pydantic.color import typing -from .base import SimvueObject, staging_check +from .base import SimvueObject, staging_check, write_only __all__ = ["Tag"] @@ -26,6 +26,7 @@ def name(self) -> str: return self._get_attribute("name") @name.setter + @write_only @pydantic.validate_call def name(self, name: str) -> None: self._staging["name"] = name @@ -36,6 +37,7 @@ def color(self) -> pydantic.color.RGBA: return pydantic.color.parse_str(self._get_attribute("colour")) @color.setter + @write_only @pydantic.validate_call def color(self, color: pydantic.color.Color) -> None: self._staging["colour"] = color.as_hex() @@ -46,6 +48,7 @@ def description(self) -> str: return self._get_attribute("description") @description.setter + @write_only @pydantic.validate_call def description(self, description: str) -> None: self._staging["description"] = description diff --git a/simvue/api/request.py b/simvue/api/request.py index 524714f9..632dc7ed 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -154,7 +154,10 @@ def put( reraise=True, ) def get( - url: str, headers: dict[str, str], timeout: int = DEFAULT_API_TIMEOUT + url: str, + headers: dict[str, str], + params: dict[str, str | int | float] | None = None, + timeout: int = DEFAULT_API_TIMEOUT, ) -> requests.Response: """HTTP GET @@ -172,7 +175,7 @@ def get( requests.Response response from executing GET """ - response = requests.get(url, headers=headers, timeout=timeout) + response = requests.get(url, headers=headers, timeout=timeout, params=params) 
response.raise_for_status() return response diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index 65627a53..db037cd5 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -8,15 +8,17 @@ @pytest.mark.api def test_create_file_storage_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _storage = FileStorage.new(name=_uuid, disable_check=False) + _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False) _storage.commit() + assert _storage.status.status assert _storage.name == _uuid + _storage.delete() @pytest.mark.api def test_create_file_storage_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _storage = FileStorage.new(name=_uuid, disable_check=False, offline=True) + _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False, offline=True) _storage.commit() assert _storage.name == _uuid _storage.delete() diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 7ccca943..ab4118a3 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -17,6 +17,13 @@ def test_folder_creation_online() -> None: assert not _folder.visibility.public assert not _folder.visibility.tenant assert not _folder.visibility.users + _folders = Folder.get_all(count=10) + assert _folders + assert _folders[_folder.id] + assert _folders[_folder.id]._read_only + with pytest.raises(AssertionError): + _folders[_folder.id].name = "hello" + _folder.delete() @pytest.mark.api diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index f7f5abe9..f1053c81 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -4,6 +4,7 @@ import uuid from simvue.api.objects import S3Storage +from simvue.api.objects.storage.fetch import Storage @pytest.mark.api def test_create_s3_online() -> None: @@ -11,7 +12,9 @@ def test_create_s3_online() -> None: _storage = S3Storage.new( 
name=_uuid, endpoint_url="https://not_a_real_url.io", - disable_check=False, + disable_check=True, + tenant_usable=False, + default=False, region_name="fictionsville", access_key_id="dummy_key", secret_access_key="not_a_key", @@ -19,12 +22,14 @@ def test_create_s3_online() -> None: ) _storage.commit() assert _storage.name == _uuid - assert not _storage.disable_check - assert _storage.config.endpoint_url == "https://not_a_real_url.io" + assert _storage.config.endpoint_url == "https://not_a_real_url.io/" assert _storage.config.region_name == "fictionsville" - assert _storage.config.access_key_id == "dummy_key" - assert _storage.config.secret_access_key.get_secret_value() == "not_a_key" assert _storage.config.bucket == "dummy_bucket" + assert not _storage.usage + assert _storage.status.status + assert _storage.status.timestamp + assert _storage.user + assert dict(Storage.get_all()) _storage.delete() @@ -39,14 +44,16 @@ def test_create_s3_offline() -> None: access_key_id="dummy_key", secret_access_key="not_a_key", bucket="dummy_bucket", + default=False, + tenant_usable=False, offline=True ) _storage.commit() assert _storage.name == _uuid - assert not _storage.disable_check assert _storage.config.endpoint_url == "https://not_a_real_url.io" assert _storage.config.region_name == "fictionsville" - assert _storage.config.access_key_id == "dummy_key" - assert _storage.config.secret_access_key.get_secret_value() == "not_a_key" assert _storage.config.bucket == "dummy_bucket" + assert not _storage.status + assert not _storage.user + assert not _storage.usage _storage.delete() diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index 207a4bd2..6841508f 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -16,6 +16,7 @@ def test_user_alert_creation_online() -> None: assert _alert.source == "user" assert _alert.name == f"users_alert_{_uuid}" assert _alert.notification == "none" + assert dict(Alert.get_all()) _alert.delete() From 
00d86a9c8a7973f860ef6dfe82b7f0e87626062f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 18 Nov 2024 11:43:41 +0000 Subject: [PATCH 010/163] Start modification of upper run class --- simvue/api/objects/metric.py | 16 ++++ simvue/api/objects/run.py | 44 +++++++++++ simvue/run.py | 145 ++++++++++++++++------------------- 3 files changed, 128 insertions(+), 77 deletions(-) create mode 100644 simvue/api/objects/metric.py diff --git a/simvue/api/objects/metric.py b/simvue/api/objects/metric.py new file mode 100644 index 00000000..80a60ca9 --- /dev/null +++ b/simvue/api/objects/metric.py @@ -0,0 +1,16 @@ +import typing +from .base import SimvueObject + + +class Metrics(SimvueObject): + def __init__( + self, + run_identifier: typing.Optional[str] = None, + read_only: bool = False, + **kwargs, + ) -> None: + super().__init__(run_identifier, read_only, **kwargs) + + @property + def url(self) -> str: + return f"{self._base_url}/{self._url_path}" diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index f1e8e626..5c1e756a 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -1,7 +1,11 @@ +import http import typing import pydantic import datetime +import boltons.urlutils as bo_url + from .base import SimvueObject, staging_check, Visibility, write_only +from simvue.api.request import get as sv_get, put as sv_put, get_json_from_response from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT Status = typing.Literal[ @@ -204,3 +208,43 @@ def endtime(self) -> datetime.datetime: @pydantic.validate_call def endtime(self, endtime: datetime.datetime) -> None: self._staging["endtime"] = endtime.strftime(DATETIME_FORMAT) + + @write_only + def send_heartbeat(self) -> dict[str, typing.Any] | None: + if self._offline: + return None + + _url = bo_url.URL(self._user_config.server.url) + _url.path = f"{self._url_path / 'heartbeat' / self._identifier}" + _response = sv_put(f"{_url}", headers=self._headers, data={}) 
+ _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving abort status", + ) + if not isinstance(_json_response, dict): + raise RuntimeError( + f"Expected dictionary from JSON response during {self._label} abort status check " + f"but got '{type(_json_response)}'" + ) + return _json_response + + @property + def abort_trigger(self) -> bool: + if self._offline: + return False + + _url = bo_url.URL(self._user_config.server.url) + _url.path = f"{self._url_path}/abort" + _response = sv_get(f"{_url}", headers=self._headers) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving abort status", + ) + if not isinstance(_json_response, dict): + raise RuntimeError( + f"Expected dictionary from JSON response during {self._label} abort status check " + f"but got '{type(_json_response)}'" + ) + return _json_response.get("status", False) diff --git a/simvue/run.py b/simvue/run.py index 9400d94f..4c2b3519 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -30,16 +30,15 @@ import click import msgpack import psutil -from pydantic import ValidationError + from .config.user import SimvueConfiguration import simvue.api.request as sv_api from .factory.dispatch import Dispatcher from .executor import Executor -from .factory.proxy import Simvue from .metrics import get_gpu_metrics, get_process_cpu, get_process_memory -from .models import RunInput, FOLDER_REGEX, NAME_REGEX, MetricKeyString +from .models import FOLDER_REGEX, NAME_REGEX, MetricKeyString from .serialization import serialize_object from .system import get_system from .metadata import git_info, environment @@ -51,6 +50,7 @@ validate_timestamp, simvue_timestamp, ) +from .api.objects import Run as RunObject, Artifact try: from typing import Self @@ -59,7 +59,6 @@ if typing.TYPE_CHECKING: - from .factory.proxy import SimvueBaseClass from .factory.dispatch import DispatcherBaseClass 
UPLOAD_TIMEOUT: int = 30 @@ -173,7 +172,7 @@ def __init__( self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token}" } - self._simvue: typing.Optional[SimvueBaseClass] = None + self._sv_obj: typing.Optional[RunObject] = None self._pid: typing.Optional[int] = 0 self._shutdown_event: typing.Optional[threading.Event] = None self._configuration_lock = threading.Lock() @@ -356,7 +355,7 @@ def _heartbeat( # Check if the user has aborted the run with self._configuration_lock: - if self._simvue and self._simvue.get_abort_status(): + if self._sv_obj and self._sv_obj.abort_trigger: self._alert_raised_trigger.set() logger.debug("Received abort request from server") @@ -379,8 +378,8 @@ def _heartbeat( if self._abort_on_alert == "terminate": os._exit(1) - if self._simvue: - self._simvue.send_heartbeat() + if self._sv_obj: + self._sv_obj.send_heartbeat() return _heartbeat @@ -479,14 +478,13 @@ def _start(self, reconnect: bool = False) -> bool: logger.debug("Starting run") - data: dict[str, typing.Any] = {"status": self._status} - - if reconnect: - data["system"] = get_system() + if self._sv_obj: + self._sv_obj.status = self._status - if self._simvue and not self._simvue.update(data): - return False + if reconnect: + self._sv_obj.system = get_system() + self._sv_obj.commit() self._start_time = time.time() if self._pid == 0: @@ -559,13 +557,15 @@ def _error(self, message: str, join_threads: bool = True) -> None: if not self._suppress_errors: raise RuntimeError(message) - else: - # Simvue support now terminated as the instance of Run has entered - # the dormant state due to exception throw so set listing to be 'lost' - if self._status == "running" and self._simvue: - self._simvue.update({"name": self._name, "status": "lost"}) - logger.error(message) + # Simvue support now terminated as the instance of Run has entered + # the dormant state due to exception throw so set listing to be 'lost' + if self._status == "running" and self._sv_obj: + 
self._sv_obj.name = self._name + self._sv_obj.status = "lost" + self._sv_obj.commit() + + logger.error(message) self._aborted = True @@ -680,40 +680,25 @@ def init( self._timer = time.time() - data: dict[str, typing.Any] = { - "metadata": (metadata or {}) | git_info(os.getcwd()) | environment(), - "tags": tags or [], - "status": self._status, - "ttl": self._retention, - "folder": folder, - "name": name, - "description": description, - "system": get_system() if self._status == "running" else None, - "visibility": { - "users": [] if not isinstance(visibility, list) else visibility, - "tenant": visibility == "tenant", - "public": visibility == "public", - }, - "heartbeat_timeout": timeout, + self._sv_obj = RunObject.new(folder=folder) + self._sv_obj.description = description + self._sv_obj.name = name + self._sv_obj.visibility = { + "users": visibility if isinstance(visibility, list) else [], + "tenant": visibility == "tenant", + "public": visibility == "public", } - - # Check against the expected run input - try: - RunInput(**data) - except ValidationError as err: - self._error(f"{err}") - return False - - self._simvue = Simvue( - name=self._name, - uniq_id=self._uuid, - mode=self._mode, - config=self._user_config, - suppress_errors=self._suppress_errors, + self._sv_obj.ttl = self._retention + self._sv_obj.status = self._status + self._sv_obj.metadata = ( + (metadata or {}) | git_info(os.getcwd()) | environment(), ) - name, self._id = self._simvue.create_run(data) + self._sv_obj.heartbeat_timeout = timeout + self._sv_obj.system = get_system() if self._status == "running" else None + self._data = self._sv_obj._staging + self._sv_obj.commit() - self._data = data + name, self._id = self._sv_obj.name, self._sv_obj.id if not name: return False @@ -938,9 +923,7 @@ def reconnect(self, run_id: str) -> bool: self._status = "running" self._id = run_id - self._simvue = Simvue( - self._name, self._id, self._mode, self._user_config, self._suppress_errors - ) + self._sv_obj = 
RunObject(identifier=self._id) self._start(reconnect=True) return True @@ -1065,7 +1048,7 @@ def update_metadata(self, metadata: dict[str, typing.Any]) -> bool: bool if the update was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot update metadata, run not initialised") return False @@ -1073,12 +1056,12 @@ def update_metadata(self, metadata: dict[str, typing.Any]) -> bool: self._error("metadata must be a dict") return False - data: dict[str, dict[str, typing.Any]] = {"metadata": metadata} - - if self._simvue and self._simvue.update(data): + if self._sv_obj: + self._sv_obj.metadata = metadata + self._sv_obj.commit() return True - return False + return True @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @@ -1096,16 +1079,14 @@ def set_tags(self, tags: list[str]) -> bool: bool whether the update was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot update tags, run not initialised") return False - data: dict[str, list[str]] = {"tags": tags} - - if self._simvue and self._simvue.update(data): - return True + self._sv_obj.tags = tags + self._sv_obj.commit() - return False + return True @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @@ -1123,11 +1104,11 @@ def update_tags(self, tags: list[str]) -> bool: bool whether the update was successful """ - if not self._simvue: + if not self._sv_obj: return False try: - current_tags: list[str] = self._simvue.list_tags() or [] + current_tags: list[str] = self._sv_obj.tags except RuntimeError as e: self._error(f"{e.args[0]}") return False @@ -1161,7 +1142,7 @@ def log_event(self, message: str, timestamp: typing.Optional[str] = None) -> boo if self._aborted: return False - if not self._simvue or not self._dispatcher: + if not self._sv_obj or not self._dispatcher: self._error("Cannot log events, run not initialised") return False @@ -1197,7 +1178,7 @@ def _add_metrics_to_dispatch( if not metrics: return True - 
if not self._simvue or not self._dispatcher: + if not self._sv_obj or not self._dispatcher: self._error("Cannot log metrics, run not initialised", join_on_fail) return False @@ -1315,7 +1296,7 @@ def save_object( # Register file try: - return self._simvue is not None and self._simvue.save_file(data) is not None + return self._sv_obj is not None and self._simvue.save_file(data) is not None except RuntimeError as e: self._error(f"{e.args[0]}") return False @@ -1402,6 +1383,19 @@ def save_file( return True # Register file + _artifact = Artifact.new( + name=name or stored_file_name, + run=self._sv_obj.id, + storage=self._storage_id, + file_path=file_path, + offline=self._mode == "offline", + file_type=None, + category=category, + ) + _artifact.commit() + + _storage_id = _artifact.storage + try: return self._simvue.save_file(data) is not None except RuntimeError as e: @@ -1522,15 +1516,12 @@ def set_status( self._error("Run is not active") return False - data: dict[str, str] = {"name": self._name, "status": status} self._status = status - try: - if self._simvue and self._simvue.update(data): - return True - except RuntimeError as e: - self._error(f"{e.args[0]}") - return False + if self._sv_obj: + self._sv_obj.status = status + self._sv_obj.commit() + return True return False From d2b23ff3946d0811f654381f32c1acbeef0601ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 20 Nov 2024 13:07:18 +0000 Subject: [PATCH 011/163] Fixed recursive delete --- simvue/api/objects/administrator/tenant.py | 2 +- simvue/api/objects/administrator/user.py | 10 +- simvue/api/objects/alert/base.py | 3 + simvue/api/objects/alert/events.py | 9 +- simvue/api/objects/alert/fetch.py | 11 +- simvue/api/objects/alert/metrics.py | 36 +++- simvue/api/objects/alert/user.py | 3 +- simvue/api/objects/artifact.py | 169 +++++++++++++++-- simvue/api/objects/base.py | 43 +++-- simvue/api/objects/folder.py | 12 +- simvue/api/objects/run.py | 10 +- 
simvue/api/objects/storage/base.py | 4 +- simvue/api/objects/storage/fetch.py | 11 +- simvue/api/objects/storage/file.py | 1 + simvue/api/objects/storage/s3.py | 1 + simvue/api/objects/tag.py | 11 +- simvue/api/request.py | 27 ++- simvue/client.py | 210 +++------------------ simvue/run.py | 185 +++++++++--------- simvue/serialization.py | 5 +- simvue/utilities.py | 25 +-- tests/unit/test_folder.py | 2 +- tests/unit/test_s3_storage.py | 2 +- tests/unit/test_user_alert.py | 2 +- 24 files changed, 416 insertions(+), 378 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 68c64eca..905c2a1c 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -12,7 +12,7 @@ class Tenant(SimvueObject): def new( cls, *, name: str, enabled: bool = True, offline: bool = False ) -> typing.Self: - _tenant = Tenant(name=name, enabled=enabled, offline=offline) + _tenant = Tenant(name=name, enabled=enabled, offline=offline, read_only=False) _tenant.offline_mode(offline) return _tenant # type: ignore diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index f6106c6a..d61ed137 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -30,10 +30,18 @@ def new( "admin": admin, "enabled": enabled, } - _user = User(user=_user_info, tenant=tenant, offline=offline) + _user = User(user=_user_info, tenant=tenant, offline=offline, read_only=False) _user.offline_mode(offline) return _user # type: ignore + @classmethod + def get( + cls, *, count: int | None = None, offset: int | None = None, **kwargs + ) -> dict[str, "User"]: + # Currently no user filters + kwargs.pop("filters", None) + return super().get(count=count, offset=offset, **kwargs) + @property @staging_check def username(self) -> str: diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 
dcb18fce..f1d3ddce 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -29,6 +29,9 @@ def __init__( self._label = "alert" super().__init__(identifier, read_only, **kwargs) + def compare(self, other: "AlertBase") -> bool: + return type(self) is type(other) and self.name == other.name + @staging_check def get_alert(self) -> dict[str, typing.Any]: """Retrieve alert definition""" diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index f78b3699..ea6bbcac 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -17,15 +17,13 @@ class EventsAlert(AlertBase): """Connect to an event-based alert either locally or on a server""" - def __init__( - self, identifier: str | None = None, read_only: bool = False, **kwargs - ) -> None: + def __init__(self, identifier: str | None = None, **kwargs) -> None: """Initialise a connection to an event alert by identifier""" self.alert = EventAlertDefinition(self) - super().__init__(identifier, read_only, **kwargs) + super().__init__(identifier, **kwargs) @classmethod - def get_all( + def get( cls, count: int | None = None, offset: int | None = None ) -> dict[str, typing.Any]: raise NotImplementedError("Retrieve of only event alerts is not yet supported") @@ -70,6 +68,7 @@ def new( source="events", alert=_alert_definition, enabled=enabled, + _read_only=False, ) _alert.offline_mode(offline) return _alert diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 8e10161b..3b17d379 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -22,7 +22,7 @@ class Alert: """Generic Simvue alert retrieval class""" - def __new__(cls, identifier: str | None = None, **kwargs): + def __new__(cls, identifier: str | None = None, **kwargs) -> AlertType: """Retrieve an object representing an alert either locally or on the server by id""" _alert_pre = AlertBase(identifier=identifier, **kwargs) 
if _alert_pre.source == "events": @@ -35,10 +35,13 @@ def __new__(cls, identifier: str | None = None, **kwargs): raise RuntimeError(f"Unknown source type '{_alert_pre.source}'") @classmethod - def get_all( - cls, count: int | None = None, offset: int | None = None + def get( + cls, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, AlertType], None, None]: - _class_instance = AlertBase(read_only=True) + # Currently no alert filters + kwargs.pop("filters", None) + + _class_instance = AlertBase(read_only=True, **kwargs) _url = f"{_class_instance._base_url}" _response = sv_get( _url, diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index ea940502..34cada1b 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -21,15 +21,13 @@ class MetricsThresholdAlert(AlertBase): """Class for connecting to/creating a local or remotely defined metric threshold alert""" - def __init__( - self, identifier: str | None = None, read_only: bool = False, **kwargs - ) -> None: + def __init__(self, identifier: str | None = None, **kwargs) -> None: """Connect to a local or remote threshold alert by identifier""" self.alert = MetricThresholdAlertDefinition(self) - super().__init__(identifier, read_only, **kwargs) + super().__init__(identifier, **kwargs) @classmethod - def get_all( + def get( cls, count: int | None = None, offset: int | None = None ) -> dict[str, typing.Any]: raise NotImplementedError("Retrieve of only metric alerts is not yet supported") @@ -92,6 +90,7 @@ def new( source="metrics", alert=_alert_definition, enabled=enabled, + _read_only=False, ) _alert.offline_mode(offline) return _alert @@ -105,6 +104,9 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: self.alert = MetricRangeAlertDefinition(self) super().__init__(identifier, **kwargs) + def compare(self, other: "MetricsRangeAlert") -> bool: + return 
all([self.alert.compare(other.alert), super().compare(other)]) + @classmethod @pydantic.validate_call def new( @@ -144,7 +146,7 @@ def new( the value defining the upper limit range_low : float | int the value defining the lower limit - frequency : int + frequency : int | None how often to monitor the metric enabled : bool, optional whether this alert is enabled upon creation, default is True @@ -182,6 +184,16 @@ def __init__(self, alert: MetricsRangeAlert) -> None: """Initialise definition with target alert""" self._sv_obj = alert + def compare(self, other: "MetricsAlertDefinition") -> bool: + return all( + [ + self.aggregation == other.aggregation, + self.frequency == other.frequency, + self.rule == other.rule, + self.window == other.window, + ] + ) + @property def aggregation(self) -> Aggregate: """Retrieve the aggregation strategy for this alert""" @@ -228,6 +240,9 @@ def frequency(self, frequency: int) -> None: class MetricThresholdAlertDefinition(MetricsAlertDefinition): """Alert definition for metric threshold alerts""" + def compare(self, other: "MetricThresholdAlertDefinition") -> bool: + return all([super().compare(other), self.threshold == other.threshold]) + @property def threshold(self) -> float: """Retrieve the threshold value for this alert""" @@ -239,6 +254,15 @@ def threshold(self) -> float: class MetricRangeAlertDefinition(MetricsAlertDefinition): """Alert definition for metric range alerts""" + def compare(self, other: "MetricRangeAlertDefinition") -> bool: + return all( + [ + super().compare(other), + self.range_high == other.range_high, + self.range_low == other.range_low, + ] + ) + @property def range_low(self) -> float: """Retrieve the lower limit for metric range""" diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 4009de69..a0b62111 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -46,12 +46,13 @@ def new( notification=notification, source="user", enabled=enabled, + 
_read_only=False, ) _alert.offline_mode(offline) return _alert @classmethod - def get_all( + def get( cls, count: int | None = None, offset: int | None = None ) -> dict[str, typing.Any]: raise NotImplementedError("Retrieve of only user alerts is not yet supported") diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 03be8abe..3cb46316 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -6,25 +6,40 @@ """ +import http import typing -import os.path import pydantic +import os.path +import sys from simvue.models import NAME_REGEX from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 -from .base import SimvueObject +from simvue.api.objects.base import SimvueObject +from simvue.serialization import serialize_object +from simvue.api.request import put as sv_put, get_json_from_response Category = typing.Literal["code", "input", "output"] -__all__ = ["Artifact"] +UPLOAD_TIMEOUT: int = 30 class Artifact(SimvueObject): """Connect to/create an artifact locally or on the server""" + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + super().__init__(identifier, **kwargs) + self._storage_url: str | None = None + self._label = "artifact" + + @classmethod + def new(cls, *_, **__) -> None: + raise NotImplementedError( + "No method 'new' for type 'artifact', use 'new_file' or 'new_object'" + ) + @classmethod @pydantic.validate_call - def new( + def new_file( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], @@ -66,19 +81,140 @@ def new( _file_orig_path = file_path.expanduser().absolute() _file_checksum = calculate_sha256(f"{file_path}", is_file=True) - _artifact = Artifact( - name=name, - run=run, - storage=storage, - category=category, - originalPath=os.path.expandvars(_file_orig_path), - size=_file_size, - type=_file_type, - checksum=_file_checksum, - ) + _upload_data = { + "name": name, + "storage": storage, + "category": category, + 
"originalPath": os.path.expandvars(_file_orig_path), + "size": _file_size, + "type": _file_type, + "checksum": _file_checksum, + } + + _artifact = Artifact(_read_only=False, **_upload_data) + + _artifact.offline_mode(offline) + + with open(file_path, "rb") as out_f: + _artifact._upload(artifact_data=out_f, run_id=run, **_upload_data) + + return _artifact + + @classmethod + @pydantic.validate_call + def new_object( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + run: str, + storage: str | None, + category: Category, + obj: typing.Any, + allow_pickling: bool = True, + offline: bool = False, + ) -> typing.Self: + """Create a new artifact either locally or on the server + + Note all arguments are keyword arguments + + Parameters + ---------- + name : str + the name for this artifact + run : str + the identifier with which this artifact is associated + storage : str | None + the identifier for the storage location for this object + category : "code" | "input" | "output" + the category of this artifact + obj : Any + object to serialize and upload + allow_pickling : bool, optional + whether to allow the object to be pickled if no other + serialiazation found. 
Default is True + offline : bool, optional + whether to define this artifact locally, default is False + + """ + _serialization = serialize_object(obj, allow_pickling) + + if not _serialization or not (_serialized := _serialization[0]): + raise ValueError(f"Could not serialize object of type '{type(obj)}'") + + if not (_data_type := _serialization[1]) and not allow_pickling: + raise ValueError( + f"Could not serialize object of type '{type(obj)}' without pickling" + ) + + _checksum = calculate_sha256(_serialized, is_file=False) + _upload_data = { + "name": name, + "storage": storage, + "category": category, + "originalPath": "", + "size": sys.getsizeof(obj), + "type": _data_type, + "checksum": _checksum, + } + + _artifact = Artifact(read_only=False, **_upload_data) _artifact.offline_mode(offline) + _artifact._upload(artifact_data=_serialized, run_id=run, **_upload_data) return _artifact + def _post(self, **kwargs) -> dict[str, typing.Any]: + # The ID is the checksum, set this only if the post + # to server was successful (else offline_ prefix kept) + _identifier = self._staging["checksum"] + _response = super()._post(**kwargs) + self._storage_url = _response.get("url") + self._identifier = _identifier + return _response + + def commit(self) -> None: + raise TypeError("Cannot call method 'commit' on write-once type 'Artifact'") + + def _upload( + self, artifact_data: typing.Any, run_id: str, **_obj_parameters + ) -> None: + # If local file store then do nothing + if not self.storage_url or self._offline: + return + + # NOTE: Assumes URL for Run is always same format as Artifact + _run_artifacts_url: str = self._base_url.replace(self._label, "run") + + _response = sv_put( + url=self._storage_url, + headers={}, + data=artifact_data, + is_json=False, + timeout=UPLOAD_TIMEOUT, + ) + + self._logger.debug( + "Got status code %d when uploading artifact", + _response.status_code, + ) + + get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario=f"uploading 
artifact '{self.name}' to object storage", + response=_response, + ) + + sv_put( + url=_run_artifacts_url, + headers=self._headers, + data=_obj_parameters | {self.storage}, + ) + + get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario=f"adding artifact '{self.name}' to run '{run_id}'", + response=_response, + ) + @property def name(self) -> str: """Retrieve the name for this artifact""" @@ -108,3 +244,8 @@ def storage(self) -> str: def type(self) -> str: """Retrieve the MIME type for this artifact""" return self._get_attribute("type") + + @property + def storage_url(self) -> str | None: + """Retrieve storage URL for the artifact""" + return self._storage_url diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 93baee7f..59ae3193 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -14,6 +14,7 @@ from codecarbon.external.logger import logging from codecarbon.output_methods.emissions_data import json +from requests.models import HTTPError from simvue.config.user import SimvueConfiguration from simvue.version import __version__ @@ -113,12 +114,12 @@ def tenant(self, tenant: bool) -> None: class SimvueObject(abc.ABC): def __init__( - self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs + self, identifier: typing.Optional[str] = None, _read_only: bool = True, **kwargs ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) - self._read_only: bool = read_only - self._endpoint: str = f"{self._label}s" + self._read_only: bool = _read_only + self._endpoint: str = getattr(self, "_endpoint", f"{self._label}s") self._identifier: typing.Optional[str] = ( identifier if identifier is not None else f"offline_{uuid.uuid1()}" ) @@ -138,7 +139,7 @@ def __init__( # Recover any locally staged changes if not read-only self._staging: dict[str, typing.Any] = ( - self._get_local_staged() if 
read_only else {} + self._get_local_staged() if _read_only else {} ) self._staging |= kwargs @@ -148,6 +149,16 @@ def __init__( "User-Agent": f"Simvue Python client {__version__}", } + if identifier: + try: + self._get_attribute("id") + except HTTPError as e: + if e.response.status_code == http.HTTPStatus.NOT_FOUND: + raise ValueError( + f"Failed to retrieve {self._label} '{identifier}', " + "no such object" + ) from e + def _get_local_staged(self) -> dict[str, typing.Any]: """Retrieve any locally staged data for this identifier""" if not self._local_staging_file.exists() or not self._identifier: @@ -162,7 +173,7 @@ def _get_attribute(self, attribute: str) -> typing.Any: # In the case where the object is read-only, staging is the data # already retrieved from the server if (_attr := getattr(self, "_read_only", None)) and isinstance( - type(_attr), staging_check + type(_attr), type(staging_check) ): return self._staging[attribute] @@ -207,15 +218,15 @@ def new(cls, offline: bool = False, **kwargs): pass @classmethod - def get_all( - cls, count: int | None = None, offset: int | None = None + def get( + cls, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: _class_instance = cls(read_only=True) _url = f"{_class_instance._base_url}" _response = sv_get( _url, headers=_class_instance._headers, - params={"start": offset, "count": count}, + params={"start": offset, "count": count} | kwargs, ) _json_response = get_json_from_response( response=_response, @@ -248,8 +259,8 @@ def read_only(self, is_read_only: bool) -> None: self._staging = self._get_local_staged() def commit(self) -> None: - if not self._staging: - return + if self._read_only: + raise AttributeError("Cannot commit object in 'read-only' mode") if self._offline: _offline_dir: pathlib.Path = self._user_config.offline.cache @@ -258,9 +269,10 @@ def commit(self) -> None: return # Initial commit is creation of object + # if staging is empty 
then we do not need to use PUT if not self._identifier or self._identifier.startswith("offline_"): self._post(**self._staging) - else: + elif self._staging: self._put(**self._staging) # Clear staged changes @@ -309,8 +321,9 @@ def _post(self, **kwargs) -> dict[str, typing.Any]: f"Expected dictionary from JSON response during {self._label} creation " f"but got '{type(_json_response)}'" ) - self._logger.debug("'%s' created successfully", _json_response["id"]) - self._identifier = _json_response["id"] + if _id := _json_response.get("id"): + self._logger.debug("'%s' created successfully", _id) + self._identifier = _id return _json_response @@ -343,7 +356,7 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: return _json_response - def delete(self) -> dict[str, typing.Any]: + def delete(self, **kwargs) -> dict[str, typing.Any]: if self._get_local_staged(): with self._local_staging_file.open() as in_f: _local_data = json.load(in_f) @@ -360,7 +373,7 @@ def delete(self) -> dict[str, typing.Any]: raise RuntimeError( f"Identifier for instance of {self.__class__.__name__} Unknown" ) - _response = sv_delete(url=self.url, headers=self._headers) + _response = sv_delete(url=self.url, headers=self._headers, params=kwargs) _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 75aa74fd..2c7e4774 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -11,6 +11,7 @@ import typing import pydantic + from .base import SimvueObject, Visibility, staging_check, write_only from simvue.models import FOLDER_REGEX @@ -25,9 +26,7 @@ class Folder(SimvueObject): """ - def __init__( - self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs - ) -> None: + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: """Initialise a Folder If an identifier is provided a connection 
will be made to the @@ -44,7 +43,7 @@ def __init__( any additional arguments to be passed to the object initialiser """ self.visibility = Visibility(self) - super().__init__(identifier, read_only, **kwargs) + super().__init__(identifier, **kwargs) @classmethod @pydantic.validate_call @@ -55,7 +54,7 @@ def new( offline: bool = False, ): """Create a new Folder on the Simvue server with the given path""" - _folder = Folder(path=path) + _folder = Folder(path=path, _read_only=False) _folder.offline_mode(offline) return _folder @@ -128,3 +127,6 @@ def ttl(self) -> int: def ttl(self, time_seconds: int) -> None: """Update the retention period for this folder""" self._staging["ttl"] = time_seconds + + def delete(self, *, recursive: bool, delete_runs: bool) -> dict[str, typing.Any]: + return super().delete(recursive=recursive, runs=delete_runs) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 5c1e756a..a06c7864 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -16,9 +16,7 @@ class Run(SimvueObject): - def __init__( - self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs - ) -> None: + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: """Initialise a Run If an identifier is provided a connection will be made to the @@ -29,13 +27,11 @@ def __init__( ---------- identifier : str, optional the remote server unique id for the target run - read_only : bool, optional - create object in read-only mode **kwargs : dict any additional arguments to be passed to the object initialiser """ self.visibility = Visibility(self) - super().__init__(identifier, read_only, **kwargs) + super().__init__(identifier, **kwargs) @classmethod @pydantic.validate_call @@ -46,7 +42,7 @@ def new( offline: bool = False, ): """Create a new Folder on the Simvue server with the given path""" - _run = Run(folder=folder, system=None, status="created") + _run = Run(folder=folder, system=None, status="created", 
_read_only=False) _run.offline_mode(offline) return _run diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 60202fe9..6f11af33 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -10,10 +10,10 @@ def __init__( self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs ) -> None: """Retrieve an alert from the Simvue server by identifier""" - super().__init__(identifier, read_only, **kwargs) - self.status = Status(self) self._label = "storage" self._endpoint = self._label + super().__init__(identifier, read_only, **kwargs) + self.status = Status(self) @classmethod def new(cls, **kwargs): diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 22632a07..d4f989c7 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -8,6 +8,7 @@ import typing import http +import pydantic from simvue.api.request import get_json_from_response from simvue.api.request import get as sv_get @@ -31,10 +32,14 @@ def __new__(cls, identifier: str | None = None, **kwargs): raise RuntimeError(f"Unknown type '{_storage_pre.type}'") @classmethod - def get_all( - cls, count: int | None = None, offset: int | None = None + @pydantic.validate_call + def get( + cls, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, FileStorage | S3Storage], None, None]: - _class_instance = StorageBase(read_only=True) + # Currently no storage filters + kwargs.pop("filters", None) + + _class_instance = StorageBase(read_only=True, **kwargs) _url = f"{_class_instance._base_url}" _response = sv_get( _url, diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index 254b3dd0..fc1989bd 100644 --- a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -24,6 +24,7 @@ def new( disable_check=disable_check, tenant_useable=tenant_usable, default=default, + 
_read_only=False, ) _storage.offline_mode(offline) return _storage diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index 176e89ad..d2962f05 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -43,6 +43,7 @@ def new( disable_check=disable_check, tenant_useable=tenant_usable, default=default, + _read_only=False, ) _storage.offline_mode(offline) return _storage diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index a5ff441f..8af118a8 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -16,7 +16,7 @@ def new( ): """Create a new Tag on the Simvue server""" _data: dict[str, typing.Any] = {"name": name} - _tag = Tag(name=name) + _tag = Tag(name=name, _read_only=False) _tag.offline_mode(offline) return _tag @@ -52,3 +52,12 @@ def description(self) -> str: @pydantic.validate_call def description(self, description: str) -> None: self._staging["description"] = description + + @classmethod + def get( + cls, *, count: int | None = None, offset: int | None = None, **kwargs + ) -> dict[str, "SimvueObject"]: + # There are currently no tag filters + kwargs.pop("filters", None) + + return super().get(count=count, offset=offset, **kwargs) diff --git a/simvue/api/request.py b/simvue/api/request.py index 632dc7ed..c31705c4 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -140,11 +140,7 @@ def put( else: data_sent = data - response = requests.put(url, headers=headers, data=data_sent, timeout=timeout) - - response.raise_for_status() - - return response + return requests.put(url, headers=headers, data=data_sent, timeout=timeout) @retry( @@ -156,7 +152,7 @@ def put( def get( url: str, headers: dict[str, str], - params: dict[str, str | int | float] | None = None, + params: dict[str, str | int | float | None] | None = None, timeout: int = DEFAULT_API_TIMEOUT, ) -> requests.Response: """HTTP GET @@ -175,10 +171,7 @@ def get( requests.Response response from executing 
GET """ - response = requests.get(url, headers=headers, timeout=timeout, params=params) - response.raise_for_status() - - return response + return requests.get(url, headers=headers, timeout=timeout, params=params) @retry( @@ -188,7 +181,10 @@ def get( reraise=True, ) def delete( - url: str, headers: dict[str, str], timeout: int = DEFAULT_API_TIMEOUT + url: str, + headers: dict[str, str], + timeout: int = DEFAULT_API_TIMEOUT, + params: dict[str, typing.Any] | None = None, ) -> requests.Response: """HTTP DELETE @@ -200,16 +196,15 @@ def delete( headers for the post request timeout : int, optional timeout of request, by default DEFAULT_API_TIMEOUT + params : dict, optional + parameters for deletion Returns ------- requests.Response response from executing DELETE """ - response = requests.delete(url, headers=headers, timeout=timeout) - response.raise_for_status() - - return response + return requests.delete(url, headers=headers, timeout=timeout, params=params) def get_json_from_response( @@ -231,7 +226,7 @@ def get_json_from_response( details = "could not request JSON response" else: error_str += f"with status {_status_code}" - details = (json_response or {}).get("details") + details = (json_response or {}).get("detail") try: txt_response = response.text diff --git a/simvue/client.py b/simvue/client.py index f8a01cdf..f091cb54 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -6,6 +6,7 @@ server including deletion and retrieval. 
""" +import contextlib import json import logging import os @@ -28,6 +29,7 @@ from .models import FOLDER_REGEX, NAME_REGEX from .config.user import SimvueConfiguration from .api.request import get_json_from_response +from .api.objects import Run, Folder, Tag, Artifact CONCURRENT_DOWNLOADS = 10 @@ -146,42 +148,22 @@ def get_run_id_from_name( if either information could not be retrieved from the server, or multiple/no runs are found """ - params: dict[str, str] = {"filters": json.dumps([f"name == {name}"])} + _runs = Run.get(filters=json.dumps([f"name == {name}"])) - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/runs", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario="Retrieval of run ID from name", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary as response for ID " - f"retrieval but got {type(json_response)}" - ) - - if not (response_data := json_response.get("data")): - raise RuntimeError(f"No ID found for run '{name}'") - - if len(response_data) == 0: + if len(_runs) == 0: raise RuntimeError("Could not collect ID - no run found with this name.") - if len(response_data) > 1: + if len(_runs) > 1: raise RuntimeError( "Could not collect ID - more than one run exists with this name." 
) - if not (first_id := response_data[0].get("id")): - raise RuntimeError("Failed to retrieve identifier for run.") - return first_id + + _id, _ = next(_runs) + + return _id @prettify_pydantic @pydantic.validate_call - def get_run(self, run_id: str) -> typing.Optional[dict[str, typing.Any]]: + def get_run(self, run_id: str) -> typing.Optional[Run]: """Retrieve a single run Parameters @@ -199,26 +181,7 @@ def get_run(self, run_id: str) -> typing.Optional[dict[str, typing.Any]]: RuntimeError if retrieval of information from the server on this run failed """ - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}", headers=self._headers - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of run '{run_id}'", - response=response, - ) - - if response.status_code == http.HTTPStatus.NOT_FOUND: - return None - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during run retrieval " - f"but got '{type(json_response)}'" - ) - return json_response + return Run(identifier=run_id, read_only=True) @prettify_pydantic @pydantic.validate_call @@ -235,17 +198,7 @@ def get_run_name_from_id(self, run_id: str) -> str: str the registered name for the run """ - if not run_id: - raise ValueError("Expected value for run_id but got None") - - _run_data = self.get_run(run_id) - - if not _run_data: - raise RuntimeError(f"Failed to retrieve data for run '{run_id}'") - - if not (_name := _run_data.get("name")): - raise RuntimeError("Expected key 'name' in server response") - return _name + return Run(identifier=run_id).name @prettify_pydantic @pydantic.validate_call @@ -367,27 +320,7 @@ def delete_run(self, run_id: str) -> typing.Optional[dict]: RuntimeError if the deletion failed due to server request error """ - - response = requests.delete( - f"{self._user_config.server.url}/api/runs/{run_id}", - 
headers=self._headers, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Deletion of run '{run_id}'", - response=response, - ) - - logger.debug(f"Run '{run_id}' deleted successfully") - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during run deletion " - f"but got '{type(json_response)}'" - ) - - return json_response or None + return Run(identifier=run_id).delete() or None def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: """Retrieve folder identifier for the specified path if found @@ -402,22 +335,10 @@ def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: str | None if a match is found, return the identifier of the folder """ - params: dict[str, str] = {"filters": json.dumps([f"path == {path}"])} - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/folders", - headers=self._headers, - params=params, - ) - - if ( - response.status_code == http.HTTPStatus.OK - and (response_data := response.json().get("data")) - and (identifier := response_data[0].get("id")) - ): - return identifier + _folders = Folder.get(filters=json.dumps([f"path == {path}"])) + _id, _ = next(_folders) - return None + return _id @prettify_pydantic @pydantic.validate_call @@ -511,30 +432,11 @@ def delete_folder( f"Deletion of folder '{folder_path}' failed, " "folder does not exist." 
) - - params: dict[str, bool] = {"runs": True} if remove_runs else {} - params |= {"recursive": recursive} - - response = requests.delete( - f"{self._user_config.server.url}/api/folders/{folder_id}", - headers=self._headers, - params=params, + _response = Folder(identifier=folder_id).delete( + delete_runs=remove_runs, recursive=recursive ) - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Deletion of folder '{folder_path}'", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during folder deletion " - f"but got '{type(json_response)}'" - ) - - runs: list[dict] = json_response.get("runs", []) - return runs + return _response.get("runs", []) @prettify_pydantic @pydantic.validate_call @@ -606,30 +508,7 @@ def _retrieve_artifact_from_server( run_id: str, name: str, ) -> typing.Union[dict, list]: - params: dict[str, str | None] = {"name": name} - - response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}/artifacts", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", - response=response, - ) - - if isinstance(json_response, dict) and (detail := json_response.get("detail")): - raise RuntimeError(f"Failed to retrieve artifact '{name}': {detail}") - - if not isinstance(json_response, list): - raise RuntimeError( - "Expected list from JSON response during retrieval of " - f"artifact but got '{type(json_response)}'" - ) - - return json_response + return Artifact.get(runs=[run_id], name=name) @prettify_pydantic @pydantic.validate_call @@ -1463,31 +1342,12 @@ def delete_tag(self, tag_id: str) -> None: RuntimeError if the deletion failed due to a server request error """ - - response = requests.delete( - 
f"{self._user_config.server.url}/api/tags/{tag_id}", - headers=self._headers, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Deletion of tag '{tag_id}'", - response=response, - ) - - logger.debug(f"Tag '{tag_id}' deleted successfully") - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during run deletion " - f"but got '{type(json_response)}'" - ) - - return json_response or None + with contextlib.suppress(ValueError): + Tag(identifier=tag_id).delete() @prettify_pydantic @pydantic.validate_call - def get_tag(self, tag_id: str) -> typing.Optional[dict[str, typing.Any]]: + def get_tag(self, tag_id: str) -> Tag | None: """Retrieve a single tag Parameters @@ -1497,7 +1357,7 @@ def get_tag(self, tag_id: str) -> typing.Optional[dict[str, typing.Any]]: Returns ------- - dict[str, Any] + Tag response containing information on the given tag Raises @@ -1505,23 +1365,7 @@ def get_tag(self, tag_id: str) -> typing.Optional[dict[str, typing.Any]]: RuntimeError if retrieval of information from the server on this tag failed """ - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/tag/{tag_id}", headers=self._headers - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of tag '{tag_id}'", - response=response, - ) - - if response.status_code == http.HTTPStatus.NOT_FOUND: + try: + return Tag(identifier=tag_id) + except ValueError: return None - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during tag retrieval " - f"but got '{type(json_response)}'" - ) - return json_response diff --git a/simvue/run.py b/simvue/run.py index 4c2b3519..12135ed2 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -31,6 +31,8 @@ import msgpack import psutil +from simvue.api.objects.alert.fetch import Alert + from 
.config.user import SimvueConfiguration import simvue.api.request as sv_api @@ -45,12 +47,18 @@ from .eco import SimvueEmissionsTracker from .utilities import ( calculate_sha256, - compare_alerts, skip_if_failed, validate_timestamp, simvue_timestamp, ) -from .api.objects import Run as RunObject, Artifact +from .api.objects import ( + Run as RunObject, + Artifact, + MetricsThresholdAlert, + MetricsRangeAlert, + UserAlert, + EventsAlert, +) try: from typing import Self @@ -80,7 +88,7 @@ def _wrapper(self: Self, *args: typing.Any, **kwargs: typing.Any) -> typing.Any: self._active = False raise RuntimeError("Cannot update expired Simvue Run") - if not self._simvue: + if not self._sv_obj: raise RuntimeError( "Simvue Run must be initialised before calling " f"'{function.__name__}'" @@ -480,11 +488,8 @@ def _start(self, reconnect: bool = False) -> bool: if self._sv_obj: self._sv_obj.status = self._status + self._sv_obj.commit() - if reconnect: - self._sv_obj.system = get_system() - - self._sv_obj.commit() self._start_time = time.time() if self._pid == 0: @@ -1334,7 +1339,7 @@ def save_file( bool whether the upload was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot save files, run not initialised") return False @@ -1430,7 +1435,7 @@ def save_directory( bool if the directory save was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot save directory, run not inirialised") return False @@ -1576,7 +1581,7 @@ def close(self) -> bool: """ self._executor.wait_for_completion() - if not self._simvue: + if not self._sv_obj: self._error("Cannot close run, not initialised") return False @@ -1616,7 +1621,7 @@ def set_folder_details( bool returns True if update was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot update folder details, run was not initialised") return False @@ -1666,7 +1671,7 @@ def add_alerts( bool returns True if successful """ - if not self._simvue: + if not self._sv_obj: 
self._error("Cannot add alerts, run not initialised")
             return False
 
@@ -1675,10 +1680,10 @@ def add_alerts(
 
         if names and not ids:
             try:
-                if alerts := self._simvue.list_alerts():
+                if alerts := [_alert for _, _alert in Alert.get()]:
                     for alert in alerts:
-                        if alert["name"] in names:
-                            ids.append(alert["id"])
+                        if alert.name in names:
+                            ids.append(alert.id)
             except RuntimeError as e:
                 self._error(f"{e.args[0]}")
                 return False
@@ -1689,14 +1694,8 @@ def add_alerts(
             self._error("Need to provide alert ids or alert names")
             return False
 
-        data: dict[str, typing.Any] = {"id": self._id, "alerts": ids}
-
-        try:
-            if self._simvue.update(data):
-                return True
-        except RuntimeError as e:
-            self._error(f"{e.args[0]}")
-            return False
+        self._sv_obj.alerts = self._sv_obj.alerts + ids
+        self._sv_obj.commit()
 
         return False
 
@@ -1792,7 +1791,7 @@ def create_alert(
         str | None
             returns the created alert ID if successful
         """
-        if not self._simvue:
+        if not self._sv_obj:
             self._error("Cannot add alert, run not initialised")
             return None
 
@@ -1808,74 +1807,93 @@ def create_alert(
             )
             return None
 
-        alert_definition = {}
-
-        if source == "metrics":
-            alert_definition["aggregation"] = aggregation
-            alert_definition["metric"] = metric
-            alert_definition["window"] = window
-            alert_definition["rule"] = rule
-            alert_definition["frequency"] = frequency
-            if threshold is not None:
-                alert_definition["threshold"] = threshold
-            elif range_low is not None and range_high is not None:
-                alert_definition["range_low"] = range_low
-                alert_definition["range_high"] = range_high
+        _alert: EventsAlert | MetricsRangeAlert | MetricsThresholdAlert | UserAlert
+
+        if source == "metrics" and threshold is not None:
+            if not metric or not aggregation or not rule:
+                self._error("Missing arguments for alert of type 'metric threshold'")
+                return None
+
+            _alert = MetricsThresholdAlert.new(
+                name=name,
+                metric=metric,
+                window=window,
+                aggregation=aggregation,
+                rule=rule,
+                notification=notification,
+                threshold=threshold,
+                frequency=frequency or 60,
offline=self._mode == "offline", + ) + elif source == "metrics": + if ( + not metric + or not aggregation + or not rule + or not range_low + or not range_high + ): + self._error("Missing arguments for alert of type 'metric range'") + return None + + _alert = MetricsRangeAlert.new( + name=name, + metric=metric, + window=window, + aggregation=aggregation, + notification=notification, + rule=rule, + range_low=range_low, + range_high=range_high, + frequency=frequency or 60, + offline=self._mode == "offline", + ) elif source == "events": - alert_definition["pattern"] = pattern - alert_definition["frequency"] = frequency + if not pattern: + self._error("Missing arguments for alert of type 'events'") + return None + + _alert = EventsAlert.new( + name=name, + pattern=pattern, + notification=notification, + frequency=frequency or 60, + offline=self._mode == "offline", + ) else: - alert_definition = None + _alert = UserAlert.new( + name=name, notification=notification, offline=self._mode == "offline" + ) - alert: dict[str, typing.Any] = { - "name": name, - "notification": notification, - "source": source, - "alert": alert_definition, - "description": description, - "abort": trigger_abort, - } + _alert.abort = trigger_abort # Check if the alert already exists - alert_id: typing.Optional[str] = None + _alert_id: typing.Optional[str] = None + try: - alerts = self._simvue.list_alerts() + _alerts = Alert.get() except RuntimeError as e: self._error(f"{e.args[0]}") return None - if alerts: - for existing_alert in alerts: - if existing_alert["name"] == alert["name"]: - if compare_alerts(existing_alert, alert): - alert_id = existing_alert["id"] - logger.info("Existing alert found with id: %s", alert_id) - break - - if not alert_id: - try: - logger.debug(f"Creating new alert with definition: {alert}") - response = self._simvue.add_alert(alert) - except RuntimeError as e: - self._error(f"{e.args[0]}") - return None + if _alerts: + for _, _existing_alert in _alerts: + if 
_existing_alert.name == _alert.name and _existing_alert.compare( + _alert + ): + _alert_id = _existing_alert.id + logger.info("Existing alert found with id: %s", _existing_alert.id) + break - if not (alert_id := (response or {}).get("id")): - self._error("unable to create alert") - return None + if not _alert_id: + _alert.commit() + _alert_id = _alert.id - if alert_id: - # TODO: What if we keep existing alerts/add a new one later? - data = {"id": self._id, "alerts": [alert_id]} - logger.debug(f"Updating run with info: {data}") + self._sv_obj.alerts = self._sv_obj.alerts + [_alert_id] - try: - self._simvue.update(data) - except RuntimeError as e: - self._error(f"{e.args[0]}") - return None + self._sv_obj.commit() - return alert_id + return _alert_id @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @@ -1900,14 +1918,9 @@ def log_alert( if state not in ("ok", "critical"): self._error('state must be either "ok" or "critical"') return False - if not self._simvue: - self._error("Cannot log alert, run not initialised") - return False - try: - self._simvue.set_alert_state(identifier, state) - except RuntimeError as e: - self._error(f"{e.args[0]}") - return False + _alert = Alert(identifier=identifier) + _alert.state = state + _alert.commit() return True diff --git a/simvue/serialization.py b/simvue/serialization.py index d8252713..7fcb7a22 100644 --- a/simvue/serialization.py +++ b/simvue/serialization.py @@ -5,6 +5,7 @@ Contains serializers for storage of objects on the Simvue server """ +import contextlib import typing import pickle import pandas @@ -76,13 +77,11 @@ def serialize_object( elif _is_torch_tensor(data): return _serialize_torch_tensor(data) elif module_name == "builtins" and class_name == "module" and not allow_pickle: - try: + with contextlib.suppress(ImportError): import matplotlib.pyplot if data == matplotlib.pyplot: return _serialize_matplotlib(data) - except ImportError: - pass elif serialized := _serialize_json(data): return 
serialized diff --git a/simvue/utilities.py b/simvue/utilities.py index d87e8d67..dee937f2 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -17,6 +17,9 @@ from datetime import timezone +if typing.TYPE_CHECKING: + pass + CHECKSUM_BLOCK_SIZE = 4096 EXTRAS: tuple[str, ...] = ("plot", "torch") @@ -341,28 +344,6 @@ def validate_timestamp(timestamp): return True -def compare_alerts(first, second): - """ """ - for key in ("name", "description", "source", "frequency", "notification"): - if key in first and key in second: - if not first[key]: - continue - - if first[key] != second[key]: - return False - - if "alerts" in first and "alerts" in second: - for key in ("rule", "window", "metric", "threshold", "range_low", "range_high"): - if key in first["alerts"] and key in second["alerts"]: - if not first[key]: - continue - - if first["alerts"][key] != second["alerts"]["key"]: - return False - - return True - - def simvue_timestamp(date_time: typing.Optional[datetime.datetime] = None) -> str: """Return the Simvue valid timestamp diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index ab4118a3..5ce7ba91 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -17,7 +17,7 @@ def test_folder_creation_online() -> None: assert not _folder.visibility.public assert not _folder.visibility.tenant assert not _folder.visibility.users - _folders = Folder.get_all(count=10) + _folders = Folder.get(count=10) assert _folders assert _folders[_folder.id] assert _folders[_folder.id]._read_only diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index f1053c81..47aad7ef 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -29,7 +29,7 @@ def test_create_s3_online() -> None: assert _storage.status.status assert _storage.status.timestamp assert _storage.user - assert dict(Storage.get_all()) + assert dict(Storage.get()) _storage.delete() diff --git a/tests/unit/test_user_alert.py 
b/tests/unit/test_user_alert.py index 6841508f..afc5e6a7 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -16,7 +16,7 @@ def test_user_alert_creation_online() -> None: assert _alert.source == "user" assert _alert.name == f"users_alert_{_uuid}" assert _alert.notification == "none" - assert dict(Alert.get_all()) + assert dict(Alert.get()) _alert.delete() From 4414315daa833386115e8094f889c10ebed7dfad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 22 Nov 2024 08:24:18 +0000 Subject: [PATCH 012/163] Added custom URL class for mutable path --- pyproject.toml | 1 - simvue/api/url.py | 64 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 simvue/api/url.py diff --git a/pyproject.toml b/pyproject.toml index a18750a9..e09d3586 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,7 +55,6 @@ tabulate = "^0.9.0" randomname = "^0.2.1" codecarbon = "^2.7.1" numpy = "^2.1.2" -boltons = "^24.1.0" [tool.poetry.extras] plot = ["matplotlib", "plotly"] diff --git a/simvue/api/url.py b/simvue/api/url.py new file mode 100644 index 00000000..76efb7e5 --- /dev/null +++ b/simvue/api/url.py @@ -0,0 +1,64 @@ +import typing +import urllib.parse +import copy + + +class URL: + def __init__(self, url: str) -> None: + _url = urllib.parse.urlparse(url) + self._scheme: str = _url.scheme + self._path: str = _url.path + self._host: str | None = _url.hostname + + if self._host and self._host.endswith("/"): + self._host = self._host[:-1] + + self._port: int | None = _url.port + self._fragment: str = _url.fragment + + def __truediv__(self, other: str) -> typing.Self: + _new = copy.deepcopy(self) + _new /= other + return _new + + def __itruediv__(self, other: str) -> typing.Self: + if other.startswith("/"): + other = other[1:] + if other.endswith("/"): + other = other[:-1] + self._path = f"{self._path}/{other}" + return self + + @property + def scheme(self) -> str: + return 
self._scheme + + @property + def path(self) -> str: + return self._path + + @property + def hostname(self) -> str | None: + return self._host + + @property + def fragment(self) -> str: + return self._fragment + + @property + def port(self) -> int | None: + return self._port + + def __str__(self) -> str: + _out_str: str = "" + if self.scheme: + _out_str += f"{self.scheme}://" + if self.hostname: + _out_str += self.hostname + if self.port: + _out_str += f":{self.port}" + if self.path: + _out_str += self.path + if self.fragment: + _out_str += self.fragment + return _out_str From 612b95a566e1cb4b8a18b583a38f90574b65ea4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 22 Nov 2024 08:25:17 +0000 Subject: [PATCH 013/163] Use custom URL class, fix URLs for artifacts and make read-only default --- simvue/api/objects/alert/base.py | 6 +-- simvue/api/objects/alert/fetch.py | 2 + simvue/api/objects/artifact.py | 33 +++++++++---- simvue/api/objects/base.py | 51 ++++++++------------ simvue/api/objects/metric.py | 4 -- simvue/api/objects/run.py | 78 +++++++++++++++++++++++++++---- simvue/api/request.py | 11 +++-- simvue/client.py | 60 ++++++------------------ simvue/config/parameters.py | 8 ++-- simvue/factory/proxy/remote.py | 34 ++++++-------- simvue/run.py | 2 +- tests/functional/test_client.py | 32 +++++++++++++ 12 files changed, 190 insertions(+), 131 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index f1d3ddce..8602b0ac 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -22,12 +22,10 @@ class AlertBase(SimvueObject): def new(cls, **kwargs): pass - def __init__( - self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs - ) -> None: + def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" - super().__init__(identifier, read_only, 
**kwargs) + super().__init__(identifier, **kwargs) def compare(self, other: "AlertBase") -> bool: return type(self) is type(other) and self.name == other.name diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 3b17d379..19767d0a 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -31,6 +31,8 @@ def __new__(cls, identifier: str | None = None, **kwargs) -> AlertType: return MetricsThresholdAlert(identifier=identifier, **kwargs) elif _alert_pre.source == "metrics": return MetricsRangeAlert(identifier=identifier, **kwargs) + elif _alert_pre.source == "user": + return UserAlert(identifier=identifier, **kwargs) raise RuntimeError(f"Unknown source type '{_alert_pre.source}'") diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 3cb46316..dff22ed9 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -12,6 +12,7 @@ import os.path import sys +from simvue.api.url import URL from simvue.models import NAME_REGEX from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 from simvue.api.objects.base import SimvueObject @@ -29,6 +30,7 @@ class Artifact(SimvueObject): def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: super().__init__(identifier, **kwargs) self._storage_url: str | None = None + self._storage: str | None = None self._label = "artifact" @classmethod @@ -92,6 +94,8 @@ def new_file( } _artifact = Artifact(_read_only=False, **_upload_data) + _artifact._storage = storage + _artifact._post(**_artifact._staging) _artifact.offline_mode(offline) @@ -158,6 +162,8 @@ def new_object( } _artifact = Artifact(read_only=False, **_upload_data) + _artifact._storage = storage + _artifact._post(**_artifact._staging) _artifact.offline_mode(offline) _artifact._upload(artifact_data=_serialized, run_id=run, **_upload_data) return _artifact @@ -167,6 +173,7 @@ def _post(self, **kwargs) -> dict[str, 
typing.Any]: # to server was successful (else offline_ prefix kept) _identifier = self._staging["checksum"] _response = super()._post(**kwargs) + self._storage = _response.get("storage_id") self._storage_url = _response.get("url") self._identifier = _identifier return _response @@ -181,11 +188,13 @@ def _upload( if not self.storage_url or self._offline: return - # NOTE: Assumes URL for Run is always same format as Artifact - _run_artifacts_url: str = self._base_url.replace(self._label, "run") + # NOTE: Assumes URL for Run artifacts is always same + _run_artifacts_url: URL = ( + URL(self._user_config.server.url) / f"runs/{run_id}/artifacts" + ) _response = sv_put( - url=self._storage_url, + url=f"{self._storage_url}", headers={}, data=artifact_data, is_json=False, @@ -199,22 +208,26 @@ def _upload( get_json_from_response( expected_status=[http.HTTPStatus.OK], - scenario=f"uploading artifact '{self.name}' to object storage", + allow_parse_failure=True, # JSON response from S3 not parsible + scenario=f"uploading artifact '{_obj_parameters['name']}' to object storage", response=_response, ) - sv_put( - url=_run_artifacts_url, + _response = sv_put( + url=f"{_run_artifacts_url}", headers=self._headers, - data=_obj_parameters | {self.storage}, + data=_obj_parameters | {"storage": self.storage}, ) get_json_from_response( expected_status=[http.HTTPStatus.OK], - scenario=f"adding artifact '{self.name}' to run '{run_id}'", + scenario=f"adding artifact '{_obj_parameters['name']}' to run '{run_id}'", response=_response, ) + def _get(self, storage: str | None = None) -> dict[str, typing.Any]: + return super()._get(storage=self._storage) + @property def name(self) -> str: """Retrieve the name for this artifact""" @@ -236,9 +249,9 @@ def original_path(self) -> str: return self._get_attribute("originalPath") @property - def storage(self) -> str: + def storage(self) -> str | None: """Retrieve the storage identifier for this artifact""" - return self._get_attribute("storage") + 
return self._storage @property def type(self) -> str: diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 59ae3193..bb5d9473 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -9,7 +9,6 @@ import pathlib import typing import uuid -import boltons.urlutils as bo_url import http from codecarbon.external.logger import logging @@ -25,6 +24,7 @@ delete as sv_delete, get_json_from_response, ) +from simvue.api.url import URL def staging_check(member_func: typing.Callable) -> typing.Callable: @@ -169,7 +169,7 @@ def _get_local_staged(self) -> dict[str, typing.Any]: return _staged_data.get(self._label, {}).get(self._identifier, {}) - def _get_attribute(self, attribute: str) -> typing.Any: + def _get_attribute(self, attribute: str, *default) -> typing.Any: # In the case where the object is read-only, staging is the data # already retrieved from the server if (_attr := getattr(self, "_read_only", None)) and isinstance( @@ -180,6 +180,9 @@ def _get_attribute(self, attribute: str) -> typing.Any: try: return self._get()[attribute] except KeyError as e: + if default: + return default[0] + if self._offline: raise AttributeError( f"A value for attribute '{attribute}' has " @@ -283,26 +286,18 @@ def id(self) -> typing.Optional[str]: return self._identifier @property - def _url_path(self) -> pathlib.Path: - return pathlib.Path(f"api/{self._endpoint}") - - @property - def _base_url(self) -> str: - _url = bo_url.URL(self._user_config.server.url) - _url.path = self._url_path - return f"{_url}" + def _base_url(self) -> URL: + return URL(self._user_config.server.url) / self._endpoint @property - def url(self) -> typing.Optional[str]: + def url(self) -> typing.Optional[URL]: if self._identifier is None: return None - _url = bo_url.URL(self._user_config.server.url) - _url.path = f"{self._url_path / self._identifier}" - return f"{_url}" + return self._base_url / self._identifier def _post(self, **kwargs) -> dict[str, typing.Any]: _response = 
sv_post( - url=self._base_url, headers=self._headers, data=kwargs, is_json=True + url=f"{self._base_url}", headers=self._headers, data=kwargs, is_json=True ) if _response.status_code == http.HTTPStatus.FORBIDDEN: @@ -313,7 +308,7 @@ def _post(self, **kwargs) -> dict[str, typing.Any]: _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Creation of {self.__class__.__name__.lower()} '{kwargs}'", + scenario=f"Creation of {self._label} '{kwargs}'", ) if not isinstance(_json_response, dict): @@ -329,9 +324,7 @@ def _post(self, **kwargs) -> dict[str, typing.Any]: def _put(self, **kwargs) -> dict[str, typing.Any]: if not self.url: - raise RuntimeError( - f"Identifier for instance of {self.__class__.__name__} Unknown" - ) + raise RuntimeError(f"Identifier for instance of {self._label} Unknown") _response = sv_put( url=self.url, headers=self._headers, data=kwargs, is_json=True ) @@ -344,7 +337,7 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Creation of {self.__class__.__name__.lower()} '{self._identifier}", + scenario=f"Creation of {self._label} '{self._identifier}", ) if not isinstance(_json_response, dict): @@ -370,14 +363,12 @@ def delete(self, **kwargs) -> dict[str, typing.Any]: return {"id": self._identifier} if not self.url: - raise RuntimeError( - f"Identifier for instance of {self.__class__.__name__} Unknown" - ) - _response = sv_delete(url=self.url, headers=self._headers, params=kwargs) + raise RuntimeError(f"Identifier for instance of {self._label} Unknown") + _response = sv_delete(url=f"{self.url}", headers=self._headers, params=kwargs) _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], - scenario=f"Deletion of {self.__class__.__name__.lower()} '{self._identifier}'", + scenario=f"Deletion of {self._label} 
'{self._identifier}'", ) self._logger.debug("'%s' deleted successfully", self._identifier) @@ -388,19 +379,17 @@ def delete(self, **kwargs) -> dict[str, typing.Any]: ) return _json_response - def _get(self) -> dict[str, typing.Any]: + def _get(self, **kwargs) -> dict[str, typing.Any]: if self._offline: return self._get_local_staged() if not self.url: - raise RuntimeError( - f"Identifier for instance of {self.__class__.__name__} Unknown" - ) - _response = sv_get(url=self.url, headers=self._headers) + raise RuntimeError(f"Identifier for instance of {self._label} Unknown") + _response = sv_get(url=f"{self.url}", headers=self._headers, params=kwargs) _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of {self.__class__.__name__.lower()} '{self._identifier}'", + scenario=f"Retrieval of {self._label} '{self._identifier}'", ) self._logger.debug("'%s' retrieved successfully", self._identifier) diff --git a/simvue/api/objects/metric.py b/simvue/api/objects/metric.py index 80a60ca9..33fe3609 100644 --- a/simvue/api/objects/metric.py +++ b/simvue/api/objects/metric.py @@ -10,7 +10,3 @@ def __init__( **kwargs, ) -> None: super().__init__(run_identifier, read_only, **kwargs) - - @property - def url(self) -> str: - return f"{self._base_url}/{self._url_path}" diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index a06c7864..880c9d1c 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -2,10 +2,10 @@ import typing import pydantic import datetime -import boltons.urlutils as bo_url from .base import SimvueObject, staging_check, Visibility, write_only from simvue.api.request import get as sv_get, put as sv_put, get_json_from_response +from simvue.api.url import URL from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT Status = typing.Literal[ @@ -207,11 +207,11 @@ def endtime(self, endtime: datetime.datetime) -> None: @write_only def send_heartbeat(self) -> 
dict[str, typing.Any] | None: - if self._offline: + if self._offline or not self._identifier: return None - _url = bo_url.URL(self._user_config.server.url) - _url.path = f"{self._url_path / 'heartbeat' / self._identifier}" + _url = self._base_url + _url /= f"heartbeat/{self._identifier}" _response = sv_put(f"{_url}", headers=self._headers, data={}) _json_response = get_json_from_response( response=_response, @@ -225,18 +225,32 @@ def send_heartbeat(self) -> dict[str, typing.Any] | None: ) return _json_response + @property + def _abort_url(self) -> URL | None: + if not self._identifier: + return None + _url = self._base_url + _url /= f"abort/{self._identifier}" + return _url + + @property + def _artifact_url(self) -> URL | None: + if not self._identifier or not self.url: + return None + _url = self.url + _url /= "artifacts" + return _url + @property def abort_trigger(self) -> bool: - if self._offline: + if self._offline or not self._identifier: return False - _url = bo_url.URL(self._user_config.server.url) - _url.path = f"{self._url_path}/abort" - _response = sv_get(f"{_url}", headers=self._headers) + _response = sv_get(f"{self._abort_url}", headers=self._headers) _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario="Retrieving abort status", + scenario=f"Retrieving abort status for run '{self.id}'", ) if not isinstance(_json_response, dict): raise RuntimeError( @@ -244,3 +258,49 @@ def abort_trigger(self) -> bool: f"but got '{type(_json_response)}'" ) return _json_response.get("status", False) + + @property + def artifacts(self) -> list[dict[str, typing.Any]]: + """Retrieve the artifacts for this run""" + if self._offline or not self._artifact_url: + return [] + + _response = sv_get(url=self._artifact_url, headers=self._headers) + + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieving artifacts for run '{self.id}'", + ) + + if not 
isinstance(_json_response, list):
+            raise RuntimeError(
+                f"Expected list from JSON response during {self._label} "
+                f"'{self.id}' artifact retrieval "
+                f"but got '{type(_json_response)}'"
+            )
+
+        return _json_response
+
+    @pydantic.validate_call
+    def abort(self, reason: str) -> dict[str, typing.Any]:
+        if not self._abort_url:
+            return {}
+
+        _url = self._abort_url
+
+        _response = sv_put(f"{_url}", headers=self._headers, data={"reason": reason})
+
+        _json_response = get_json_from_response(
+            expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND],
+            scenario=f"Abort of run '{self.id}'",
+            response=_response,
+        )
+
+        if not isinstance(_json_response, dict):
+            raise RuntimeError(
+                "Expected dict from JSON response during abort of "
+                f"run but got '{type(_json_response)}'"
+            )
+
+        return _json_response
diff --git a/simvue/api/request.py b/simvue/api/request.py
index c31705c4..2c240ca8 100644
--- a/simvue/api/request.py
+++ b/simvue/api/request.py
@@ -211,19 +211,22 @@ def get_json_from_response(
     expected_status: list[int],
     scenario: str,
     response: requests.Response,
+    allow_parse_failure: bool = False,
 ) -> typing.Union[dict, list]:
     try:
         json_response = response.json()
         json_response = json_response or {}
+        decode_error = ""
-    except json.JSONDecodeError:
-        json_response = None
+    except json.JSONDecodeError as e:
+        json_response = {} if allow_parse_failure else None
+        decode_error = f"{e}"
 
-    error_str = f"{scenario} failed "
+    error_str = f"{scenario} failed for url '{response.url}' "
 
     if (_status_code := response.status_code) in expected_status:
         if json_response is not None:
             return json_response
-        details = "could not request JSON response"
+        details = f"could not request JSON response: {decode_error}"
     else:
         error_str += f"with status {_status_code}"
         details = (json_response or {}).get("detail")
diff --git a/simvue/client.py b/simvue/client.py
index f091cb54..c0e427ac 100644
--- a/simvue/client.py
+++ b/simvue/client.py
@@ -29,7 +29,7 @@
from .models import FOLDER_REGEX, NAME_REGEX from .config.user import SimvueConfiguration from .api.request import get_json_from_response -from .api.objects import Run, Folder, Tag, Artifact +from .api.objects import Run, Folder, Tag, Artifact, Alert CONCURRENT_DOWNLOADS = 10 @@ -271,7 +271,7 @@ def get_runs( } response = requests.get( - f"{self._user_config.server.url}/api/runs", + f"{self._user_config.server.url}/runs", headers=self._headers, params=params, ) @@ -371,7 +371,7 @@ def delete_runs( params: dict[str, bool] = {"runs_only": True, "runs": True} response = requests.delete( - f"{self._user_config.server.url}/api/folders/{folder_id}", + f"{self._user_config.server.url}/folders/{folder_id}", headers=self._headers, params=params, ) @@ -448,19 +448,7 @@ def delete_alert(self, alert_id: str) -> None: alert_id : str the unique identifier for the alert """ - response = requests.delete( - f"{self._user_config.server.url}/api/alerts/{alert_id}", - headers=self._headers, - ) - - if response.status_code == http.HTTPStatus.OK: - logger.debug(f"Alert '{alert_id}' deleted successfully") - return - - raise RuntimeError( - f"Deletion of alert '{alert_id}' failed" - f"with code {response.status_code}: {response.text}" - ) + Alert(identifier=alert_id).delete() @prettify_pydantic @pydantic.validate_call @@ -485,7 +473,7 @@ def list_artifacts(self, run_id: str) -> list[dict[str, typing.Any]]: params: dict[str, str] = {"runs": json.dumps([run_id])} response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/artifacts", + f"{self._user_config.server.url}/artifacts", headers=self._headers, params=params, ) @@ -527,27 +515,7 @@ def abort_run(self, run_id: str, reason: str) -> typing.Union[dict, list]: dict | list response from server """ - body: dict[str, str | None] = {"id": run_id, "reason": reason} - - response = requests.put( - f"{self._user_config.server.url}/api/runs/abort", - headers=self._headers, - json=body, - ) - - json_response = 
get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Abort of run '{run_id}'", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected list from JSON response during retrieval of " - f"artifact but got '{type(json_response)}'" - ) - - return json_response + return Run(identifier=run_id).abort(reason=reason) @prettify_pydantic @pydantic.validate_call @@ -724,7 +692,7 @@ def get_artifacts_as_files( params: dict[str, typing.Optional[str]] = {"category": category} response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}/artifacts", + f"{self._user_config.server.url}/runs/{run_id}/artifacts", headers=self._headers, params=params, ) @@ -817,7 +785,7 @@ def get_folders( } response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/folders", + f"{self._user_config.server.url}/folders", headers=self._headers, params=params, ) @@ -864,7 +832,7 @@ def get_metrics_names(self, run_id: str) -> list[str]: params = {"runs": json.dumps([run_id])} response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/metrics/names", + f"{self._user_config.server.url}/metrics/names", headers=self._headers, params=params, ) @@ -900,7 +868,7 @@ def _get_run_metrics_from_server( } metrics_response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/metrics", + f"{self._user_config.server.url}/metrics", headers=self._headers, params=params, ) @@ -1162,7 +1130,7 @@ def get_events( } response = requests.get( - f"{self._user_config.server.url}/api/events", + f"{self._user_config.server.url}/events", headers=self._headers, params=params, ) @@ -1219,7 +1187,7 @@ def get_alerts( params: dict[str, int] = {"count": count_limit or 0, "start": start_index or 0} if not run_id: response = requests.get( - f"{self._user_config.server.url}/api/alerts/", + f"{self._user_config.server.url}/alerts/", 
headers=self._headers, params=params, ) @@ -1231,7 +1199,7 @@ def get_alerts( ) else: response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}", + f"{self._user_config.server.url}/runs/{run_id}", headers=self._headers, params=params, ) @@ -1308,7 +1276,7 @@ def get_tags( """ params = {"count": count_limit or 0, "start": start_index or 0} response = requests.get( - f"{self._user_config.server.url}/api/tags", + f"{self._user_config.server.url}/tags", headers=self._headers, params=params, ) diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 43d72adf..5c03c038 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -19,6 +19,7 @@ from simvue.utilities import get_expiry from simvue.version import __version__ from simvue.api.request import get +from simvue.api.url import URL logger = logging.getLogger(__file__) @@ -30,8 +31,9 @@ class ServerSpecifications(pydantic.BaseModel): @pydantic.field_validator("url") @classmethod - def url_to_str(cls, v: typing.Any) -> str: - return f"{v}" + def url_to_api_url(cls, v: typing.Any) -> str: + _url = URL(f"{v}") / "api" + return f"{_url}" @pydantic.field_validator("token") def check_token(cls, v: typing.Any) -> str: @@ -50,7 +52,7 @@ def _check_server(cls, token: str, url: str) -> None: "User-Agent": f"Simvue Python client {__version__}", } try: - response = get(f"{url}/api/version", headers) + response = get(f"{url}/version", headers) if response.status_code != http.HTTPStatus.OK or not response.json().get( "version" diff --git a/simvue/factory/proxy/remote.py b/simvue/factory/proxy/remote.py index d6239388..e68b00a5 100644 --- a/simvue/factory/proxy/remote.py +++ b/simvue/factory/proxy/remote.py @@ -46,7 +46,7 @@ def list_tags(self) -> list[str]: logger.debug("Retrieving existing tags") try: response = get( - f"{self._user_config.server.url}/api/runs/{self._id}", self._headers + f"{self._user_config.server.url}/runs/{self._id}", self._headers ) except Exception as 
err: self._error(f"Exception retrieving tags: {str(err)}") @@ -80,7 +80,7 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: logger.debug("Creating folder %s if necessary", data.get("folder")) try: response = post( - f"{self._user_config.server.url}/api/folders", + f"{self._user_config.server.url}/folders", self._headers, {"path": data.get("folder")}, ) @@ -104,9 +104,7 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: logger.debug('Creating run with data: "%s"', data) try: - response = post( - f"{self._user_config.server.url}/api/runs", self._headers, data - ) + response = post(f"{self._user_config.server.url}/runs", self._headers, data) except Exception as err: self._error(f"Exception creating run: {str(err)}") return (None, None) @@ -145,9 +143,7 @@ def update( logger.debug('Updating run with data: "%s"', data) try: - response = put( - f"{self._user_config.server.url}/api/runs", self._headers, data - ) + response = put(f"{self._user_config.server.url}/runs", self._headers, data) except Exception as err: self._error(f"Exception updating run: {err}") return None @@ -176,7 +172,7 @@ def set_folder_details( try: response = post( - f"{self._user_config.server.url}/api/folders", self._headers, data + f"{self._user_config.server.url}/folders", self._headers, data ) except Exception as err: self._error(f"Exception creating folder: {err}") @@ -195,7 +191,7 @@ def set_folder_details( try: response = put( - f"{self._user_config.server.url}/api/folders", self._headers, data + f"{self._user_config.server.url}/folders", self._headers, data ) except Exception as err: self._error(f"Exception setting folder details: {err}") @@ -227,7 +223,7 @@ def save_file( # Get presigned URL try: response = post( - f"{self._user_config.server.url}/api/artifacts", + f"{self._user_config.server.url}/artifacts", self._headers, prepare_for_api(data), ) @@ -311,7 +307,7 @@ def save_file( return None if storage_id: - path = 
f"{self._user_config.server.url}/api/runs/{self._id}/artifacts" + path = f"{self._user_config.server.url}/runs/{self._id}/artifacts" data["storage"] = storage_id try: @@ -342,7 +338,7 @@ def add_alert(self, data, run=None): try: response = post( - f"{self._user_config.server.url}/api/alerts", self._headers, data + f"{self._user_config.server.url}/alerts", self._headers, data ) except Exception as err: self._error(f"Got exception when creating an alert: {str(err)}") @@ -370,7 +366,7 @@ def set_alert_state( data = {"run": self._id, "alert": alert_id, "status": status} try: response = put( - f"{self._user_config.server.url}/api/alerts/status", self._headers, data + f"{self._user_config.server.url}/alerts/status", self._headers, data ) except Exception as err: self._error(f"Got exception when setting alert state: {err}") @@ -387,7 +383,7 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: List alerts """ try: - response = get(f"{self._user_config.server.url}/api/alerts", self._headers) + response = get(f"{self._user_config.server.url}/alerts", self._headers) except Exception as err: self._error(f"Got exception when listing alerts: {str(err)}") return [] @@ -416,7 +412,7 @@ def send_metrics( try: response = post( - f"{self._user_config.server.url}/api/metrics", + f"{self._user_config.server.url}/metrics", self._headers_mp, data, is_json=False, @@ -444,7 +440,7 @@ def send_event( try: response = post( - f"{self._user_config.server.url}/api/events", + f"{self._user_config.server.url}/events", self._headers_mp, data, is_json=False, @@ -470,7 +466,7 @@ def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: try: response = put( - f"{self._user_config.server.url}/api/runs/heartbeat", + f"{self._user_config.server.url}/runs/heartbeat", self._headers, {"id": self._id}, ) @@ -492,7 +488,7 @@ def get_abort_status(self) -> bool: try: response = get( - f"{self._user_config.server.url}/api/runs/{self._id}/abort", + 
f"{self._user_config.server.url}/runs/{self._id}/abort", self._headers_mp, ) except Exception as err: diff --git a/simvue/run.py b/simvue/run.py index 12135ed2..9f02ba21 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -447,7 +447,7 @@ def _online_dispatch_callback( return _data = {category: buffer, "run": run_id} _data_bin = msgpack.packb(_data, use_bin_type=True) - _url: str = f"{url}/api/{category}" + _url: str = f"{url}/{category}" _msgpack_header = headers | {"Content-Type": "application/msgpack"} diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index c8a7ba72..b62a640d 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -10,6 +10,7 @@ import tempfile import simvue.client as svc import simvue.run as sv_run +import simvue.api.objects as sv_api_obj @pytest.mark.dependency @@ -306,3 +307,34 @@ def test_multiple_metric_retrieval( aggregate=aggregate, output_format=output_format, ) + + +@pytest.mark.client +def test_alert_deletion() -> None: + _alert = sv_api_obj.UserAlert.new(name="test_alert", notification="none") + _alert.commit() + _client = svc.Client() + time.sleep(1) + _client.delete_alert(alert_id=_alert.id) + + with pytest.raises(RuntimeError) as e: + sv_api_obj.Alert(identifier=_alert.id) + + +@pytest.mark.client +def test_abort_run() -> None: + _uuid = f"{uuid.uuid4()}".split("-")[0] + _folder = sv_api_obj.Folder.new(path=f"/simvue_unit_testing/{_uuid}") + _run = sv_api_obj.Run.new(folder=f"/simvue_unit_testing/{_uuid}") + _run.status = "running" + _folder.commit() + _run.commit() + time.sleep(1) + _client = svc.Client() + _client.abort_run(_run.id, reason="Test abort") + time.sleep(1) + assert _run.abort_trigger + _run.delete() + _folder.delete() + + From 5d86ea6552c5736421cd7782573a44c11c01ce8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 2 Dec 2024 08:07:06 +0000 Subject: [PATCH 014/163] Continue swapping out old API --- pyproject.toml | 3 + 
simvue/api/objects/__init__.py | 2 +- simvue/api/objects/alert/base.py | 2 +- simvue/api/objects/alert/fetch.py | 15 ++- simvue/api/objects/base.py | 68 +++++++--- simvue/api/objects/folder.py | 29 ++++ simvue/api/objects/run.py | 15 ++- simvue/api/objects/storage/fetch.py | 8 +- simvue/api/request.py | 2 +- simvue/api/url.py | 7 +- simvue/run.py | 201 ++++++++++------------------ simvue/utilities.py | 23 +++- tests/functional/test_run_class.py | 9 +- 13 files changed, 213 insertions(+), 171 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e09d3586..597febdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,3 +112,6 @@ ignore-init-method = true fail-under = 95 verbose = 1 exclude = ["docs", "tests"] + +[tool.mypy] +ignore_missing_imports = true diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 7add4fbb..74bf2d4f 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -14,4 +14,4 @@ from .artifact import Artifact as Artifact from .run import Run as Run from .tag import Tag as Tag -from .folder import Folder as Folder +from .folder import Folder as Folder, get_folder_from_path as get_folder_from_path diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 8602b0ac..d0b89d49 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -112,6 +112,6 @@ def abort(self) -> bool: @abort.setter @pydantic.validate_call - def abort(self, abort: str) -> None: + def abort(self, abort: bool) -> None: """Configure alert to trigger aborts""" self._staging["abort"] = abort diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 19767d0a..9925628f 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -50,6 +50,7 @@ def get( headers=_class_instance._headers, params={"start": offset, "count": count}, ) + _json_response = get_json_from_response( response=_response, 
expected_status=[http.HTTPStatus.OK], @@ -62,7 +63,7 @@ def get( f"but got '{type(_json_response)}'" ) - if not (_data := _json_response.get("data")): + if (_data := _json_response.get("data")) is None: raise RuntimeError( f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" ) @@ -70,17 +71,21 @@ def get( _out_dict: dict[str, AlertType] = {} for _entry in _json_response["data"]: + _id = _entry.pop("id") if _entry["source"] == "events": - yield _entry["id"], EventsAlert(read_only=True, **_entry) + yield _id, EventsAlert(read_only=True, identifier=_id, **_entry) elif _entry["source"] == "user": - yield _entry["id"], UserAlert(read_only=True, **_entry) + yield _id, UserAlert(read_only=True, identifier=_id, **_entry) elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( "threshold" ): - yield _entry["id"], MetricsThresholdAlert(read_only=True, **_entry) + yield ( + _id, + MetricsThresholdAlert(read_only=True, identifier=_id, **_entry), + ) elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( "range_low" ): - yield _entry["id"], MetricsRangeAlert(read_only=True, **_entry) + yield _id, MetricsRangeAlert(read_only=True, identifier=_id, **_entry) else: raise RuntimeError(f"Unrecognised alert source '{_entry['source']}'") diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index bb5d9473..effaee58 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -16,6 +16,7 @@ from requests.models import HTTPError from simvue.config.user import SimvueConfiguration +from simvue.exception import ObjectNotFoundError from simvue.version import __version__ from simvue.api.request import ( get as sv_get, @@ -39,7 +40,7 @@ def _wrapper(self) -> typing.Any: raise RuntimeError( f"Cannot use 'staging_check' decorator on type '{type(self).__name__}'" ) - if member_func.__name__ in _sv_obj._staging: + if not _sv_obj._read_only and member_func.__name__ in _sv_obj._staging: _sv_obj._logger.warning( 
f"Uncommitted change found for attribute '{member_func.__name__}'" ) @@ -91,7 +92,7 @@ def users(self, users: list[str]) -> None: @staging_check def public(self) -> bool: """Retrieve if this object is publically visible""" - return self._sv_obj._get_visibility().get("public", False) + return self._sv_obj._get_visibility().get("public", False) # type: ignore @public.setter @write_only @@ -103,7 +104,7 @@ def public(self, public: bool) -> None: @staging_check def tenant(self) -> bool: """Retrieve the tenant group this object is visible to""" - return self._sv_obj._get_visibility().get("tenant", False) + return self._sv_obj._get_visibility().get("tenant", False) # type: ignore @tenant.setter @write_only @@ -139,7 +140,7 @@ def __init__( # Recover any locally staged changes if not read-only self._staging: dict[str, typing.Any] = ( - self._get_local_staged() if _read_only else {} + self._get_local_staged() if not _read_only else {} ) self._staging |= kwargs @@ -172,8 +173,11 @@ def _get_local_staged(self) -> dict[str, typing.Any]: def _get_attribute(self, attribute: str, *default) -> typing.Any: # In the case where the object is read-only, staging is the data # already retrieved from the server - if (_attr := getattr(self, "_read_only", None)) and isinstance( - type(_attr), type(staging_check) + if ( + (_attr := getattr(self, "_read_only", None)) + and isinstance(type(_attr), type(staging_check)) + or self._identifier + and self._identifier.startswith("offline_") ): return self._staging[attribute] @@ -217,13 +221,40 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: return {} @abc.abstractclassmethod - def new(cls, offline: bool = False, **kwargs): + def new(cls, **_): pass @classmethod def get( cls, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: + _class_instance = cls(read_only=True) + if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: + raise 
RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _data: + _id = _entry.pop("id") + yield _id, cls(read_only=True, identifier=_id, **_entry) + + @classmethod + def count(cls, **kwargs) -> int: + _class_instance = cls(read_only=True) + if ( + _count := cls._get_all_objects(count=None, offset=None, **kwargs).get( + "count" + ) + ) is None: + raise RuntimeError( + f"Expected key 'count' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + return _count + + @classmethod + def _get_all_objects( + cls, count: int | None, offset: int | None, **kwargs + ) -> dict[str, typing.Any]: _class_instance = cls(read_only=True) _url = f"{_class_instance._base_url}" _response = sv_get( @@ -243,13 +274,7 @@ def get( f"but got '{type(_json_response)}'" ) - if not (_data := _json_response.get("data")): - raise RuntimeError( - f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" - ) - - for _entry in _json_response["data"]: - yield _entry["id"], cls(read_only=True, **_entry) + return _json_response def read_only(self, is_read_only: bool) -> None: self._read_only = is_read_only @@ -326,7 +351,7 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: if not self.url: raise RuntimeError(f"Identifier for instance of {self._label} Unknown") _response = sv_put( - url=self.url, headers=self._headers, data=kwargs, is_json=True + url=f"{self.url}", headers=self._headers, data=kwargs, is_json=True ) if _response.status_code == http.HTTPStatus.FORBIDDEN: @@ -380,12 +405,18 @@ def delete(self, **kwargs) -> dict[str, typing.Any]: return _json_response def _get(self, **kwargs) -> dict[str, typing.Any]: - if self._offline: + if self._identifier.startswith("offline_"): return self._get_local_staged() if not self.url: raise RuntimeError(f"Identifier for instance of {self._label} Unknown") _response = sv_get(url=f"{self.url}", headers=self._headers, params=kwargs) + + if 
_response.status_code == http.HTTPStatus.NOT_FOUND: + raise ObjectNotFoundError( + obj_type=self._label, name=self._identifier or "Unknown" + ) + _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], @@ -417,3 +448,8 @@ def _cache(self) -> None: with self._local_staging_file.open("w", encoding="utf-8") as out_f: json.dump(_local_data, out_f, indent=2) + + @property + def staged(self) -> dict[str, typing.Any] | None: + """Return currently staged changes to this object""" + return self._staging or None diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 2c7e4774..0e2c10af 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -10,8 +10,11 @@ import pathlib import typing +from codecarbon.output_methods.emissions_data import json import pydantic +from simvue.exception import ObjectNotFoundError + from .base import SimvueObject, Visibility, staging_check, write_only from simvue.models import FOLDER_REGEX @@ -102,6 +105,19 @@ def name(self, name: str) -> None: """Update the folder name""" self._staging["name"] = name + @property + @staging_check + def metadata(self) -> dict[str, int | str | None | float | dict] | None: + """Return the folder metadata""" + return self._get().get("metadata") + + @metadata.setter + @write_only + @pydantic.validate_call + def metadata(self, metadata: dict[str, int | str | None | float | dict]) -> None: + """Update the folder metadata""" + self._staging["metadata"] = metadata + @property @staging_check def star(self) -> bool: @@ -130,3 +146,16 @@ def ttl(self, time_seconds: int) -> None: def delete(self, *, recursive: bool, delete_runs: bool) -> dict[str, typing.Any]: return super().delete(recursive=recursive, runs=delete_runs) + + +@pydantic.validate_call +def get_folder_from_path( + path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], +) -> Folder: + _folders = Folder.get(filters=json.dumps([f"path == {path}"]), count=1) + + try: + 
_, _folder = next(_folders) + except StopIteration as e: + raise ObjectNotFoundError(obj_type="folder", name=path) from e + return _folder # type: ignore diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 880c9d1c..516714d2 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -40,7 +40,7 @@ def new( *, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], offline: bool = False, - ): + ) -> typing.Self: """Create a new Folder on the Simvue server with the given path""" _run = Run(folder=folder, system=None, status="created", _read_only=False) _run.offline_mode(offline) @@ -90,7 +90,7 @@ def ttl(self) -> int: @ttl.setter @write_only @pydantic.validate_call - def ttl(self, time_seconds: int) -> None: + def ttl(self, time_seconds: int | None) -> None: """Update the retention period for this run""" self._staging["ttl"] = time_seconds @@ -133,6 +133,12 @@ def description(self, description: str) -> None: def system(self) -> dict[str, typing.Any]: return self._get_attribute("system") + @system.setter + @write_only + @pydantic.validate_call + def system(self, system: dict[str, typing.Any]) -> None: + self._staging["system"] = system + @property @staging_check def heartbeat_timeout(self) -> int: @@ -157,8 +163,9 @@ def notifications(self, notifications: typing.Literal["none", "email"]) -> None: @property @staging_check - def alerts(self) -> list[str]: - return self._get_attribute("alerts") + def alerts(self) -> typing.Generator[str, None, None]: + for alert in self._get_attribute("alerts"): + yield alert["alert"]["id"] @alerts.setter @write_only diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index d4f989c7..8ee75188 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -61,9 +61,13 @@ def get( _out_dict: dict[str, FileStorage | S3Storage] = {} for _entry in _json_response: + _id = _entry.pop("id") if _entry["type"] == "S3": - yield 
_entry["id"], S3Storage(read_only=True, **_entry) + yield _entry["id"], S3Storage(read_only=True, identifier=_id, **_entry) elif _entry["type"] == "File": - yield _entry["id"], FileStorage(read_only=True, **_entry) + yield ( + _entry["id"], + FileStorage(read_only=True, identifier=_id, **_entry), + ) else: raise RuntimeError(f"Unrecognised storage type '{_entry['type']}'") diff --git a/simvue/api/request.py b/simvue/api/request.py index 2c240ca8..11999e3d 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -228,7 +228,7 @@ def get_json_from_response( return json_response details = f"could not request JSON response: {decode_error}" else: - error_str += f"with status {_status_code}" + error_str += f" with status {_status_code}" details = (json_response or {}).get("detail") try: diff --git a/simvue/api/url.py b/simvue/api/url.py index 76efb7e5..443c4021 100644 --- a/simvue/api/url.py +++ b/simvue/api/url.py @@ -5,14 +5,13 @@ class URL: def __init__(self, url: str) -> None: + if url.endswith("/"): + url = url[:-1] + _url = urllib.parse.urlparse(url) self._scheme: str = _url.scheme self._path: str = _url.path self._host: str | None = _url.hostname - - if self._host and self._host.endswith("/"): - self._host = self._host[:-1] - self._port: int | None = _url.port self._fragment: str = _url.fragment diff --git a/simvue/run.py b/simvue/run.py index 9f02ba21..f238454b 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -32,6 +32,8 @@ import psutil from simvue.api.objects.alert.fetch import Alert +from simvue.api.objects.folder import Folder, get_folder_from_path +from simvue.exception import ObjectNotFoundError, SimvueRunError from .config.user import SimvueConfiguration @@ -41,12 +43,10 @@ from .executor import Executor from .metrics import get_gpu_metrics, get_process_cpu, get_process_memory from .models import FOLDER_REGEX, NAME_REGEX, MetricKeyString -from .serialization import serialize_object from .system import get_system from .metadata import git_info, 
environment from .eco import SimvueEmissionsTracker from .utilities import ( - calculate_sha256, skip_if_failed, validate_timestamp, simvue_timestamp, @@ -155,6 +155,7 @@ def __init__( self._emissions_tracker: typing.Optional[SimvueEmissionsTracker] = None self._id: typing.Optional[str] = None + self._folder: Folder | None = None self._term_color: bool = True self._suppress_errors: bool = False self._queue_blocking: bool = False @@ -561,7 +562,7 @@ def _error(self, message: str, join_threads: bool = True) -> None: self._dispatcher.join() if not self._suppress_errors: - raise RuntimeError(message) + raise SimvueRunError(message) # Simvue support now terminated as the instance of Run has entered # the dormant state due to exception throw so set listing to be 'lost' @@ -578,13 +579,9 @@ def _error(self, message: str, join_threads: bool = True) -> None: @pydantic.validate_call def init( self, - name: typing.Optional[ - typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] - ] = None, + name: typing.Annotated[str | None, pydantic.Field(pattern=NAME_REGEX)] = None, *, - metadata: typing.Optional[ - dict[str, typing.Union[str, int, float, bool]] - ] = None, + metadata: dict[str, typing.Any] = None, tags: typing.Optional[list[str]] = None, description: typing.Optional[str] = None, folder: typing.Annotated[ @@ -648,6 +645,12 @@ def init( self._term_color = not no_color + try: + self._folder = get_folder_from_path(path=folder) + except ObjectNotFoundError: + self._folder = Folder.new(path=folder, offline=self._mode == "offline") + self._folder.commit() # type: ignore + if isinstance(visibility, str) and visibility not in ("public", "tenant"): self._error( "invalid visibility option, must be either None, 'public', 'tenant' or a list of users" @@ -685,9 +688,14 @@ def init( self._timer = time.time() - self._sv_obj = RunObject.new(folder=folder) - self._sv_obj.description = description - self._sv_obj.name = name + self._sv_obj = RunObject.new(folder=folder, 
offline=self._mode == "offline") + + if description: + self._sv_obj.description = description + + if name: + self._sv_obj.name = name + self._sv_obj.visibility = { "users": visibility if isinstance(visibility, list) else [], "tenant": visibility == "tenant", @@ -695,11 +703,12 @@ def init( } self._sv_obj.ttl = self._retention self._sv_obj.status = self._status - self._sv_obj.metadata = ( - (metadata or {}) | git_info(os.getcwd()) | environment(), - ) + self._sv_obj.metadata = (metadata or {}) | git_info(os.getcwd()) | environment() self._sv_obj.heartbeat_timeout = timeout - self._sv_obj.system = get_system() if self._status == "running" else None + + if self._status == "running": + self._sv_obj.system = get_system() + self._data = self._sv_obj._staging self._sv_obj.commit() @@ -1275,37 +1284,27 @@ def save_object( bool whether object upload was successful """ - serialized = serialize_object(obj, allow_pickle) - - if not serialized or not (pickled := serialized[0]): - self._error(f"Failed to serialize '{obj}'") - return False - - data_type = serialized[1] - - if not data_type and not allow_pickle: - self._error("Unable to save Python object, set allow_pickle to True") + if not self._sv_obj or not self.id: + self._error("Cannot save files, run not initialised") return False - data: dict[str, typing.Any] = { - "pickled": pickled, - "type": data_type, - "checksum": calculate_sha256(pickled, False), - "originalPath": "", - "size": sys.getsizeof(pickled), - "name": name, - "run": self._name, - "category": category, - "storage": self._storage_id, - } + _name: str = name or f"{obj.__class__.__name__.lower()}_{id(obj)}" - # Register file try: - return self._sv_obj is not None and self._simvue.save_file(data) is not None - except RuntimeError as e: - self._error(f"{e.args[0]}") + Artifact.new_object( + run=self.id, + name=_name, + category=category, + obj=obj, + allow_pickling=allow_pickle, + storage=self._storage_id, + ) + except (ValueError, RuntimeError) as e: + 
self._error(f"Failed to save object '{_name}' to run '{self.id}': {e}") return False + return True + @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @pydantic.validate_call @@ -1339,7 +1338,7 @@ def save_file( bool whether the upload was successful """ - if not self._sv_obj: + if not self._sv_obj or not self.id: self._error("Cannot save files, run not initialised") return False @@ -1347,14 +1346,6 @@ def save_file( self._error("Cannot upload output files for runs in the created state") return False - mimetypes.init() - mimetypes_valid = ["application/vnd.plotly.v1+json"] - mimetypes_valid += list(mimetypes.types_map.values()) - - if filetype and filetype not in mimetypes_valid: - self._error(f"Invalid MIME type '{filetype}' specified") - return False - stored_file_name: str = f"{file_path}" if preserve_path and stored_file_name.startswith("./"): @@ -1362,51 +1353,23 @@ def save_file( elif not preserve_path: stored_file_name = os.path.basename(file_path) - # Determine mimetype - if not (mimetype := filetype): - mimetype = mimetypes.guess_type(file_path)[0] or "application/octet-stream" - - data: dict[str, typing.Any] = { - "name": name or stored_file_name, - "run": self._name, - "type": mimetype, - "storage": self._storage_id, - "category": category, - "size": (file_size := os.path.getsize(file_path)), - "originalPath": os.path.abspath( - os.path.expanduser(os.path.expandvars(file_path)) - ), - "checksum": calculate_sha256(f"{file_path}", True), - } - - if not file_size: - click.secho( - "[simvue] WARNING: saving zero-sized files not currently supported", - bold=self._term_color, - fg="yellow" if self._term_color else None, - ) - return True - - # Register file - _artifact = Artifact.new( - name=name or stored_file_name, - run=self._sv_obj.id, - storage=self._storage_id, - file_path=file_path, - offline=self._mode == "offline", - file_type=None, - category=category, - ) - _artifact.commit() - - _storage_id = _artifact.storage - try: - 
return self._simvue.save_file(data) is not None - except RuntimeError as e: - self._error(f"{e.args[0]}") + # Register file + Artifact.new_file( + name=name or stored_file_name, + run=self.id, + storage=self._storage_id, + file_path=file_path, + offline=self._mode == "offline", + file_type=filetype, + category=category, + ) + except (ValueError, RuntimeError) as e: + self._error(f"Failed to save file: {e}") return False + return True + @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @pydantic.validate_call @@ -1598,7 +1561,6 @@ def close(self) -> bool: @pydantic.validate_call def set_folder_details( self, - path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], metadata: typing.Optional[dict[str, typing.Union[int, str, float]]] = None, tags: typing.Optional[list[str]] = None, description: typing.Optional[str] = None, @@ -1607,8 +1569,6 @@ def set_folder_details( Parameters ---------- - path : str - folder path metadata : dict[str, int | str | float], optional additional metadata to attach to this folder, by default None tags : list[str], optional @@ -1621,7 +1581,7 @@ def set_folder_details( bool returns True if update was successful """ - if not self._sv_obj: + if not self._folder: self._error("Cannot update folder details, run was not initialised") return False @@ -1629,25 +1589,21 @@ def set_folder_details( self._error("Run is not active") return False - data: dict[str, typing.Any] = {"path": path} - - if metadata: - data["metadata"] = metadata or {} - - if tags: - data["tags"] = tags or [] - - if description: - data["description"] = description - try: - if self._simvue.set_folder_details(data): - return True - except RuntimeError as e: - self._error(f"{e.args[0]}") + self._folder.read_only(False) + if metadata: + self._folder.metadata = metadata + if tags: + self._folder.tags = tags + if description: + self._folder.description = description + self._folder.commit() + self._folder.read_only(True) + except (RuntimeError, 
ValueError, pydantic.ValidationError) as e: + self._error(f"Failed to update folder '{self._folder.name}' details: {e}") return False - return False + return True @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @@ -1870,26 +1826,17 @@ def create_alert( # Check if the alert already exists _alert_id: typing.Optional[str] = None - try: - _alerts = Alert.get() - except RuntimeError as e: - self._error(f"{e.args[0]}") - return None - - if _alerts: - for _, _existing_alert in _alerts: - if _existing_alert.name == _alert.name and _existing_alert.compare( - _alert - ): - _alert_id = _existing_alert.id - logger.info("Existing alert found with id: %s", _existing_alert.id) - break + for _, _existing_alert in Alert.get(): + if _existing_alert.compare(_alert): + _alert_id = _existing_alert.id + logger.info("Existing alert found with id: %s", _existing_alert.id) + break if not _alert_id: _alert.commit() _alert_id = _alert.id - self._sv_obj.alerts = self._sv_obj.alerts + [_alert_id] + self._sv_obj.alerts = list(self._sv_obj.alerts) + [_alert_id] self._sv_obj.commit() diff --git a/simvue/utilities.py b/simvue/utilities.py index dee937f2..580fa7ff 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -160,14 +160,25 @@ def wrapper(self, *args, **kwargs) -> typing.Any: return decorator -def parse_pydantic_error(class_name: str, error: pydantic.ValidationError) -> str: +def parse_pydantic_error(error: pydantic.ValidationError) -> str: out_table: list[str] = [] for data in json.loads(error.json()): + _input = data.get("input") if data["input"] is not None else "None" + _input_str = ( + _input_str + if len((_input_str := f"{_input}")) < 50 + else f"{_input_str[:50]}..." 
+ ) + _type: str = data["type"] + + if (_input_type := type(_input)) != _type: + _type = f"{_input_type.__name__} != {_type}" + out_table.append( [ - data.get("input") if data["input"] is not None else "None", + _input_str, data["loc"], - data["type"], + _type, data["msg"], ] ) @@ -176,7 +187,7 @@ def parse_pydantic_error(class_name: str, error: pydantic.ValidationError) -> st headers=["Input", "Location", "Type", "Message"], tablefmt="fancy_grid", ) - return f"`{class_name}` Validation:\n{err_table}" + return f"`{error.title}` Validation:\n{err_table}" def skip_if_failed( @@ -223,7 +234,7 @@ def wrapper(self: "Run", *args, **kwargs) -> typing.Any: try: return class_func(self, *args, **kwargs) except pydantic.ValidationError as e: - error_str = parse_pydantic_error(class_func.__name__, e) + error_str = parse_pydantic_error(e) if getattr(self, ignore_exc_attr, True): setattr(self, failure_attr, True) logger.error(error_str) @@ -260,7 +271,7 @@ def wrapper(self, *args, **kwargs) -> typing.Any: try: return class_func(self, *args, **kwargs) except pydantic.ValidationError as e: - error_str = parse_pydantic_error(class_func.__name__, e) + error_str = parse_pydantic_error(e) raise RuntimeError(error_str) return wrapper diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 4ab7544f..976bf24f 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -15,6 +15,7 @@ import random import simvue +from simvue.exception import SimvueRunError import simvue.run as sv_run import simvue.client as sv_cl import simvue.sender as sv_send @@ -62,7 +63,7 @@ def test_log_metrics( run.config(suppress_errors=False) if visibility == "bad_option": - with pytest.raises(RuntimeError): + with pytest.raises(SimvueRunError, match="visibility") as e: run.init( name=f"test_run_{str(uuid.uuid4()).split('-', 1)[0]}", tags=[ @@ -202,7 +203,7 @@ def test_update_metadata_running(create_test_run: tuple[sv_run.Run, dict]) -> No run_info 
= client.get_run(run.id) for key, value in METADATA.items(): - assert run_info.get("metadata", {}).get(key) == value + assert run_info.metadata.get(key) == value @pytest.mark.run @@ -215,7 +216,7 @@ def test_update_metadata_created(create_pending_run: tuple[sv_run.Run, dict]) -> run_info = client.get_run(run.id) for key, value in METADATA.items(): - assert run_info.get("metadata", {}).get(key) == value + assert run_info.metadata.get(key) == value @pytest.mark.run @@ -434,7 +435,7 @@ def test_set_folder_details(request: pytest.FixtureRequest) -> None: request.node.name.replace("[", "_").replace("]", "_"), ] run.init(folder=folder_name) - run.set_folder_details(path=folder_name, tags=tags, description=description) + run.set_folder_details(tags=tags, description=description) client = sv_cl.Client() assert (folder := client.get_folders(filters=[f"path == {folder_name}"])[0])["tags"] == tags From 409b30a5627784adb0d005b66fcfa87ceeb64711 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 2 Dec 2024 11:49:03 +0000 Subject: [PATCH 015/163] Fix read only mode for server object retrieval --- simvue/api/objects/administrator/tenant.py | 3 +- simvue/api/objects/base.py | 48 ++++---- simvue/api/objects/run.py | 7 +- simvue/client.py | 129 +++++---------------- simvue/run.py | 1 + tests/functional/test_client.py | 1 - 6 files changed, 65 insertions(+), 124 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 905c2a1c..28b3e857 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -2,8 +2,7 @@ import pydantic -from simvue.api.objects.base import write_only -from .base import SimvueObject, staging_check +from simvue.api.objects.base import write_only, SimvueObject, staging_check class Tenant(SimvueObject): diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index effaee58..098f5936 100644 --- 
a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -8,12 +8,12 @@ import abc import pathlib import typing +import inspect import uuid import http from codecarbon.external.logger import logging from codecarbon.output_methods.emissions_data import json -from requests.models import HTTPError from simvue.config.user import SimvueConfiguration from simvue.exception import ObjectNotFoundError @@ -124,6 +124,11 @@ def __init__( self._identifier: typing.Optional[str] = ( identifier if identifier is not None else f"offline_{uuid.uuid1()}" ) + self._properties = [ + name + for name, member in inspect.getmembers(self.__class__) + if isinstance(member, property) + ] self._offline: bool = identifier is not None and identifier.startswith( "offline_" ) @@ -140,7 +145,7 @@ def __init__( # Recover any locally staged changes if not read-only self._staging: dict[str, typing.Any] = ( - self._get_local_staged() if not _read_only else {} + {} if _read_only else self._get_local_staged() ) self._staging |= kwargs @@ -150,16 +155,6 @@ def __init__( "User-Agent": f"Simvue Python client {__version__}", } - if identifier: - try: - self._get_attribute("id") - except HTTPError as e: - if e.response.status_code == http.HTTPStatus.NOT_FOUND: - raise ValueError( - f"Failed to retrieve {self._label} '{identifier}', " - "no such object" - ) from e - def _get_local_staged(self) -> dict[str, typing.Any]: """Retrieve any locally staged data for this identifier""" if not self._local_staging_file.exists() or not self._identifier: @@ -173,12 +168,13 @@ def _get_local_staged(self) -> dict[str, typing.Any]: def _get_attribute(self, attribute: str, *default) -> typing.Any: # In the case where the object is read-only, staging is the data # already retrieved from the server - if ( - (_attr := getattr(self, "_read_only", None)) - and isinstance(type(_attr), type(staging_check)) - or self._identifier - and self._identifier.startswith("offline_") - ): + _attribute_is_property: bool = attribute in 
self._properties + _state_is_read_only: bool = getattr(self, "_read_only", True) + _offline_state: bool = ( + self._identifier is not None and self._identifier.startswith("offline_") + ) + + if (_attribute_is_property and _state_is_read_only) or _offline_state: return self._staging[attribute] try: @@ -224,6 +220,18 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: def new(cls, **_): pass + @classmethod + def ids( + cls, count: int | None = None, offset: int | None = None, **kwargs + ) -> list[str]: + """Retrieve a list of all object identifiers""" + _class_instance = cls(read_only=True) + if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + return [_entry["id"] for _entry in _data] + @classmethod def get( cls, count: int | None = None, offset: int | None = None, **kwargs @@ -316,9 +324,7 @@ def _base_url(self) -> URL: @property def url(self) -> typing.Optional[URL]: - if self._identifier is None: - return None - return self._base_url / self._identifier + return None if self._identifier is None else self._base_url / self._identifier def _post(self, **kwargs) -> dict[str, typing.Any]: _response = sv_post( diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 516714d2..57b1dc50 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -164,8 +164,13 @@ def notifications(self, notifications: typing.Literal["none", "email"]) -> None: @property @staging_check def alerts(self) -> typing.Generator[str, None, None]: + for alert in self.get_alert_details(): + yield alert["id"] + + def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, None]: + """Retrieve the full details of alerts for this run""" for alert in self._get_attribute("alerts"): - yield alert["alert"]["id"] + yield alert["alert"] @alerts.setter @write_only diff --git a/simvue/client.py b/simvue/client.py 
index c0e427ac..6631fbc4 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -18,6 +18,8 @@ import requests +from simvue.api.objects.alert.base import AlertBase + from .converters import ( aggregated_metrics_to_dataframe, to_dataframe, @@ -720,7 +722,7 @@ def get_artifacts_as_files( raise RuntimeError( f"Download of file {download['url']} " f"failed with exception: {e}" - ) + ) from e @prettify_pydantic @pydantic.validate_call @@ -967,7 +969,7 @@ def get_metric_values( "Expected either argument 'run_ids' or 'run_filters' for get_metric_values" ) - if not run_ids or any(not i for i in run_ids): + if not run_ids or not all(run_ids): raise ValueError( f"Expected list of run identifiers for 'run_ids' but got '{run_ids}'" ) @@ -975,26 +977,27 @@ def get_metric_values( if not use_run_names: run_labels = run_ids - # Now get the metrics for each run - run_metrics = self._get_run_metrics_from_server( + if run_metrics := self._get_run_metrics_from_server( metric_names=metric_names, run_ids=run_ids, xaxis=xaxis, aggregate=aggregate, max_points=max_points, - ) - - if not run_metrics: - return None - - if aggregate: - return aggregated_metrics_to_dataframe( - run_metrics, xaxis=xaxis, parse_to=output_format + ): + return ( + aggregated_metrics_to_dataframe( + run_metrics, xaxis=xaxis, parse_to=output_format + ) + if aggregate + else parse_run_set_metrics( + run_metrics, + xaxis=xaxis, + run_labels=run_labels, + parse_to=output_format, + ) ) else: - return parse_run_set_metrics( - run_metrics, xaxis=xaxis, run_labels=run_labels, parse_to=output_format - ) + return None @check_extra("plot") @prettify_pydantic @@ -1158,7 +1161,7 @@ def get_alerts( names_only: bool = True, start_index: typing.Optional[pydantic.NonNegativeInt] = None, count_limit: typing.Optional[pydantic.PositiveInt] = None, - ) -> list[dict[str, typing.Any]]: + ) -> list[AlertBase] | list[str | None]: """Retrieve alerts for a given run Parameters @@ -1184,68 +1187,15 @@ def get_alerts( RuntimeError if there 
was a failure retrieving data from the server """ - params: dict[str, int] = {"count": count_limit or 0, "start": start_index or 0} - if not run_id: - response = requests.get( - f"{self._user_config.server.url}/alerts/", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of alerts for run '{run_id}'", - response=response, - ) - else: - response = requests.get( - f"{self._user_config.server.url}/runs/{run_id}", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[200], - scenario=f"Retrieval of alerts for run '{run_id}'", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response when retrieving alerts" - ) - if run_id and (alerts := json_response.get("alerts")) is None: - raise RuntimeError( - "Expected key 'alerts' in response when retrieving " - f"alerts for run '{run_id}': {json_response}" - ) - elif not run_id and (alerts := json_response.get("data")) is None: - raise RuntimeError( - "Expected key 'data' in response when retrieving " - f"alerts: {json_response}" - ) - - if run_id and critical_only: - if names_only: - return [ - alert["alert"].get("name") - for alert in alerts - if alert["status"].get("current") == "critical" - ] - else: - return [ - alert - for alert in alerts - if alert["status"].get("current") == "critical" - ] - if names_only: - if run_id: - return [alert["alert"].get("name") for alert in alerts] - else: - return [alert.get("name") for alert in alerts] + if not run_id: + return [alert.name if names_only else alert for _, alert in Alert.get()] # type: ignore - return alerts + return [ + alert.get("name") if names_only else Alert(**alert) + for alert in Run(identifier=run_id).get_alert_details() + if not critical_only or alert["status"].get("current") == "critical" + ] # type: ignore @prettify_pydantic 
@pydantic.validate_call @@ -1254,7 +1204,7 @@ def get_tags( *, start_index: typing.Optional[pydantic.NonNegativeInt] = None, count_limit: typing.Optional[pydantic.PositiveInt] = None, - ) -> list[dict]: + ) -> list[str]: """Retrieve tags Parameters @@ -1266,34 +1216,15 @@ def get_tags( Returns ------- - list[dict[str, Any]] - a list of all tags for this run which match the constrains specified + list[str] + a list of all tag ids for this run Raises ------ RuntimeError if there was a failure retrieving data from the server """ - params = {"count": count_limit or 0, "start": start_index or 0} - response = requests.get( - f"{self._user_config.server.url}/tags", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[200], - scenario="Retrieval of tags", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError("Expected list from JSON response when retrieving tags") - - if not (data := json_response.get("data")): - raise RuntimeError("Expected key 'data' in response during tags retrieval") - - return data + return Tag.ids(count=count_limit, offset=start_index) @prettify_pydantic @pydantic.validate_call diff --git a/simvue/run.py b/simvue/run.py index f238454b..e073c152 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -703,6 +703,7 @@ def init( } self._sv_obj.ttl = self._retention self._sv_obj.status = self._status + self._sv_obj.tags = tags self._sv_obj.metadata = (metadata or {}) | git_info(os.getcwd()) | environment() self._sv_obj.heartbeat_timeout = timeout diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index b62a640d..064f8a8b 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -37,7 +37,6 @@ def test_get_alerts(create_test_run: tuple[sv_run.Run, dict], from_run: bool) -> assert alert["alert"]["status"]["current"] == "critical" else: assert (triggered_alerts_full := client.get_alerts(names_only=True, 
critical_only=False)) - print(triggered_alerts_full, run_data["created_alerts"]) assert all(a in triggered_alerts_full for a in run_data['created_alerts']) From b42488a47039ac5f0b34fb701f70f36cdecfaee4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 5 Dec 2024 23:23:25 +0000 Subject: [PATCH 016/163] Further test fixes and added julia and js metadata --- poetry.lock | 14 +- pyproject.toml | 4 +- simvue/api/objects/administrator/tenant.py | 2 +- simvue/api/objects/administrator/user.py | 2 +- simvue/api/objects/alert/fetch.py | 6 - simvue/api/objects/alert/metrics.py | 1 + simvue/api/objects/artifact.py | 87 +++- simvue/api/objects/base.py | 105 +++-- simvue/api/objects/folder.py | 8 +- simvue/api/objects/metric.py | 12 - simvue/api/objects/run.py | 108 +++-- simvue/api/objects/storage/base.py | 7 +- simvue/api/objects/storage/fetch.py | 7 +- simvue/api/objects/tag.py | 2 +- simvue/api/request.py | 18 +- simvue/client.py | 451 ++++++------------- simvue/config/parameters.py | 2 + simvue/converters.py | 58 +-- simvue/eco.py | 1 + simvue/exception.py | 22 + simvue/factory/proxy/remote.py | 2 +- simvue/metadata.py | 62 ++- simvue/models.py | 66 ++- simvue/run.py | 77 +--- simvue/utilities.py | 4 - tests/example_data/Project.toml | 19 + tests/example_data/package-lock.json | 88 ++++ tests/functional/test_client.py | 27 +- tests/functional/test_config.py | 6 +- tests/functional/test_run_artifact_upload.py | 40 ++ tests/functional/test_run_class.py | 41 +- tests/functional/test_run_execute_process.py | 2 +- tests/unit/test_artifact.py | 17 +- tests/unit/test_event_alert.py | 26 ++ tests/unit/test_folder.py | 21 + tests/{functional => unit}/test_metadata.py | 15 + tests/unit/test_metric_range_alert.py | 47 +- tests/unit/test_metric_threshold_alert.py | 28 ++ tests/unit/test_run.py | 27 +- tests/unit/test_tag.py | 18 + tests/unit/test_tenant.py | 24 + tests/unit/test_user_alert.py | 23 + 42 files changed, 1022 insertions(+), 575 deletions(-) 
delete mode 100644 simvue/api/objects/metric.py create mode 100644 simvue/exception.py create mode 100644 tests/example_data/Project.toml create mode 100644 tests/example_data/package-lock.json create mode 100644 tests/functional/test_run_artifact_upload.py rename tests/{functional => unit}/test_metadata.py (50%) diff --git a/poetry.lock b/poetry.lock index 1b932b24..673e63a3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "annotated-types" @@ -603,6 +603,16 @@ files = [ [package.dependencies] termcolor = "*" +[[package]] +name = "flatdict" +version = "4.0.1" +description = "Python module for interacting with nested dicts as a single level dict with delimited keys." +optional = false +python-versions = "*" +files = [ + {file = "flatdict-4.0.1.tar.gz", hash = "sha256:cd32f08fd31ed21eb09ebc76f06b6bd12046a24f77beb1fd0281917e47f26742"}, +] + [[package]] name = "fonttools" version = "4.55.0" @@ -2366,4 +2376,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.0" python-versions = "^3.10,<3.14" -content-hash = "ebaa64c9ec1ee1c2170adc267998548054e1d5be8623f78b7d13203423e47d4d" +content-hash = "3201bc3ccf75a36e5fd825e457b46c4d4e9dfb16a76997fc0a861b9e6ef8098c" diff --git a/pyproject.toml b/pyproject.toml index 500165f3..d80ad525 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,7 @@ tabulate = "^0.9.0" randomname = "^0.2.1" codecarbon = "^2.7.1" numpy = "^2.1.2" +flatdict = "^4.0.1" [tool.poetry.extras] plot = ["matplotlib", "plotly"] @@ -104,7 +105,8 @@ markers = [ "api: tests of RestAPI functionality", "unix: tests for UNIX systems only", "metadata: tests of metadata gathering functions", - "proxies: tests for remote/offline Simvue proxies" + "proxies: tests for remote/offline Simvue proxies", + "offline: tests for offline 
functionality" ] [tool.interrogate] diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 28b3e857..e111ae97 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -11,7 +11,7 @@ class Tenant(SimvueObject): def new( cls, *, name: str, enabled: bool = True, offline: bool = False ) -> typing.Self: - _tenant = Tenant(name=name, enabled=enabled, offline=offline, read_only=False) + _tenant = Tenant(name=name, enabled=enabled, offline=offline, _read_only=False) _tenant.offline_mode(offline) return _tenant # type: ignore diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index d61ed137..c7925995 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -30,7 +30,7 @@ def new( "admin": admin, "enabled": enabled, } - _user = User(user=_user_info, tenant=tenant, offline=offline, read_only=False) + _user = User(user=_user_info, tenant=tenant, offline=offline, _read_only=False) _user.offline_mode(offline) return _user # type: ignore diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 9925628f..45fe989d 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -57,12 +57,6 @@ def get( scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", ) - if not isinstance(_json_response, dict): - raise RuntimeError( - f"Expected dict from JSON response during {_class_instance.__class__.__name__.lower()}s retrieval " - f"but got '{type(_json_response)}'" - ) - if (_data := _json_response.get("data")) is None: raise RuntimeError( f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 34cada1b..4725b48f 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py 
@@ -172,6 +172,7 @@ def new( source="metrics", enabled=enabled, alert=_alert_definition, + _read_only=False, ) _alert.offline_mode(offline) return _alert diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index dff22ed9..1dce7962 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -7,21 +7,25 @@ """ import http +import pathlib import typing import pydantic import os.path import sys +import requests from simvue.api.url import URL from simvue.models import NAME_REGEX from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 from simvue.api.objects.base import SimvueObject from simvue.serialization import serialize_object -from simvue.api.request import put as sv_put, get_json_from_response +from simvue.api.request import put as sv_put, get_json_from_response, get as sv_get Category = typing.Literal["code", "input", "output"] UPLOAD_TIMEOUT: int = 30 +DOWNLOAD_TIMEOUT: int = 30 +DOWNLOAD_CHUNK_SIZE: int = 8192 class Artifact(SimvueObject): @@ -225,8 +229,44 @@ def _upload( response=_response, ) - def _get(self, storage: str | None = None) -> dict[str, typing.Any]: - return super()._get(storage=self._storage) + def _get(self, storage: str | None = None, **kwargs) -> dict[str, typing.Any]: + return super()._get(storage=self._storage, **kwargs) + + @classmethod + def _get_all_objects( + cls, count: int | None, offset: int | None, **kwargs + ) -> list[dict[str, typing.Any]]: + _class_instance = cls(read_only=True) + _url = f"{_class_instance._base_url}" + + _response = sv_get( + _url, + headers=_class_instance._headers, + params={"start": offset, "count": count} | kwargs, + ) + + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + expected_type=list, + ) + + return _json_response + + @classmethod + def get( + cls, *, count: int | None = None, offset: int | None = 
None, **kwargs + ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: + _class_instance = cls(read_only=True) + if (_data := cls._get_all_objects(count, offset, **kwargs)) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _data: + _id = _entry.pop("id") + yield _id, cls(read_only=True, identifier=_id, **_entry) @property def name(self) -> str: @@ -262,3 +302,44 @@ def type(self) -> str: def storage_url(self) -> str | None: """Retrieve storage URL for the artifact""" return self._storage_url + + def download_content(self) -> typing.Any: + """Download content of artifact from storage""" + _response = requests.get(f"{self.storage_url}", timeout=DOWNLOAD_TIMEOUT) + + get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of content for {self._label} '{self._identifier}'", + ) + + return _response.content + + @pydantic.validate_call + def download(self, output_file: pathlib.Path) -> pathlib.Path | None: + _response = requests.get( + f"{self.storage_url}", stream=True, timeout=DOWNLOAD_TIMEOUT + ) + + get_json_from_response( + response=_response, + allow_parse_failure=True, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of file for {self._label} '{self._identifier}'", + ) + + _total_length: str | None = _response.headers.get("content-length") + + if not output_file.parent.is_dir(): + raise ValueError( + f"Cannot write to '{output_file.parent}', not a directory." 
+ ) + + with output_file.open("wb") as out_f: + if _total_length is None: + out_f.write(_response.content) + else: + for data in _response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE): + out_f.write(data) + + return output_file if output_file.exists() else None diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 098f5936..11cefb8e 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -11,9 +11,8 @@ import inspect import uuid import http - -from codecarbon.external.logger import logging -from codecarbon.output_methods.emissions_data import json +import json +import logging from simvue.config.user import SimvueConfiguration from simvue.exception import ObjectNotFoundError @@ -27,6 +26,8 @@ ) from simvue.api.url import URL +logging.basicConfig(level=logging.INFO) + def staging_check(member_func: typing.Callable) -> typing.Callable: """Decorator for checking if requested attribute has uncommitted changes""" @@ -143,19 +144,22 @@ def __init__( self._user_config.offline.cache.joinpath("staging.json") ) - # Recover any locally staged changes if not read-only - self._staging: dict[str, typing.Any] = ( - {} if _read_only else self._get_local_staged() - ) - - self._staging |= kwargs - self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token}", "User-Agent": f"Simvue Python client {__version__}", } - def _get_local_staged(self) -> dict[str, typing.Any]: + self._staging: dict[str, typing.Any] = {} + + if not self._identifier.startswith("offline_") and self._read_only: + self._staging = self._get() + + # Recover any locally staged changes if not read-only + self._staging |= {} if _read_only else self._get_local_staged() + + self._staging |= kwargs + + def _get_local_staged(self, obj_label: str | None = None) -> dict[str, typing.Any]: """Retrieve any locally staged data for this identifier""" if not self._local_staging_file.exists() or not self._identifier: return {} @@ -163,7 +167,30 @@ def 
_get_local_staged(self) -> dict[str, typing.Any]: with self._local_staging_file.open() as in_f: _staged_data = json.load(in_f) - return _staged_data.get(self._label, {}).get(self._identifier, {}) + return _staged_data.get(obj_label or self._label, {}).get(self._identifier, {}) + + def _stage_to_other(self, obj_label: str, key: str, value: typing.Any) -> None: + """Stage a change to another object type""" + with self._local_staging_file.open() as in_f: + _staged_data = json.load(in_f) + + if key not in _staged_data[obj_label]: + _staged_data[obj_label][key] = value + return + + if isinstance(_staged_data[obj_label][key], list): + if not _staged_data[obj_label].get(key): + _staged_data[obj_label][key] = [] + _staged_data[obj_label][key] += value + elif isinstance(_staged_data[obj_label][key], dict): + if not _staged_data[obj_label].get(key): + _staged_data[obj_label][key] = {} + _staged_data[obj_label][key] |= value + else: + _staged_data[obj_label][key] = value + + with self._local_staging_file.open("w") as out_f: + json.dump(_staged_data, out_f, indent=2) def _get_attribute(self, attribute: str, *default) -> typing.Any: # In the case where the object is read-only, staging is the data @@ -175,7 +202,13 @@ def _get_attribute(self, attribute: str, *default) -> typing.Any: ) if (_attribute_is_property and _state_is_read_only) or _offline_state: - return self._staging[attribute] + try: + return self._staging[attribute] + except KeyError as e: + raise AttributeError( + f"Could not retrieve attribute '{attribute}' " + f"for {self._label} '{self._identifier}' from cached data" + ) from e try: return self._get()[attribute] @@ -234,7 +267,7 @@ def ids( @classmethod def get( - cls, count: int | None = None, offset: int | None = None, **kwargs + cls, *, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: _class_instance = cls(read_only=True) if (_data := cls._get_all_objects(count, offset, 
**kwargs).get("data")) is None: @@ -276,12 +309,6 @@ def _get_all_objects( scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", ) - if not isinstance(_json_response, dict): - raise RuntimeError( - f"Expected dict from JSON response during {_class_instance.__class__.__name__.lower()}s retrieval " - f"but got '{type(_json_response)}'" - ) - return _json_response def read_only(self, is_read_only: bool) -> None: @@ -299,6 +326,9 @@ def commit(self) -> None: raise AttributeError("Cannot commit object in 'read-only' mode") if self._offline: + self._logger.debug( + f"Writing updates to staging file for {self._label} '{self.id}': {self._staging}" + ) _offline_dir: pathlib.Path = self._user_config.offline.cache _offline_file = _offline_dir.joinpath("staging.json") self._cache() @@ -307,8 +337,14 @@ def commit(self) -> None: # Initial commit is creation of object # if staging is empty then we do not need to use PUT if not self._identifier or self._identifier.startswith("offline_"): + self._logger.debug( + f"Posting from staged data for {self._label} '{self.id}': {self._staging}" + ) self._post(**self._staging) elif self._staging: + self._logger.debug( + f"Pushing updates from staged data for {self._label} '{self.id}': {self._staging}" + ) self._put(**self._staging) # Clear staged changes @@ -342,11 +378,6 @@ def _post(self, **kwargs) -> dict[str, typing.Any]: scenario=f"Creation of {self._label} '{kwargs}'", ) - if not isinstance(_json_response, dict): - raise RuntimeError( - f"Expected dictionary from JSON response during {self._label} creation " - f"but got '{type(_json_response)}'" - ) if _id := _json_response.get("id"): self._logger.debug("'%s' created successfully", _id) self._identifier = _id @@ -371,22 +402,24 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: scenario=f"Creation of {self._label} '{self._identifier}", ) - if not isinstance(_json_response, dict): - raise RuntimeError( - f"Expected dictionary from JSON response during {self._label} 
modification " - f"but got '{type(_json_response)}'" - ) self._logger.debug("'%s' modified successfully", self._identifier) return _json_response - def delete(self, **kwargs) -> dict[str, typing.Any]: + def delete( + self, _linked_objects: list[str] | None = None, **kwargs + ) -> dict[str, typing.Any]: if self._get_local_staged(): with self._local_staging_file.open() as in_f: _local_data = json.load(in_f) _local_data[self._label].pop(self._identifier, None) + # If this object has information within other object types + # (e.g. runs can have metrics) ensure this is deleted too + for obj_type in _linked_objects or []: + _local_data[obj_type].pop(self._identifier, None) + with self._local_staging_file.open("w") as out_f: json.dump(_local_data, out_f, indent=2) @@ -403,14 +436,11 @@ def delete(self, **kwargs) -> dict[str, typing.Any]: ) self._logger.debug("'%s' deleted successfully", self._identifier) - if not isinstance(_json_response, dict): - raise RuntimeError( - f"Expected dictionary from JSON response during {self._label} deletion " - f"but got '{type(_json_response)}'" - ) return _json_response - def _get(self, **kwargs) -> dict[str, typing.Any]: + def _get( + self, allow_parse_failure: bool = False, **kwargs + ) -> dict[str, typing.Any]: if self._identifier.startswith("offline_"): return self._get_local_staged() @@ -426,6 +456,7 @@ def _get(self, **kwargs) -> dict[str, typing.Any]: _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], + allow_parse_failure=allow_parse_failure, scenario=f"Retrieval of {self._label} '{self._identifier}'", ) self._logger.debug("'%s' retrieved successfully", self._identifier) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 0e2c10af..7d974843 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -144,8 +144,12 @@ def ttl(self, time_seconds: int) -> None: """Update the retention period for this folder""" self._staging["ttl"] = 
time_seconds - def delete(self, *, recursive: bool, delete_runs: bool) -> dict[str, typing.Any]: - return super().delete(recursive=recursive, runs=delete_runs) + def delete( + self, *, recursive: bool, delete_runs: bool, runs_only: bool + ) -> dict[str, typing.Any]: + return super().delete( + recursive=recursive, runs=delete_runs, runs_only=runs_only + ) @pydantic.validate_call diff --git a/simvue/api/objects/metric.py b/simvue/api/objects/metric.py deleted file mode 100644 index 33fe3609..00000000 --- a/simvue/api/objects/metric.py +++ /dev/null @@ -1,12 +0,0 @@ -import typing -from .base import SimvueObject - - -class Metrics(SimvueObject): - def __init__( - self, - run_identifier: typing.Optional[str] = None, - read_only: bool = False, - **kwargs, - ) -> None: - super().__init__(run_identifier, read_only, **kwargs) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 57b1dc50..dc977334 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -1,12 +1,18 @@ import http import typing +import msgpack import pydantic import datetime from .base import SimvueObject, staging_check, Visibility, write_only -from simvue.api.request import get as sv_get, put as sv_put, get_json_from_response +from simvue.api.request import ( + get as sv_get, + put as sv_put, + get_json_from_response, + post as sv_post, +) from simvue.api.url import URL -from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT +from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT, EventSet, MetricSet Status = typing.Literal[ "lost", "failed", "completed", "terminated", "running", "created" @@ -33,6 +39,12 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: self.visibility = Visibility(self) super().__init__(identifier, **kwargs) + self._staged_metrics: list[dict[str, str | dict | int]] = ( + self._get_local_staged("metrics").get(self._identifier) # type: ignore + if self._identifier + else [] + ) + @classmethod 
@pydantic.validate_call def new( @@ -51,6 +63,10 @@ def new( def name(self) -> str: return self._get_attribute("name") + def delete(self, **kwargs) -> dict[str, typing.Any]: + # Any metric entries need to also be removed + return super().delete(_linked_objects=["metrics", "events"], **kwargs) + @name.setter @write_only @pydantic.validate_call @@ -206,10 +222,11 @@ def started(self, started: datetime.datetime) -> None: @property @staging_check - def endtime(self) -> datetime.datetime: - return datetime.datetime.strptime( - self._get_attribute("endtime"), DATETIME_FORMAT - ) + def endtime(self) -> datetime.datetime | None: + _endtime: str | None = self._get_attribute("endtime") + if not _endtime: + return None + return datetime.datetime.strptime(_endtime, DATETIME_FORMAT) @endtime.setter @write_only @@ -217,33 +234,67 @@ def endtime(self) -> datetime.datetime: def endtime(self, endtime: datetime.datetime) -> None: self._staging["endtime"] = endtime.strftime(DATETIME_FORMAT) + @property + def metrics( + self, + ) -> typing.Generator[tuple[str, dict[str, int | float | bool]], None, None]: + if self._staged_metrics: + self._logger.warning(f"Uncommitted metrics found for run '{self.id}'") + yield from self._get_attribute("metrics").items() + + @pydantic.validate_call + def log_entries( + self, + entry_type: typing.Literal["metrics", "events"], + entries: list[MetricSet | EventSet], + ) -> None: + """Add entries to server or local staging""" + if not self._identifier: + raise RuntimeError("Cannot stage metrics, no identifier found") + + _validated_entries: list[dict] = [entry.model_dump() for entry in entries] + + if self._offline or self._identifier.startswith("offline_"): + self._stage_to_other(entry_type, self._identifier, _validated_entries) + return + + _url = URL(self._user_config.server.url) / entry_type + _data = {entry_type: _validated_entries, "run": self._identifier} + _data_bin = msgpack.packb(_data, use_bin_type=True) + + _msgpack_header = self._headers | 
{"Content-Type": "application/msgpack"} + + _response = sv_post( + f"{_url}", headers=_msgpack_header, data=_data_bin, is_json=False + ) + + get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Logging of {entry_type} '{entries}' for run '{self.id}'", + allow_parse_failure=True, + ) + @write_only def send_heartbeat(self) -> dict[str, typing.Any] | None: if self._offline or not self._identifier: return None _url = self._base_url - _url /= f"heartbeat/{self._identifier}" + _url /= f"{self._identifier}/heartbeat" _response = sv_put(f"{_url}", headers=self._headers, data={}) _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario="Retrieving abort status", + scenario="Retrieving heartbeat state", ) - if not isinstance(_json_response, dict): - raise RuntimeError( - f"Expected dictionary from JSON response during {self._label} abort status check " - f"but got '{type(_json_response)}'" - ) return _json_response @property def _abort_url(self) -> URL | None: if not self._identifier: return None - _url = self._base_url - _url /= f"abort/{self._identifier}" - return _url + return self.url / "abort" @property def _artifact_url(self) -> URL | None: @@ -264,11 +315,6 @@ def abort_trigger(self) -> bool: expected_status=[http.HTTPStatus.OK], scenario=f"Retrieving abort status for run '{self.id}'", ) - if not isinstance(_json_response, dict): - raise RuntimeError( - f"Expected dictionary from JSON response during {self._label} abort status check " - f"but got '{type(_json_response)}'" - ) return _json_response.get("status", False) @property @@ -283,15 +329,9 @@ def artifacts(self) -> list[dict[str, typing.Any]]: response=_response, expected_status=[http.HTTPStatus.OK], scenario=f"Retrieving artifacts for run '{self.id}'", + expected_type=list, ) - if not isinstance(_json_response, list): - raise RuntimeError( - f"Expected list from JSON response during {self._label} " - f"'{self.id}' 
artifact retrieval " - f"but got '{type(_json_response)}'" - ) - return _json_response @pydantic.validate_call @@ -299,9 +339,9 @@ def abort(self, reason: str) -> dict[str, typing.Any]: if not self._abort_url: return {} - _url = self._abort_url / self._identifier - - _response = sv_put(f"{_url}", headers=self._headers, data={"reason": reason}) + _response = sv_put( + f"{self._abort_url}", headers=self._headers, data={"reason": reason} + ) _json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], @@ -309,10 +349,4 @@ def abort(self, reason: str) -> dict[str, typing.Any]: response=_response, ) - if not isinstance(_json_response, dict): - raise RuntimeError( - "Expected dict from JSON response during abort of " - f"run but got '{type(_json_response)}'" - ) - return _json_response diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 6f11af33..872f2131 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -7,12 +7,15 @@ class StorageBase(SimvueObject): def __init__( - self, identifier: typing.Optional[str] = None, read_only: bool = False, **kwargs + self, + identifier: typing.Optional[str] = None, + _read_only: bool = False, + **kwargs, ) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "storage" self._endpoint = self._label - super().__init__(identifier, read_only, **kwargs) + super().__init__(identifier, _read_only=_read_only, **kwargs) self.status = Status(self) @classmethod diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 8ee75188..1fd6d8e0 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -50,14 +50,9 @@ def get( response=_response, expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + expected_type=list, ) - if not isinstance(_json_response, list): - raise 
RuntimeError( - f"Expected list from JSON response during {_class_instance.__class__.__name__.lower()}s retrieval " - f"but got '{type(_json_response)}'" - ) - _out_dict: dict[str, FileStorage | S3Storage] = {} for _entry in _json_response: diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 8af118a8..4bb5b69b 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -56,7 +56,7 @@ def description(self, description: str) -> None: @classmethod def get( cls, *, count: int | None = None, offset: int | None = None, **kwargs - ) -> dict[str, "SimvueObject"]: + ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: # There are currently no tag filters kwargs.pop("filters", None) diff --git a/simvue/api/request.py b/simvue/api/request.py index 11999e3d..eb238c5b 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -12,6 +12,7 @@ import typing import http +from codecarbon.external.logger import logging import requests from tenacity import ( retry, @@ -88,6 +89,7 @@ def post( else: data_sent = data + logging.debug(f"POST: {url}\n\tdata={data_sent}") response = requests.post( url, headers=headers, data=data_sent, timeout=DEFAULT_API_TIMEOUT ) @@ -140,6 +142,8 @@ def put( else: data_sent = data + logging.debug(f"PUT: {url}\n\tdata={data_sent}") + return requests.put(url, headers=headers, data=data_sent, timeout=timeout) @@ -171,6 +175,7 @@ def get( requests.Response response from executing GET """ + logging.debug(f"GET: {url}\n\tparams={params}") return requests.get(url, headers=headers, timeout=timeout, params=params) @@ -204,6 +209,7 @@ def delete( requests.Response response from executing DELETE """ + logging.debug(f"DELETE: {url}\n\tparams={params}") return requests.delete(url, headers=headers, timeout=timeout, params=params) @@ -212,10 +218,11 @@ def get_json_from_response( scenario: str, response: requests.Response, allow_parse_failure: bool = False, + expected_type: typing.Literal[list, dict] = dict, ) -> 
typing.Union[dict, list]: try: json_response = response.json() - json_response = json_response or {} + json_response = json_response or ({} if expected_type is dict else []) decode_error = "" except json.JSONDecodeError as e: json_response = {} if allow_parse_failure else None @@ -224,12 +231,15 @@ def get_json_from_response( error_str = f"{scenario} failed for url '{response.url}'" if (_status_code := response.status_code) in expected_status: - if json_response is not None: + if not isinstance(json_response, expected_type): + details = f"expected type '{expected_type.__name__}' but got '{type(json_response).__name__}'" + elif json_response is not None: return json_response - details = f"could not request JSON response: {decode_error}" + else: + details = f"could not request JSON response: {decode_error}" else: error_str += f" with status {_status_code}" - details = (json_response or {}).get("detail") + details = (json_response or ({} if expected_type is dict else [])).get("detail") try: txt_response = response.text diff --git a/simvue/client.py b/simvue/client.py index 6631fbc4..4a6a2bb7 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -10,6 +10,7 @@ import json import logging import os +import pathlib import typing import http import pydantic @@ -36,57 +37,16 @@ CONCURRENT_DOWNLOADS = 10 DOWNLOAD_CHUNK_SIZE = 8192 -DOWNLOAD_TIMEOUT = 30 logger = logging.getLogger(__file__) -def downloader(job: dict[str, str]) -> bool: - """Download a job output to the location specified within the definition - - Parameters - ---------- - job : dict[str, str] - a dictionary containing information on URL and path for a given job - this information is then used to perform the download - - Returns - ------- - bool - whether the file was created successfully - """ - # Check to make sure all requirements have been retrieved first - for key in ("url", "path", "filename"): - if key not in job: - logger.warning(f"Expected key '{key}' during job object retrieval") - raise 
RuntimeError( - "Failed to retrieve required information during job download" - ) - - try: - response = requests.get(job["url"], stream=True, timeout=DOWNLOAD_TIMEOUT) - response = requests.get(job["url"], stream=True, timeout=DOWNLOAD_TIMEOUT) - except requests.exceptions.RequestException: - return False - - total_length = response.headers.get("content-length") - total_length = response.headers.get("content-length") - - save_location: str = os.path.join(job["path"], job["filename"]) - - if not os.path.isdir(job["path"]): - raise ValueError(f"Cannot write to '{job['path']}', not a directory.") - - logger.debug(f"Writing file '{save_location}'") - - with open(save_location, "wb") as fh: - if total_length is None: - fh.write(response.content) - else: - for data in response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE): - fh.write(data) - - return os.path.exists(save_location) +def _download_artifact_to_file( + artifact: Artifact, output_dir: pathlib.Path | None +) -> None: + _file_name = os.path.basename(artifact.name) + _output_file = (output_dir or pathlib.Path.cwd()).joinpath(_file_name) + artifact.download(_output_file) class Client: @@ -152,14 +112,20 @@ def get_run_id_from_name( """ _runs = Run.get(filters=json.dumps([f"name == {name}"])) - if len(_runs) == 0: - raise RuntimeError("Could not collect ID - no run found with this name.") - if len(_runs) > 1: + try: + _id, _ = next(_runs) + except StopIteration as e: + raise RuntimeError( + "Could not collect ID - no run found with this name." + ) from e + + try: + next(_runs) raise RuntimeError( "Could not collect ID - more than one run exists with this name." 
) - - _id, _ = next(_runs) + except StopIteration: + pass return _id @@ -212,13 +178,11 @@ def get_runs( metrics: bool = False, alerts: bool = False, metadata: bool = False, - output_format: typing.Literal["dict", "dataframe"] = "dict", + output_format: typing.Literal["dict", "objects", "dataframe"] = "objects", count_limit: typing.Optional[pydantic.PositiveInt] = 100, start_index: typing.Optional[pydantic.PositiveInt] = 0, show_shared: bool = False, - ) -> typing.Union[ - DataFrame, list[dict[str, typing.Union[int, str, float, None]]], None - ]: + ) -> typing.Union[DataFrame, list[Run], None]: """Retrieve all runs matching filters. Parameters @@ -235,9 +199,9 @@ def get_runs( alerts : bool, optional whether to include alert information in the response. Default False. - output_format : Literal['dict', 'dataframe'], optional - the structure of the response, either a dictionary or a dataframe. - Default is 'dict'. Pandas must be installed for 'dataframe'. + output_format : Literal['objects', 'dataframe'], optional + the structure of the response, either a dictionary of objects or a dataframe. + Default is 'objects'. Pandas must be installed for 'dataframe'. count_limit : int, optional maximum number of entries to return. Default is 100. 
start_index : int, optional @@ -261,21 +225,32 @@ def get_runs( if not show_shared: filters = (filters or []) + ["user == self"] - params = { + if output_format == "objects": + return dict( + Run.get( + count=count_limit, + offset=start_index, + filters=json.dumps(filters), + return_basic=True, + return_metrics=metrics, + return_alerts=alerts, + return_system=system, + return_metadata=metadata, + ) + ) + _params: dict[str, bool | str] = { "filters": json.dumps(filters), "return_basic": True, "return_metrics": metrics, "return_alerts": alerts, "return_system": system, "return_metadata": metadata, - "count": count_limit, - "start": start_index, } response = requests.get( f"{self._user_config.server.url}/runs", headers=self._headers, - params=params, + params=_params, ) response.raise_for_status() @@ -289,18 +264,13 @@ def get_runs( response=response, ) - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during retrieval of runs " - f"but got '{type(json_response)}'" - ) + if (response_data := json_response.get("data")) is None: + raise RuntimeError("Failed to retrieve runs data") - if (response_data := json_response.get("data")) is not None: + if output_format == "dict": return response_data - elif output_format == "dataframe": - return to_dataframe(response.json()) - else: - raise RuntimeError("Failed to retrieve runs data") + + return to_dataframe(response_data) @prettify_pydantic @pydantic.validate_call @@ -324,6 +294,27 @@ def delete_run(self, run_id: str) -> typing.Optional[dict]: """ return Run(identifier=run_id).delete() or None + def _get_folder_from_path(self, path: str) -> typing.Optional[Folder]: + """Retrieve folder for the specified path if found + + Parameters + ---------- + path : str + the path to search for + + Returns + ------- + Folder | None + if a match is found, return the folder + """ + _folders = Folder.get(filters=json.dumps([f"path == {path}"])) + + try: + _, _folder = next(_folders) + 
return _folder # type: ignore + except StopIteration: + return None + def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: """Retrieve folder identifier for the specified path if found @@ -337,10 +328,9 @@ def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: str | None if a match is found, return the identifier of the folder """ - _folders = Folder.get(filters=json.dumps([f"path == {path}"])) - _id, _ = next(_folders) + _ids = Folder.ids(filters=json.dumps([f"path == {path}"])) - return _id + return _ids[0] if _ids else None @prettify_pydantic @pydantic.validate_call @@ -365,30 +355,10 @@ def delete_runs( RuntimeError if deletion fails due to server request error """ - folder_id = self._get_folder_id_from_path(folder_path) - - if not folder_id: + if not (_folder := self._get_folder_from_path(folder_path)): raise ValueError(f"Could not find a folder matching '{folder_path}'") - - params: dict[str, bool] = {"runs_only": True, "runs": True} - - response = requests.delete( - f"{self._user_config.server.url}/folders/{folder_id}", - headers=self._headers, - params=params, - ) - - if response.status_code == http.HTTPStatus.OK: - if runs := response.json().get("runs", []): - logger.debug(f"Runs from '{folder_path}' deleted successfully: {runs}") - else: - logger.debug("Folder empty, no runs deleted.") - return runs - - raise RuntimeError( - f"Deletion of runs from folder '{folder_path}' failed" - f"with code {response.status_code}: {response.text}" - ) + _delete = _folder.delete(runs_only=True, delete_runs=True, recursive=False) + return _delete.get("runs", []) @prettify_pydantic @pydantic.validate_call @@ -435,7 +405,7 @@ def delete_folder( "folder does not exist." 
) _response = Folder(identifier=folder_id).delete( - delete_runs=remove_runs, recursive=recursive + delete_runs=remove_runs, recursive=recursive, runs_only=False ) return _response.get("runs", []) @@ -450,11 +420,11 @@ def delete_alert(self, alert_id: str) -> None: alert_id : str the unique identifier for the alert """ - Alert(identifier=alert_id).delete() + Alert(identifier=alert_id).delete() # type: ignore @prettify_pydantic @pydantic.validate_call - def list_artifacts(self, run_id: str) -> list[dict[str, typing.Any]]: + def list_artifacts(self, run_id: str) -> dict[str, Artifact]: """Retrieve artifacts for a given run Parameters @@ -464,7 +434,7 @@ def list_artifacts(self, run_id: str) -> list[dict[str, typing.Any]]: Returns ------- - list[dict[str, typing.Any]] + dict[str, Artifact] list of relevant artifacts Raises @@ -472,33 +442,12 @@ def list_artifacts(self, run_id: str) -> list[dict[str, typing.Any]]: RuntimeError if retrieval of artifacts failed when communicating with the server """ - params: dict[str, str] = {"runs": json.dumps([run_id])} + return dict(Artifact.get(runs=json.dumps([run_id]))) # type: ignore - response: requests.Response = requests.get( - f"{self._user_config.server.url}/artifacts", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of artifacts for run '{run_id}", - response=response, - ) - - if not isinstance(json_response, list): - raise RuntimeError( - "Expected list of entries from JSON response during artifact " - f"retrieval but got '{type(json_response)}'" - ) - return json_response - - def _retrieve_artifact_from_server( - self, - run_id: str, - name: str, - ) -> typing.Union[dict, list]: - return Artifact.get(runs=[run_id], name=name) + def _retrieve_artifacts_from_server( + self, run_id: str, name: str, count: int | None = None + ) -> typing.Generator[tuple[str, Artifact], None, None]: + return Artifact.get(runs=[run_id], 
name=name, count=count) # type: ignore @prettify_pydantic @pydantic.validate_call @@ -546,31 +495,30 @@ def get_artifact( RuntimeError if retrieval of artifact from the server failed """ - json_response = self._retrieve_artifact_from_server(run_id, name) + _artifacts = self._retrieve_artifacts_from_server(run_id, name, count=1) - if not json_response: - return None + try: + _id, _artifact = next(_artifacts) + except StopIteration as e: + raise ValueError(f"No artifact '{name}' found for run '{run_id}'") from e - url = json_response[0]["url"] - mimetype = json_response[0]["type"] - url = json_response[0]["url"] - mimetype = json_response[0]["type"] + _content = _artifact.download_content() - response = requests.get(url, timeout=DOWNLOAD_TIMEOUT) - response.raise_for_status() - - content: typing.Optional[DeserializedContent] = deserialize_data( - response.content, mimetype, allow_pickle + _deserialized_content: typing.Optional[DeserializedContent] = deserialize_data( + _content, _artifact.type, allow_pickle ) # Numpy array return means just 'if content' will be ambiguous # so must explicitly check if None - return response.content if content is None else content + return _content if _deserialized_content is None else _deserialized_content @prettify_pydantic @pydantic.validate_call def get_artifact_as_file( - self, run_id: str, name: str, path: typing.Optional[str] = None + self, + run_id: str, + name: str, + output_dir: typing.Optional[pydantic.DirectoryPath] = None, ) -> None: """Retrieve the specified artifact in the form of a file @@ -582,7 +530,7 @@ def get_artifact_as_file( unique identifier for the run to be queried name : str the name of the artifact to be retrieved - path : str | None, optional + output_dir: str | None, optional path to download retrieved content to, the default of None uses the current working directory. 
@@ -592,83 +540,25 @@ def get_artifact_as_file( if there was a failure during retrieval of information from the server """ - json_response = self._retrieve_artifact_from_server(run_id, name) - - if not json_response: - raise RuntimeError( - f"Failed to download artifact '{name}' from run '{run_id}'," - " no results found." - ) - - if not (url := json_response[0].get("url")): - raise RuntimeError( - "Failed to download artifacts, " - "expected URL for retrieval but server " - "did not return result" - ) - - downloader( - { - "url": url, - "filename": os.path.basename(name), - "path": path or os.getcwd(), - } - ) - - def _assemble_artifact_downloads( - self, - request_response: requests.Response, - startswith: typing.Optional[str], - endswith: typing.Optional[str], - contains: typing.Optional[str], - out_path: str, - ) -> list[dict[str, str]]: - downloads: list[dict[str, str]] = [] - - for item in request_response.json(): - for key in ("url", "name"): - if key not in item: - raise RuntimeError( - f"Expected key '{key}' in request " - "response during file retrieval" - ) - - if startswith and not item["name"].startswith(startswith): - continue - if contains and contains not in item["name"]: - continue - if endswith and not item["name"].endswith(endswith): - continue - - file_name: str = os.path.basename(item["name"]) - file_dir: str = os.path.join(out_path, os.path.dirname(item["name"])) + _artifacts = self._retrieve_artifacts_from_server(run_id, name) - job: dict[str, str] = { - "url": item["url"], - "filename": file_name, - "path": file_dir, - } - - if os.path.isfile(file_path := os.path.join(file_dir, file_name)): - logger.warning(f"File '{file_path}' exists, skipping") - continue - - os.makedirs(job["path"], exist_ok=True) - - downloads.append(job) + try: + _id, _artifact = next(_artifacts) + except StopIteration: + raise ValueError(f"No artifact '{name}' found for run '{run_id}'") - return downloads + _download_artifact_to_file(_artifact, output_dir) 
@prettify_pydantic @pydantic.validate_call def get_artifacts_as_files( self, run_id: str, - category: typing.Optional[typing.Literal["input", "output", "code"]] = None, - path: typing.Optional[str] = None, - startswith: typing.Optional[str] = None, - contains: typing.Optional[str] = None, - endswith: typing.Optional[str] = None, + category: typing.Literal["input", "output", "code"] | None = None, + output_dir: pydantic.DirectoryPath | None = None, + startswith: str | None = None, + contains: str | None = None, + endswith: str | None = None, ) -> None: """Retrieve artifacts from the given run as a set of files @@ -676,7 +566,7 @@ def get_artifacts_as_files( ---------- run_id : str the unique identifier for the run - path : str | None, optional + output_dir : str | None, optional location to download files to, the default of None will download them to the current working directory startswith : str, optional @@ -691,36 +581,21 @@ def get_artifacts_as_files( RuntimeError if there was a failure retrieving artifacts from the server """ - params: dict[str, typing.Optional[str]] = {"category": category} - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/runs/{run_id}/artifacts", - headers=self._headers, - params=params, - ) - - get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Download of artifacts for run '{run_id}'", - response=response, - ) - - downloads: list[dict[str, str]] = self._assemble_artifact_downloads( - request_response=response, - startswith=startswith, - endswith=endswith, - contains=contains, - out_path=path or os.getcwd(), - ) + _artifacts: typing.Generator[tuple[str, Artifact], None, None] = Artifact.get( + runs=[run_id], category=category + ) # type: ignore with ThreadPoolExecutor(CONCURRENT_DOWNLOADS) as executor: - futures = [executor.submit(downloader, item) for item in downloads] - for future, download in zip(as_completed(futures), downloads): + futures = [ + 
executor.submit(_download_artifact_to_file, artifact, output_dir) + for _, artifact in _artifacts + ] + for future, (_, artifact) in zip(as_completed(futures), _artifacts): try: future.result() except Exception as e: raise RuntimeError( - f"Download of file {download['url']} " + f"Download of file {artifact.storage_url} " f"failed with exception: {e}" ) from e @@ -728,7 +603,7 @@ def get_artifacts_as_files( @pydantic.validate_call def get_folder( self, folder_path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> Folder | None: """Retrieve a folder by identifier Parameters @@ -739,7 +614,7 @@ def get_folder( Returns ------- - dict[str, typing.Any] | None + Folder | None data for the requested folder if it exists else None Raises @@ -747,9 +622,15 @@ def get_folder( RuntimeError if there was a failure when retrieving information from the server """ - if not (_folders := self.get_folders(filters=[f"path == {folder_path}"])): + _folders: typing.Generator[tuple[str, Folder], None, None] = Folder.get( + path=folder_path + ) # type: ignore + + try: + _, _folder = next(_folders) + return _folder + except StopIteration: return None - return _folders[0] @pydantic.validate_call def get_folders( @@ -758,7 +639,7 @@ def get_folders( filters: typing.Optional[list[str]] = None, count: pydantic.PositiveInt = 100, start_index: pydantic.NonNegativeInt = 0, - ) -> list[dict[str, typing.Any]]: + ) -> typing.Generator[tuple[str, Folder], None, None]: """Retrieve folders from the server Parameters @@ -772,48 +653,21 @@ def get_folders( Returns ------- - list[dict[str, Any]] - all data for folders matching the filter request + Generator[str, Folder] + all data for folders matching the filter request in form (id, Folder) Raises ------ RuntimeError if there was a failure retrieving data from the server """ - params: dict[str, typing.Union[str, int]] = { - "filters": json.dumps(filters or []), - "count": count, - "start": 
start_index, - } - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/folders", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario="Retrieval of folders", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during folder retrieval " - f"but got '{type(json_response)}'" - ) - - if (data := json_response.get("data")) is None: - raise RuntimeError( - "Expected key 'data' in response during folder retrieval" - ) - - return data + return Folder.get( + filters=json.dumps(filters or []), count=count, offset=start_index + ) # type: ignore @prettify_pydantic @pydantic.validate_call - def get_metrics_names(self, run_id: str) -> list[str]: + def get_metrics_names(self, run_id: str) -> typing.Generator[str, None, None]: """Return information on all metrics within a run Parameters @@ -823,7 +677,7 @@ def get_metrics_names(self, run_id: str) -> list[str]: Returns ------- - list[str] + Generator[str, None, None] names of metrics in the given run Raises @@ -831,27 +685,10 @@ def get_metrics_names(self, run_id: str) -> list[str]: RuntimeError if there was a failure retrieving information from the server """ - params = {"runs": json.dumps([run_id])} - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/metrics/names", - headers=self._headers, - params=params, - ) - - json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Request for metric names for run '{run_id}'", - response=response, - ) - - if not isinstance(json_response, list): - raise RuntimeError( - "Expected list from JSON response during folder retrieval " - f"but got '{type(json_response)}'" - ) + _run = Run(identifier=run_id) - return json_response + for metric in _run.metrics: + yield metric.name def _get_run_metrics_from_server( self, @@ -881,11 +718,6 @@ 
def _get_run_metrics_from_server( response=metrics_response, ) - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response for metric retrieval" - ) - return json_response @prettify_pydantic @@ -1144,12 +976,7 @@ def get_events( response=response, ) - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response when retrieving events" - ) - - return response.json().get("data", []) + return json_response.get("data", []) @prettify_pydantic @pydantic.validate_call @@ -1204,7 +1031,7 @@ def get_tags( *, start_index: typing.Optional[pydantic.NonNegativeInt] = None, count_limit: typing.Optional[pydantic.PositiveInt] = None, - ) -> list[str]: + ) -> typing.Generator[Tag, None, None]: """Retrieve tags Parameters @@ -1216,15 +1043,15 @@ def get_tags( Returns ------- - list[str] - a list of all tag ids for this run + list[Tag] + a list of all tags for this run Raises ------ RuntimeError if there was a failure retrieving data from the server """ - return Tag.ids(count=count_limit, offset=start_index) + return Tag.get(count=count_limit, offset=start_index) @prettify_pydantic @pydantic.validate_call diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 5c03c038..4600f062 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -32,6 +32,8 @@ class ServerSpecifications(pydantic.BaseModel): @pydantic.field_validator("url") @classmethod def url_to_api_url(cls, v: typing.Any) -> str: + if f"{v}".endswith("/api"): + return URL(f"{v}") _url = URL(f"{v}") / "api" return f"{_url}" diff --git a/simvue/converters.py b/simvue/converters.py index cdc3135b..ff1ca7ce 100644 --- a/simvue/converters.py +++ b/simvue/converters.py @@ -8,6 +8,8 @@ import typing import pandas +import flatdict + if typing.TYPE_CHECKING: from pandas import DataFrame @@ -195,44 +197,30 @@ def to_dataframe(data): """ metadata = [] + system_columns = [] for run in data: - if "metadata" in 
run: - for item in run["metadata"]: - if item not in metadata: - metadata.append(item) + for item in run.get("metadata", []): + if item not in metadata: + metadata.append(item) + for item, value in (run.get("system", {}) or {}).items(): + if isinstance(value, dict): + system_columns += [ + col_name + for sub_item in value.keys() + if (col_name := f"system.{item}.{sub_item}") not in system_columns + ] + else: + if f"system.{item}" not in system_columns: + system_columns.append(f"system.{item}") + + columns = {f"metadata.{column}": [] for column in metadata} + columns |= {column: [] for column in system_columns} - columns = {} for run in data: - for item in ("name", "status", "folder", "created", "started", "ended"): - if item not in columns: - columns[item] = [] - if item in run: - columns[item].append(run[item]) - else: - columns[item].append(None) - - if "system" in run: - for section in run["system"]: - if section in ("cpu", "gpu", "platform"): - for item in run["system"][section]: - if "system.%s.%s" % (section, item) not in columns: - columns["system.%s.%s" % (section, item)] = [] - columns["system.%s.%s" % (section, item)].append( - run["system"][section][item] - ) - else: - if "system.%s" % section not in columns: - columns["system.%s" % section] = [] - columns["system.%s" % section].append(run["system"][section]) - - if "metadata" in run: - for item in metadata: - if "metadata.%s" % item not in columns: - columns["metadata.%s" % item] = [] - if item in run["metadata"]: - columns["metadata.%s" % item].append(run["metadata"][item]) - else: - columns["metadata.%s" % item].append(None) + run_info = flatdict.FlatDict(run, delimiter=".").as_dict() + + for column in columns: + columns[column].append(run_info.get(column)) return pandas.DataFrame(data=columns) diff --git a/simvue/eco.py b/simvue/eco.py index 3964f53f..7a4d47bf 100644 --- a/simvue/eco.py +++ b/simvue/eco.py @@ -81,3 +81,4 @@ def set_measure_interval(self, interval: int) -> None: def post_init(self) -> 
None: self._set_from_conf(self._simvue_run._id, "experiment_id") self._set_from_conf(self._simvue_run._name, "experiment_name") + self.start() diff --git a/simvue/exception.py b/simvue/exception.py new file mode 100644 index 00000000..58bafb10 --- /dev/null +++ b/simvue/exception.py @@ -0,0 +1,22 @@ +""" +Simvue Exception Types +====================== + +Custom exceptions for handling of Simvue request scenarions. + +""" + + +class ObjectNotFoundError(Exception): + """For failure retrieving Simvue object from server""" + + def __init__(self, obj_type: str, name: str) -> None: + super().__init__( + f"Failed to retrieve '{name}' of type '{obj_type}' " f"no such object" + ) + + +class SimvueRunError(RuntimeError): + """A special sub-class of runtime error specifically for Simvue run errors""" + + pass diff --git a/simvue/factory/proxy/remote.py b/simvue/factory/proxy/remote.py index e68b00a5..ed2e3e24 100644 --- a/simvue/factory/proxy/remote.py +++ b/simvue/factory/proxy/remote.py @@ -484,7 +484,7 @@ def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: @skip_if_failed("_aborted", "_suppress_errors", False) def get_abort_status(self) -> bool: - logger.debug("Retrieving alert status") + logger.debug("Retrieving abort status") try: response = get( diff --git a/simvue/metadata.py b/simvue/metadata.py index a92609bc..d9d9ae1c 100644 --- a/simvue/metadata.py +++ b/simvue/metadata.py @@ -11,11 +11,15 @@ import re import json import toml +import logging import importlib.metadata import pathlib +import flatdict from simvue.utilities import simvue_timestamp +logger = logging.getLogger(__file__) + def git_info(repository: str) -> dict[str, typing.Any]: """Retrieves metadata for the target git repository @@ -148,6 +152,62 @@ def _rust_env(repository: pathlib.Path) -> dict[str, typing.Any]: } +def _julia_env(repository: pathlib.Path) -> dict[str, typing.Any]: + """Retrieve a dictionary of Julia dependencies if a project file is available""" + julia_meta: dict[str, 
str] = {} + if (project_file := pathlib.Path(repository).joinpath("Project.toml")).exists(): + content = toml.load(project_file) + julia_meta |= { + f"julia.project.{key}": value + for key, value in content.items() + if not isinstance(value, dict) + } + julia_meta |= { + f"julia.environment.{key}": value + for key, value in content.get("compat", {}).items() + } + return julia_meta + + +def _node_js_env(repository: pathlib.Path) -> dict[str, typing.Any]: + js_meta: dict[str, str] = {} + if ( + project_file := pathlib.Path(repository).joinpath("package-lock.json") + ).exists(): + content = json.load(project_file.open()) + if (lfv := content["lockfileVersion"]) not in (1, 2, 3): + logger.warning( + f"Unsupported package-lock.json lockfileVersion {lfv}, ignoring JS project metadata" + ) + return {} + + js_meta |= { + f"javascript.project.{key}": value + for key, value in content.items() + if key in ("name", "version") + } + js_meta |= { + f"javascript.environment.{key.replace('@', '')}": value["version"] + for key, value in content.get( + "packages" if lfv in (2, 3) else "dependencies", {} + ).items() + if key and not value.get("dev", True) + } + return js_meta + + def environment(repository: pathlib.Path = pathlib.Path.cwd()) -> dict[str, typing.Any]: """Retrieve environment metadata""" - return _python_env(repository) | _rust_env(repository) + _environment_meta = flatdict.FlatDict( + _python_env(repository), delimiter="." + ).as_dict() + _environment_meta |= flatdict.FlatDict( + _rust_env(repository), delimiter="." + ).as_dict() + _environment_meta |= flatdict.FlatDict( + _julia_env(repository), delimiter="." + ).as_dict() + _environment_meta |= flatdict.FlatDict( + _node_js_env(repository), delimiter="." 
+ ).as_dict() + return _environment_meta diff --git a/simvue/models.py b/simvue/models.py index 2e47b440..31a81bb3 100644 --- a/simvue/models.py +++ b/simvue/models.py @@ -1,22 +1,62 @@ -from typing import Annotated, Dict, List, Optional, Union -from pydantic import BaseModel, Field, StringConstraints, PositiveInt +import datetime +import typing +import pydantic + FOLDER_REGEX: str = r"^/.*" NAME_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:]+$" METRIC_KEY_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:=><]+$" DATETIME_FORMAT: str = "%Y-%m-%d %H:%M:%S.%f" -MetadataKeyString = Annotated[str, StringConstraints(pattern=r"^[\w\-\s\.]+$")] -TagString = Annotated[str, StringConstraints(pattern=r"^[\w\-\s\.]+$")] -MetricKeyString = Annotated[str, StringConstraints(pattern=METRIC_KEY_REGEX)] +MetadataKeyString = typing.Annotated[ + str, pydantic.StringConstraints(pattern=r"^[\w\-\s\.]+$") +] +TagString = typing.Annotated[str, pydantic.StringConstraints(pattern=r"^[\w\-\s\.]+$")] +MetricKeyString = typing.Annotated[ + str, pydantic.StringConstraints(pattern=METRIC_KEY_REGEX) +] # Pydantic class to validate run.init() -class RunInput(BaseModel): - name: Optional[str] = Field(None, pattern=NAME_REGEX) - metadata: Optional[Dict[MetadataKeyString, Union[str, int, float, None]]] = None - tags: Optional[List[TagString]] = None - description: Optional[str] = None - folder: str = Field(pattern=FOLDER_REGEX) - status: Optional[str] = None - ttl: Optional[PositiveInt] = None +class RunInput(pydantic.BaseModel): + name: str | None = pydantic.Field(None, pattern=NAME_REGEX) + metadata: dict[MetadataKeyString, str | int | float | None] | None = None + tags: list[TagString] | None = None + description: str | None = None + folder: str = pydantic.Field(pattern=FOLDER_REGEX) + status: str | None = None + ttl: pydantic.PositiveInt | None = None + + +class MetricSet(pydantic.BaseModel): + time: pydantic.NonNegativeFloat | pydantic.NonNegativeInt + timestamp: str + step: pydantic.NonNegativeInt + values: 
dict[str, int | float | bool] + + @pydantic.field_validator("timestamp", mode="after") + @classmethod + def timestamp_str(cls, value: str) -> str: + try: + datetime.datetime.strptime(value, DATETIME_FORMAT) + except ValueError as e: + raise AssertionError( + f"Invalid timestamp, expected form '{DATETIME_FORMAT}'" + ) from e + return value + + +class EventSet(pydantic.BaseModel): + message: str + timestamp: str + + @pydantic.field_validator("timestamp", mode="after") + @classmethod + def timestamp_str(cls, value: str) -> str: + try: + datetime.datetime.strptime(value, DATETIME_FORMAT) + except ValueError as e: + raise AssertionError( + f"Invalid timestamp, expected form '{DATETIME_FORMAT}'" + ) from e + return value diff --git a/simvue/run.py b/simvue/run.py index e073c152..0ed55ec0 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -7,7 +7,6 @@ """ import contextlib -import json import logging import pathlib import mimetypes @@ -28,7 +27,6 @@ import uuid import click -import msgpack import psutil from simvue.api.objects.alert.fetch import Alert @@ -37,7 +35,6 @@ from .config.user import SimvueConfiguration -import simvue.api.request as sv_api from .factory.dispatch import Dispatcher from .executor import Executor @@ -404,67 +401,17 @@ def _create_dispatch_callback( if self._mode == "online" and not self._id: raise RuntimeError("Expected identifier for run") - if not self._user_config.server.url: + if not self._user_config.server.url or not self._sv_obj: raise RuntimeError("Cannot commence dispatch, run not initialised") - def _offline_dispatch_callback( + def _dispatch_callback( buffer: list[typing.Any], category: str, - run_id: typing.Optional[str] = self._id, - uuid: str = self._uuid, + run_obj: RunObject = self._sv_obj, ) -> None: - _offline_directory: pathlib.Path = self._user_config.offline.cache - if not _offline_directory.exists(): - logger.error( - f"Cannot write to offline directory '{_offline_directory}', directory not found." 
- ) - return - _directory = _offline_directory.joinpath(uuid) - - unique_id = time.time() - filename = _directory.joinpath(f"{category}-{unique_id}") - _data = {category: buffer, "run": run_id} - try: - with filename.open("w") as fh: - json.dump(_data, fh) - except Exception as err: - if self._suppress_errors: - logger.error( - "Got exception writing offline update for %s: %s", - category, - str(err), - ) - else: - raise err - - def _online_dispatch_callback( - buffer: list[typing.Any], - category: str, - url: str = self._user_config.server.url, - run_id: typing.Optional[str] = self._id, - headers: dict[str, str] = self._headers, - ) -> None: - if not buffer: - return - _data = {category: buffer, "run": run_id} - _data_bin = msgpack.packb(_data, use_bin_type=True) - _url: str = f"{url}/{category}" + run_obj.log_entries(entries=buffer, entry_type=category) - _msgpack_header = headers | {"Content-Type": "application/msgpack"} - - try: - sv_api.post( - url=_url, headers=_msgpack_header, data=_data_bin, is_json=False - ) - except (ValueError, RuntimeError) as e: - self._error(f"{e}", join_threads=False) - return - - return ( - _online_dispatch_callback - if self._mode == "online" - else _offline_dispatch_callback - ) + return _dispatch_callback def _start(self, reconnect: bool = False) -> bool: """Start a run @@ -1030,6 +977,11 @@ def config( "simvue", self, self._emission_metrics_interval ) + # If the main Run API object is initialised the run is active + # hence the tracker should start too + if self._sv_obj: + self._emissions_tracker.start() + if resources_metrics_interval: self._resources_metrics_interval = resources_metrics_interval @@ -1139,7 +1091,7 @@ def update_tags(self, tags: list[str]) -> bool: @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @pydantic.validate_call - def log_event(self, message: str, timestamp: typing.Optional[str] = None) -> bool: + def log_event(self, message: str, timestamp: str | None = None) -> bool: 
"""Log event to the server Parameters @@ -1834,8 +1786,12 @@ def create_alert( break if not _alert_id: - _alert.commit() _alert_id = _alert.id + else: + _alert = Alert(identifier=_alert_id) + _alert.read_only(False) + + _alert.commit() self._sv_obj.alerts = list(self._sv_obj.alerts) + [_alert_id] @@ -1868,6 +1824,7 @@ def log_alert( return False _alert = Alert(identifier=identifier) + _alert.read_only(False) _alert.state = state _alert.commit() diff --git a/simvue/utilities.py b/simvue/utilities.py index 580fa7ff..8854ec75 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -17,9 +17,6 @@ from datetime import timezone -if typing.TYPE_CHECKING: - pass - CHECKSUM_BLOCK_SIZE = 4096 EXTRAS: tuple[str, ...] = ("plot", "torch") @@ -40,7 +37,6 @@ def find_first_instance_of_file( Parameters ---------- - file_name: list[str] | str candidate names of file to locate check_user_space: bool, optional check the users home area if current working directory is not diff --git a/tests/example_data/Project.toml b/tests/example_data/Project.toml new file mode 100644 index 00000000..eeaae7c5 --- /dev/null +++ b/tests/example_data/Project.toml @@ -0,0 +1,19 @@ +name = "Julia Demo Project" +uuid = "31b09h27-d3fd-4268-8c4f-7ab0a8rbc582" +authors = ["Joe Bloggs None: +@pytest.mark.parametrize("output_format", ("dict", "dataframe", "objects")) +def test_get_runs(create_test_run: tuple[sv_run.Run, dict], output_format: str) -> None: client = svc.Client() - assert client.get_runs(filters=None) + + _result = client.get_runs(filters=None, output_format=output_format, count_limit=10) + + if output_format == "dataframe": + assert not _result.empty + else: + assert _result @pytest.mark.dependency @@ -161,8 +168,9 @@ def test_get_run(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_get_folder(create_test_run: tuple[sv_run.Run, dict]) -> None: client = svc.Client() assert (folders := client.get_folders()) - assert (folder_id := folders[1].get("path")) - assert 
client.get_folder(folder_id) + _id, _folder = next(folders) + assert _folder.path + assert client.get_folder(_folder.path) @pytest.mark.dependency @@ -170,8 +178,7 @@ def test_get_folder(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_get_metrics_names(create_test_run: tuple[sv_run.Run, dict]) -> None: client = svc.Client() time.sleep(1) - assert client.get_metrics_names(create_test_run[1]["run_id"]) - + assert list(client.get_metrics_names(create_test_run[1]["run_id"])) @pytest.mark.dependency @@ -180,7 +187,7 @@ def test_get_tag(create_plain_run: tuple[sv_run.Run, dict]) -> None: _, run_data = create_plain_run client = svc.Client() time.sleep(1.0) - assert any(tag["name"] == run_data["tags"][-1] for tag in client.get_tags()) + assert any(tag.name == run_data["tags"][-1] for _, tag in client.get_tags()) PRE_DELETION_TESTS: list[str] = [ @@ -225,7 +232,7 @@ def test_get_tags(create_plain_run: tuple[sv_run.Run, dict]) -> None: run.close() time.sleep(1.0) client = svc.Client() - retrieved = [t["name"] for t in client.get_tags()] + retrieved = [t.name for _, t in client.get_tags()] assert all(t in retrieved for t in tags) diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py index d4afa0c0..07a0c803 100644 --- a/tests/functional/test_config.py +++ b/tests/functional/test_config.py @@ -117,13 +117,13 @@ def _mocked_find(file_names: list[str], *_, ppt_file=_ppt_file, conf_file=_confi assert _config.config_file() == _config_file if use_env: - assert _config.server.url == _other_url + assert _config.server.url == f"{_other_url}api" assert _config.server.token == _other_token elif use_args: - assert _config.server.url == _arg_url + assert _config.server.url == f"{_arg_url}api" assert _config.server.token == _arg_token elif use_file and use_file != "pyproject.toml": - assert _config.server.url == _url + assert _config.server.url == f"{_url}api" assert _config.server.token == _token assert f"{_config.offline.cache}" == temp_d diff --git 
a/tests/functional/test_run_artifact_upload.py b/tests/functional/test_run_artifact_upload.py new file mode 100644 index 00000000..2daeb326 --- /dev/null +++ b/tests/functional/test_run_artifact_upload.py @@ -0,0 +1,40 @@ +""" +Low Level API: Run Class +""" + +import uuid +import pathlib +import tempfile +import pytest + +from simvue.api.objects import Run, Artifact, storage +from simvue.api.objects.folder import Folder + + +@pytest.mark.api +def test_add_artifact_to_run() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder = Folder.new(path=f"/simvue_unit_testing/{_uuid}") + _folder.commit() + _run = Run.new(folder=f"/simvue_unit_testing/{_uuid}") + _run.status = "running" + _run.commit() + + with tempfile.NamedTemporaryFile() as tempf: + with open(tempf.name, "w") as in_f: + in_f.write("Hello") + + _artifact = Artifact.new_file( + name=f"test_{_uuid}", + run=_run.id, + category="input", + storage=None, + file_path=pathlib.Path(tempf.name), + file_type=None + ) + _run.status = "completed" + _run.commit() + assert _run.artifacts + _run.delete() + _folder.delete(recursive=True, delete_runs=True) + diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index dd162b64..51d7f20e 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -20,6 +20,8 @@ import simvue.client as sv_cl import simvue.sender as sv_send +from simvue.api.objects import Run as RunObject + if typing.TYPE_CHECKING: from .conftest import CountingLogHandler @@ -28,6 +30,8 @@ def test_created_run() -> None: with sv_run.Run() as run_created: run_created.init(running=False) + _run = RunObject(identifier=run_created.id) + assert _run.status == "created" @pytest.mark.run @@ -41,6 +45,16 @@ def test_check_run_initialised_decorator() -> None: assert "Simvue Run must be initialised" in str(e.value) +@pytest.mark.run +def test_run_with_emissions() -> None: + with sv_run.Run() as run_created: + run_created.init() + 
run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) + time.sleep(5) + _run = RunObject(identifier=run_created.id) + assert list(_run.metrics) + + @pytest.mark.run @pytest.mark.parametrize("overload_buffer", (True, False), ids=("overload", "normal")) @pytest.mark.parametrize( @@ -111,6 +125,8 @@ def test_log_metrics( with contextlib.suppress(RuntimeError): client.delete_run(run._id) + assert _data + assert sorted(set(METRICS.keys())) == sorted(set(_data.keys())) _steps = [] for entry in _data.values(): @@ -130,6 +146,7 @@ def test_log_metrics( @pytest.mark.run +@pytest.mark.offline def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: METRICS = {"a": 10, "b": 1.2, "c": 2} run, _ = create_plain_run_offline @@ -168,6 +185,7 @@ def test_log_events(create_test_run: tuple[sv_run.Run, dict]) -> None: @pytest.mark.run +@pytest.mark.offline def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: EVENT_MSG = "Hello offline world!" 
run, _ = create_plain_run_offline @@ -181,6 +199,7 @@ def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) - @pytest.mark.run +@pytest.mark.offline def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: run, run_data = create_plain_run_offline run_id, *_ = sv_send.sender() @@ -220,6 +239,7 @@ def test_update_metadata_created(create_pending_run: tuple[sv_run.Run, dict]) -> @pytest.mark.run +@pytest.mark.offline def test_update_metadata_offline( create_plain_run_offline: tuple[sv_run.Run, dict], ) -> None: @@ -438,8 +458,13 @@ def test_set_folder_details(request: pytest.FixtureRequest) -> None: run.set_folder_details(tags=tags, description=description) client = sv_cl.Client() - assert sorted((folder := client.get_folders(filters=[f"path == {folder_name}"])[0])["tags"]) == sorted(tags) - assert folder["description"] == description + _folder = client.get_folder(folder_path=folder_name) + print(_folder) + + assert _folder.tags + assert sorted(_folder.tags) == sorted(tags) + + assert _folder.description == description @pytest.mark.run @@ -510,11 +535,12 @@ def test_save_file_online( out_loc = pathlib.Path(tempd) stored_name = pathlib.Path(base_name) out_file = out_loc.joinpath(name or out_name.name) - client.get_artifact_as_file(run_id=simvue_run.id, name=f"{name or stored_name}", path=tempd) + client.get_artifact_as_file(run_id=simvue_run.id, name=f"{name or stored_name}", output_dir=tempd) assert out_loc.joinpath(name if name else out_name.name).exists() @pytest.mark.run +@pytest.mark.offline @pytest.mark.parametrize( "preserve_path", (True, False), ids=("preserve_path", "modified_path") ) @@ -576,13 +602,13 @@ def test_update_tags_running( time.sleep(1) client = sv_cl.Client() run_data = client.get_run(simvue_run._id) - assert run_data["tags"] == tags + assert run_data.tags == tags simvue_run.update_tags(["additional"]) time.sleep(1) run_data = client.get_run(simvue_run._id) - assert sorted(run_data["tags"]) == 
sorted(tags + ["additional"]) + assert sorted(run_data.tags) == sorted(tags + ["additional"]) @pytest.mark.run @@ -602,13 +628,13 @@ def test_update_tags_created( time.sleep(1) client = sv_cl.Client() run_data = client.get_run(simvue_run._id) - assert sorted(run_data["tags"]) == sorted(tags) + assert sorted(run_data.tags) == sorted(tags) simvue_run.update_tags(["additional"]) time.sleep(1) run_data = client.get_run(simvue_run._id) - assert sorted(run_data["tags"]) == sorted(tags + ["additional"]) + assert sorted(run_data.tags) == sorted(tags + ["additional"]) @pytest.mark.run @@ -715,6 +741,7 @@ def testing_exit(status: int) -> None: counter = 0 while run._status != "terminated" and counter < 15: time.sleep(1) + assert run._sv_obj.abort_trigger, "Abort trigger was not set" counter += 1 if counter >= 15: run.kill_all_processes() diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index de16b8ac..5cf0386e 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -72,7 +72,7 @@ def test_processes_cwd(create_plain_run: dict[Run, dict]) -> None: # Check that the script was uploaded to the run correctly os.makedirs(os.path.join(temp_dir, "downloaded")) - client.get_artifact_as_file(run_id, os.path.basename(temp_file.name), path=os.path.join(temp_dir, "downloaded")) + client.get_artifact_as_file(run_id, os.path.basename(temp_file.name), output_dir=os.path.join(temp_dir, "downloaded")) assert filecmp.cmp(os.path.join(temp_dir, "downloaded", os.path.basename(temp_file.name)), temp_file.name) client.get_artifact_as_file(run_id, "new_file.txt", path=os.path.join(temp_dir, "downloaded")) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 669944f8..b5d80717 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -13,12 +13,16 @@ def test_artifact_creation_online() -> None: _folder_name = f"/simvue_unit_testing/{_uuid}" 
_folder = Folder.new(path=_folder_name) _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() + + _failed = [] with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: _path = pathlib.Path(temp_f.name) with _path.open("w") as out_f: out_f.write("Hello World!") - _artifact = Artifact.new( + _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", run=_run.id, file_path=_path, @@ -26,10 +30,16 @@ def test_artifact_creation_online() -> None: storage=None, file_type=None ) - _artifact.commit() time.sleep(1) + for member in _artifact._properties: + try: + getattr(_artifact, member) + except Exception as e: + _failed.append((member, f"{e}")) assert _artifact.name == f"test_artifact_{_uuid}" _artifact.delete() + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) _run.delete() _folder.delete() @@ -45,7 +55,7 @@ def test_artifact_creation_offline() -> None: _path = pathlib.Path(temp_f.name) with _path.open("w") as out_f: out_f.write("Hello World!") - _artifact = Artifact.new( + _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", run=_run.id, file_path=_path, @@ -63,3 +73,4 @@ def test_artifact_creation_offline() -> None: _run.delete() _folder.delete() + diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index 4783ddea..908c86be 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -1,5 +1,6 @@ import time import pytest +import contextlib import uuid from simvue.api.objects import Alert, EventsAlert @@ -90,3 +91,28 @@ def test_event_alert_modification_offline() -> None: assert _new_alert.description == "updated!" 
_new_alert.delete() + +@pytest.mark.api +def test_event_alert_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none" + ) + _alert.commit() + + _failed = [] + + for member in _alert._properties: + try: + getattr(_alert, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _alert.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) + diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 5ce7ba91..b2dcb0ce 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -1,6 +1,7 @@ import typing import pytest import uuid +import contextlib import json import time @@ -89,3 +90,23 @@ def test_folder_modification_offline() -> None: assert _folder_new.visibility.tenant _folder_new.delete() + +@pytest.mark.api +def test_folder_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _folder = Folder.new(path=_path) + _folder.commit() + + _failed = [] + + for member in _folder._properties: + try: + getattr(_folder, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _folder.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/functional/test_metadata.py b/tests/unit/test_metadata.py similarity index 50% rename from tests/functional/test_metadata.py rename to tests/unit/test_metadata.py index 9e095c19..5a1070ff 100644 --- a/tests/functional/test_metadata.py +++ b/tests/unit/test_metadata.py @@ -16,3 +16,18 @@ def test_python_env() -> None: assert re.findall(r"\d+\.\d+\.\d+", metadata["python.environment.click"]) assert metadata["python.project.name"] == "spam-eggs" + +@pytest.mark.metadata +def test_julia_env() -> 
None: + metadata = sv_meta._julia_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) + assert metadata["julia.project.name"] == "Julia Demo Project" + assert re.findall(r"\d+\.\d+\.\d+", metadata["julia.environment.AbstractDifferentiation"]) + + +@pytest.mark.metadata +def test_js_env() -> None: + metadata = sv_meta._node_js_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) + assert metadata["javascript.project.name"] == "my-awesome-project" + assert re.findall(r"\d+\.\d+\.\d+", metadata["javascript.environment.node_modules/dotenv"]) + + diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index 9867c3fb..f9dd1eb6 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -1,4 +1,4 @@ - +import contextlib import time import pytest import json @@ -10,7 +10,7 @@ def test_metric_range_alert_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( - name=f"metrics_threshold_alert_{_uuid}", + name=f"metrics_range_alert_{_uuid}", frequency=1, notification="none", metric="x", @@ -23,7 +23,7 @@ def test_metric_range_alert_creation_online() -> None: _alert.commit() assert _alert.source == "metrics" assert _alert.alert.frequency == 1 - assert _alert.name == f"metrics_threshold_alert_{_uuid}" + assert _alert.name == f"metrics_range_alert_{_uuid}" assert _alert.notification == "none" _alert.delete() @@ -32,7 +32,7 @@ def test_metric_range_alert_creation_online() -> None: def test_metric_range_alert_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( - name=f"metrics_threshold_alert_{_uuid}", + name=f"metrics_range_alert_{_uuid}", frequency=1, notification="none", metric="x", @@ -46,7 +46,7 @@ def test_metric_range_alert_creation_offline() -> None: _alert.commit() assert _alert.source == "metrics" assert _alert.alert.frequency == 1 - assert _alert.name == 
f"metrics_threshold_alert_{_uuid}" + assert _alert.name == f"metrics_range_alert_{_uuid}" assert _alert.notification == "none" _alert.delete() @@ -57,10 +57,10 @@ def test_metric_range_alert_creation_offline() -> None: @pytest.mark.api -def test_metric_threshold_alert_modification_online() -> None: +def test_metric_range_alert_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( - name=f"metrics_threshold_alert_{_uuid}", + name=f"metrics_range_alert_{_uuid}", frequency=1, notification="none", metric="x", @@ -82,10 +82,10 @@ def test_metric_threshold_alert_modification_online() -> None: @pytest.mark.api -def test_metric_threshold_alert_modification_offline() -> None: +def test_metric_range_alert_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( - name=f"metrics_threshold_alert_{_uuid}", + name=f"metrics_range_alert_{_uuid}", frequency=1, notification="none", metric="x", @@ -106,3 +106,32 @@ def test_metric_threshold_alert_modification_offline() -> None: assert _new_alert.description == "updated!" 
_new_alert.delete() +@pytest.mark.api +def test_metric_range_alert_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_range_alert_{_uuid}", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range" + ) + _alert.commit() + + _failed = [] + + for member in _alert._properties: + try: + getattr(_alert, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _alert.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) + diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index e0520859..85c8deca 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -1,4 +1,5 @@ import time +import contextlib import pytest import json import uuid @@ -102,3 +103,30 @@ def test_metric_threshold_alert_modification_offline() -> None: assert _new_alert.description == "updated!" 
_new_alert.delete() +@pytest.mark.api +def test_metric_range_alert_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsThresholdAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + metric="x", + threshold=10, + window=1, + rule="is above", + aggregation="average" + ) + _alert.commit() + + _failed = [] + + for member in _alert._properties: + try: + getattr(_alert, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _alert.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index ea5c1b0f..cfcd7f23 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -1,4 +1,5 @@ -from codecarbon.output_methods.emissions_data import json +import contextlib +import json import pytest import time import datetime @@ -99,3 +100,27 @@ def test_run_modification_offline() -> None: _run.delete() _folder.delete() + +@pytest.mark.api +def test_run_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _run.status = "running" + _run.ttl = 60 + _folder.commit() + _run.commit() + _failed = [] + + for member in _run._properties: + try: + getattr(_run, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _run.delete() + _folder.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index c05d9395..87b4a163 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -1,4 +1,5 @@ import time +import contextlib import pytest import uuid import json @@ -64,3 +65,20 @@ def test_tag_modification_offline() -> None: assert 
_new_tag.description == "modified test tag" _tag.delete() +@pytest.mark.api +def test_tag_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}") + _tag.commit() + _failed = [] + + for member in _tag._properties: + try: + getattr(_tag, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _tag.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py index 63d07e49..c3f64cf3 100644 --- a/tests/unit/test_tenant.py +++ b/tests/unit/test_tenant.py @@ -1,5 +1,6 @@ import pytest import time +import contextlib import json import uuid @@ -32,3 +33,26 @@ def test_create_tenant_offline() -> None: assert _new_tenant.name == _uuid assert _new_tenant.enabled _new_tenant.delete() + + +@pytest.mark.api +def test_tag_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + _failed = [] + + for member in _tag._properties: + try: + getattr(_tag, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _tag.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index afc5e6a7..b28bdcb6 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -1,5 +1,6 @@ import time import json +import contextlib import pytest import uuid @@ -79,3 +80,25 @@ def test_user_alert_modification_offline() -> None: assert _new_alert.description == "updated!" 
_new_alert.delete() +@pytest.mark.api +def test_user_alert_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none" + ) + _alert.commit() + + _failed = [] + + for member in _alert._properties: + try: + getattr(_alert, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _alert.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) + From 1bf7205df7427651619ec412ed50a7a82d92a82e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 10 Dec 2024 17:19:38 +0000 Subject: [PATCH 017/163] Further fixes towards working tests for v2 --- simvue/api/objects/alert/base.py | 12 ++++++ simvue/api/objects/alert/user.py | 13 ++++++ simvue/api/objects/artifact.py | 2 +- simvue/api/objects/base.py | 10 ++--- simvue/api/objects/run.py | 8 +--- simvue/config/parameters.py | 6 +++ simvue/config/user.py | 24 +++++++---- simvue/eco.py | 17 +++++--- simvue/run.py | 67 ++++++++++++++++++------------ simvue/utilities.py | 2 +- tests/functional/test_run_class.py | 33 ++++++++------- 11 files changed, 124 insertions(+), 70 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index d0b89d49..2751e7cf 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -115,3 +115,15 @@ def abort(self) -> bool: def abort(self, abort: bool) -> None: """Configure alert to trigger aborts""" self._staging["abort"] = abort + + @property + @staging_check + def state(self) -> typing.Literal["ok", "critical", "no_data"]: + return self._get_attribute("state") + + @state.setter + @pydantic.validate_call + def state(self, state: typing.Literal["ok", "critical"]) -> None: + raise AttributeError( + f"Cannot update state for alert of type '{self.__class__.__name__}'" + ) diff --git a/simvue/api/objects/alert/user.py 
b/simvue/api/objects/alert/user.py index a0b62111..7f60779f 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -8,6 +8,8 @@ import pydantic import typing + +from simvue.api.objects.base import staging_check from .base import AlertBase from simvue.models import NAME_REGEX @@ -56,3 +58,14 @@ def get( cls, count: int | None = None, offset: int | None = None ) -> dict[str, typing.Any]: raise NotImplementedError("Retrieve of only user alerts is not yet supported") + + @property + @staging_check + def state(self) -> typing.Literal["ok", "critical", "no_data"]: + return self._get_attribute("state") + + @state.setter + @pydantic.validate_call + def state(self, state: typing.Literal["ok", "critical"]) -> None: + """Set alert name""" + self._staging["state"] = state diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 1dce7962..2dfd3ea7 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -219,7 +219,7 @@ def _upload( _response = sv_put( url=f"{_run_artifacts_url}", - headers=self._headers, + headers={}, data=_obj_parameters | {"storage": self.storage}, ) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 11cefb8e..082d072c 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -137,6 +137,7 @@ def __init__( _config_args = { "server_url": kwargs.pop("server_url", None), "server_token": kwargs.pop("server_token", None), + "mode": kwargs.pop("mode", "online"), } self._user_config = SimvueConfiguration.fetch(**_config_args) @@ -174,6 +175,9 @@ def _stage_to_other(self, obj_label: str, key: str, value: typing.Any) -> None: with self._local_staging_file.open() as in_f: _staged_data = json.load(in_f) + if obj_label not in _staged_data: + _staged_data[obj_label] = {} + if key not in _staged_data[obj_label]: _staged_data[obj_label][key] = value return @@ -396,16 +400,12 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: f"Forbidden: You do 
not have permission to create object of type '{self._label}'" ) - _json_response = get_json_from_response( + return get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], scenario=f"Creation of {self._label} '{self._identifier}", ) - self._logger.debug("'%s' modified successfully", self._identifier) - - return _json_response - def delete( self, _linked_objects: list[str] | None = None, **kwargs ) -> dict[str, typing.Any]: diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index dc977334..1b220679 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -325,15 +325,13 @@ def artifacts(self) -> list[dict[str, typing.Any]]: _response = sv_get(url=self._artifact_url, headers=self._headers) - _json_response = get_json_from_response( + return get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], scenario=f"Retrieving artifacts for run '{self.id}'", expected_type=list, ) - return _json_response - @pydantic.validate_call def abort(self, reason: str) -> dict[str, typing.Any]: if not self._abort_url: @@ -343,10 +341,8 @@ def abort(self, reason: str) -> dict[str, typing.Any]: f"{self._abort_url}", headers=self._headers, data={"reason": reason} ) - _json_response = get_json_from_response( + return get_json_from_response( expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Abort of run '{self.id}'", response=_response, ) - - return _json_response diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index cbc6ff5b..4e1d310b 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -46,6 +46,12 @@ class OfflineSpecifications(pydantic.BaseModel): cache: typing.Optional[pathlib.Path] = None +class MetricsSpecifications(pydantic.BaseModel): + resources_metrics_interval: pydantic.PositiveInt | None = -1 + emission_metrics_interval: pydantic.PositiveInt | None = None + enable_emission_metrics: bool = False + + class 
DefaultRunSpecifications(pydantic.BaseModel): name: typing.Optional[str] = None description: typing.Optional[str] = None diff --git a/simvue/config/user.py b/simvue/config/user.py index aedabaaa..9f28b553 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -20,6 +20,7 @@ from simvue.config.parameters import ( ClientGeneralOptions, DefaultRunSpecifications, + MetricsSpecifications, ServerSpecifications, OfflineSpecifications, ) @@ -30,14 +31,17 @@ DEFAULT_OFFLINE_DIRECTORY, ) from simvue.version import __version__ -from simvue.api import get +from simvue.api.request import get as sv_get +from simvue.api.url import URL logger = logging.getLogger(__name__) SIMVUE_SERVER_UPPER_CONSTRAINT: typing.Optional[semver.Version] = semver.Version.parse( + "2.0.0" +) +SIMVUE_SERVER_LOWER_CONSTRAINT: typing.Optional[semver.Version] = semver.Version.parse( "1.0.0" ) -SIMVUE_SERVER_LOWER_CONSTRAINT: typing.Optional[semver.Version] = None class SimvueConfiguration(pydantic.BaseModel): @@ -49,6 +53,7 @@ class SimvueConfiguration(pydantic.BaseModel): ) run: DefaultRunSpecifications = DefaultRunSpecifications() offline: OfflineSpecifications = OfflineSpecifications() + metrics: MetricsSpecifications = MetricsSpecifications() @classmethod def _load_pyproject_configs(cls) -> typing.Optional[dict]: @@ -95,18 +100,21 @@ def _check_server( "User-Agent": f"Simvue Python client {__version__}", } try: - response = get(f"{url}/api/version", headers) + _url = URL(url) / "version" + _response = sv_get(f"{_url}", headers) - if response.status_code != http.HTTPStatus.OK or not ( - _version_str := response.json().get("version") + if _response.status_code != http.HTTPStatus.OK or not ( + _version_str := _response.json().get("version") ): raise AssertionError - if response.status_code == http.HTTPStatus.UNAUTHORIZED: + if _response.status_code == http.HTTPStatus.UNAUTHORIZED: raise AssertionError("Unauthorised token") except Exception as err: - raise AssertionError(f"Exception retrieving 
server version: {str(err)}") + raise AssertionError( + f"Exception retrieving server version: {str(err)}" + ) from err _version = semver.Version.parse(_version_str) @@ -200,7 +208,7 @@ def fetch( "SIMVUE_TOKEN", server_token or _config_dict["server"].get("token") ) - _run_mode = mode or _config_dict["run"].get("mode") + _run_mode = mode or _config_dict["run"].get("mode") or "online" if not _server_url: raise RuntimeError("No server URL was specified") diff --git a/simvue/eco.py b/simvue/eco.py index 7a4d47bf..6ff7023b 100644 --- a/simvue/eco.py +++ b/simvue/eco.py @@ -3,7 +3,6 @@ import datetime from codecarbon import EmissionsTracker -from codecarbon.external.logger import logger from codecarbon.output_methods.base_output import BaseOutput as cc_BaseOutput from simvue.utilities import simvue_timestamp @@ -12,21 +11,27 @@ from codecarbon.output_methods.emissions_data import EmissionsData +logger = logging.getLogger(__file__) + + class CodeCarbonOutput(cc_BaseOutput): def __init__(self, run: "Run") -> None: - self._meta_update: bool = True self._simvue_run = run self._metrics_step: int = 0 - def out(self, total: "EmissionsData", delta: "EmissionsData") -> None: + def out( + self, total: "EmissionsData", delta: "EmissionsData", meta_update: bool = True + ) -> None: # Check if the run has been shutdown, if so do nothing if ( self._simvue_run._shutdown_event and self._simvue_run._shutdown_event.is_set() ): + logger.debug("Terminating CodeCarbon tracker") return - if self._meta_update: + if meta_update: + logger.debug("Logging CodeCarbon metadata") self._simvue_run.update_metadata( { "codecarbon.country": total.country_name, @@ -35,12 +40,12 @@ def out(self, total: "EmissionsData", delta: "EmissionsData") -> None: "codecarbon.version": total.codecarbon_version, } ) - self._meta_update = False _cc_timestamp: datetime.datetime = datetime.datetime.strptime( total.timestamp, "%Y-%m-%dT%H:%M:%S" ) + logger.debug("Logging CodeCarbon metrics") self._simvue_run.log_metrics( 
metrics={ "codecarbon.total.emissions": total.emissions, @@ -54,7 +59,7 @@ def out(self, total: "EmissionsData", delta: "EmissionsData") -> None: self._metrics_step += 1 def live_out(self, total: "EmissionsData", delta: "EmissionsData") -> None: - self.out(total, delta) + self.out(total, delta, meta_update=False) class SimvueEmissionsTracker(EmissionsTracker): diff --git a/simvue/run.py b/simvue/run.py index b9118e0e..28ef82a5 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -148,7 +148,6 @@ def __init__( self._executor = Executor(self) self._dispatcher: typing.Optional[DispatcherBaseClass] = None - self._emissions_tracker: typing.Optional[SimvueEmissionsTracker] = None self._id: typing.Optional[str] = None self._folder: Folder | None = None @@ -175,7 +174,11 @@ def __init__( ) self._aborted: bool = False - self._resources_metrics_interval: typing.Optional[int] = HEARTBEAT_INTERVAL + self._resources_metrics_interval: typing.Optional[int] = ( + HEARTBEAT_INTERVAL + if self._user_config.metrics.resources_metrics_interval < 1 + else self._user_config.metrics.resources_metrics_interval + ) self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token}" } @@ -188,7 +191,19 @@ def __init__( self._heartbeat_thread: typing.Optional[threading.Thread] = None self._heartbeat_interval: int = HEARTBEAT_INTERVAL - self._emission_metrics_interval: int = HEARTBEAT_INTERVAL + self._emission_metrics_interval: int | None = ( + HEARTBEAT_INTERVAL + if ( + (_interval := self._user_config.metrics.emission_metrics_interval) + and _interval < 1 + ) + else self._user_config.metrics.emission_metrics_interval + ) + self._emissions_tracker: typing.Optional[SimvueEmissionsTracker] = ( + SimvueEmissionsTracker("simvue", self, self._emission_metrics_interval) + if self._user_config.metrics.enable_emission_metrics + else None + ) def __enter__(self) -> Self: return self @@ -598,7 +613,9 @@ def init( try: self._folder = get_folder_from_path(path=folder) except 
ObjectNotFoundError: - self._folder = Folder.new(path=folder, offline=self._mode == "offline") + self._folder = Folder.new( + path=folder, offline=self._user_config.run.mode == "offline" + ) self._folder.commit() # type: ignore if isinstance(visibility, str) and visibility not in ("public", "tenant"): @@ -638,7 +655,9 @@ def init( self._timer = time.time() - self._sv_obj = RunObject.new(folder=folder, offline=self._mode == "offline") + self._sv_obj = RunObject.new( + folder=folder, offline=self._user_config.run.mode == "offline" + ) if description: self._sv_obj.description = description @@ -686,7 +705,7 @@ def init( fg="green" if self._term_color else None, ) - if self._emissions_tracker: + if self._emissions_tracker and self._status == "running": self._emissions_tracker.post_init() self._emissions_tracker.start() @@ -774,11 +793,8 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: "due to function pickling restrictions for multiprocessing" ) - if isinstance(executable, pathlib.Path): - if not executable.is_file(): - raise FileNotFoundError( - f"Executable '{executable}' is not a valid file" - ) + if isinstance(executable, pathlib.Path) and not executable.is_file(): + raise FileNotFoundError(f"Executable '{executable}' is not a valid file") cmd_list: typing.List[str] = [] pos_args = list(cmd_args) @@ -799,13 +815,13 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: if isinstance(val, bool) and val: cmd_list += [f"-{kwarg}"] else: - cmd_list += [f"-{kwarg}{(' '+ _quoted_val) if val else ''}"] + cmd_list += [f"-{kwarg}{(f' {_quoted_val}') if val else ''}"] else: kwarg = kwarg.replace("_", "-") if isinstance(val, bool) and val: cmd_list += [f"--{kwarg}"] else: - cmd_list += [f"--{kwarg}{(' '+_quoted_val) if val else ''}"] + cmd_list += [f"--{kwarg}{(f' {_quoted_val}') if val else ''}"] cmd_list += pos_args cmd_str = " ".join(cmd_list) @@ -984,6 +1000,8 @@ def config( # hence the tracker should start too if 
self._sv_obj: self._emissions_tracker.start() + elif enable_emission_metrics is False and self._emissions_tracker: + self._error("Cannot disable emissions tracker once it has been started") if resources_metrics_interval: self._resources_metrics_interval = resources_metrics_interval @@ -1316,7 +1334,7 @@ def save_file( run=self.id, storage=self._storage_id, file_path=file_path, - offline=self._mode == "offline", + offline=self._user_config.run.mode == "offline", file_type=filetype, category=category, ) @@ -1359,11 +1377,8 @@ def save_directory( return False if filetype: - mimetypes_valid = [] mimetypes.init() - for _, value in mimetypes.types_map.items(): - mimetypes_valid.append(value) - + mimetypes_valid = [value for _, value in mimetypes.types_map.items()] if filetype not in mimetypes_valid: self._error("Invalid MIME type specified") return False @@ -1735,7 +1750,7 @@ def create_alert( notification=notification, threshold=threshold, frequency=frequency or 60, - offline=self._mode == "offline", + offline=self._user_config.run.mode == "offline", ) elif source == "metrics": if ( @@ -1758,7 +1773,7 @@ def create_alert( range_low=range_low, range_high=range_high, frequency=frequency or 60, - offline=self._mode == "offline", + offline=self._user_config.run.mode == "offline", ) elif source == "events": if not pattern: @@ -1770,11 +1785,13 @@ def create_alert( pattern=pattern, notification=notification, frequency=frequency or 60, - offline=self._mode == "offline", + offline=self._user_config.run.mode == "offline", ) else: _alert = UserAlert.new( - name=name, notification=notification, offline=self._mode == "offline" + name=name, + notification=notification, + offline=self._user_config.run.mode == "offline", ) _alert.abort = trigger_abort @@ -1789,12 +1806,8 @@ def create_alert( break if not _alert_id: + _alert.commit() _alert_id = _alert.id - else: - _alert = Alert(identifier=_alert_id) - _alert.read_only(False) - - _alert.commit() self._sv_obj.alerts = 
list(self._sv_obj.alerts) + [_alert_id] diff --git a/simvue/utilities.py b/simvue/utilities.py index 8854ec75..56455e8b 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -167,7 +167,7 @@ def parse_pydantic_error(error: pydantic.ValidationError) -> str: ) _type: str = data["type"] - if (_input_type := type(_input)) != _type: + if (_input_type := type(_input)) != _type and "error" not in _type: _type = f"{_input_type.__name__} != {_type}" out_table.append( diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 51d7f20e..758e711e 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -1,5 +1,6 @@ import os from os.path import basename +from numpy import identity import pytest import pytest_mock import time @@ -15,6 +16,7 @@ import random import simvue +from simvue.api.objects.alert.fetch import Alert from simvue.exception import SimvueRunError import simvue.run as sv_run import simvue.client as sv_cl @@ -109,10 +111,10 @@ def test_log_metrics( if overload_buffer: for i in range(run._dispatcher._max_buffer_size * 3): - run.log_metrics({key: i for key in METRICS.keys()}) + run.log_metrics({key: i for key in METRICS}) else: run.log_metrics(METRICS) - time.sleep(1.0 if not overload_buffer else 2.0) + time.sleep(2.0 if overload_buffer else 1.0) run.close() client = sv_cl.Client() _data = client.get_metric_values( @@ -130,16 +132,14 @@ def test_log_metrics( assert sorted(set(METRICS.keys())) == sorted(set(_data.keys())) _steps = [] for entry in _data.values(): - _steps += list(i[0] for i in entry.keys()) + _steps += [i[0] for i in entry.keys()] _steps = set(_steps) assert ( - len(_steps) == 1 - if not overload_buffer - else run._dispatcher._max_buffer_size * 3 + run._dispatcher._max_buffer_size * 3 if overload_buffer else len(_steps) == 1 ) # Check metrics have been set - assert setup_logging.counts[0] == 1 if not overload_buffer else 3 + assert 3 if overload_buffer else 
setup_logging.counts[0] == 1 # Check heartbeat has been called at least once (so sysinfo sent) assert setup_logging.counts[1] > 0 @@ -164,7 +164,7 @@ def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) assert sorted(set(METRICS.keys())) == sorted(set(_data.keys())) _steps = [] for entry in _data.values(): - _steps += list(i[0] for i in entry.keys()) + _steps += [i[0] for i in entry.keys()] _steps = set(_steps) assert ( len(_steps) == 1 @@ -172,7 +172,7 @@ def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) @pytest.mark.run -def test_log_events(create_test_run: tuple[sv_run.Run, dict]) -> None: +def test_log_events_online(create_test_run: tuple[sv_run.Run, dict]) -> None: EVENT_MSG = "Hello world!" run, _ = create_test_run run.log_event(EVENT_MSG) @@ -183,7 +183,6 @@ def test_log_events(create_test_run: tuple[sv_run.Run, dict]) -> None: assert event_data[0].get("message", EVENT_MSG) - @pytest.mark.run @pytest.mark.offline def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: @@ -495,7 +494,7 @@ def test_save_file_online( (out_name := pathlib.Path(tempd).joinpath("test_file.txt")), "w", ) as out_f: - out_f.write("test data entry" if not empty_file else "") + out_f.write("" if empty_file else "test data entry") if valid_mimetype: simvue_run.save_file( @@ -536,7 +535,7 @@ def test_save_file_online( stored_name = pathlib.Path(base_name) out_file = out_loc.joinpath(name or out_name.name) client.get_artifact_as_file(run_id=simvue_run.id, name=f"{name or stored_name}", output_dir=tempd) - assert out_loc.joinpath(name if name else out_name.name).exists() + assert out_loc.joinpath(name or out_name.name).exists() @pytest.mark.run @@ -582,7 +581,7 @@ def test_save_file_offline( stored_name = pathlib.Path(base_name) out_file = out_loc.joinpath(name or out_name.name) client.get_artifact_as_file(run_id=run_id, name=f"{name or stored_name}", path=tempd) - assert out_loc.joinpath(name if name 
else out_name.name).exists() + assert out_loc.joinpath(name or out_name.name).exists() @pytest.mark.run @@ -602,7 +601,7 @@ def test_update_tags_running( time.sleep(1) client = sv_cl.Client() run_data = client.get_run(simvue_run._id) - assert run_data.tags == tags + assert sorted(run_data.tags) == sorted(tags) simvue_run.update_tags(["additional"]) @@ -694,11 +693,11 @@ def abort_callback(abort_run=trigger) -> None: assert run._resources_metrics_interval == 1 for child in child_processes: assert not child.is_running() - if not run._status == "terminated": + if run._status != "terminated": run.kill_all_processes() raise AssertionError("Run was not terminated") assert trigger.is_set() - + @pytest.mark.run def test_abort_on_alert_python(create_plain_run: typing.Tuple[sv_run.Run, dict], mocker: pytest_mock.MockerFixture) -> None: @@ -738,6 +737,8 @@ def testing_exit(status: int) -> None: run.add_process(identifier="forever_long", executable="bash", c="sleep 10") time.sleep(2) run.log_alert(alert_id, "critical") + _alert = Alert(identifier=alert_id) + assert _alert.state == "critical" counter = 0 while run._status != "terminated" and counter < 15: time.sleep(1) From 1173cb8b17e18882826b02602ce44c6f8c51c928 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 16 Dec 2024 14:41:33 +0000 Subject: [PATCH 018/163] Fix wrong headers during run update for artifact --- simvue/api/objects/artifact.py | 2 +- simvue/api/objects/run.py | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 2dfd3ea7..1dce7962 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -219,7 +219,7 @@ def _upload( _response = sv_put( url=f"{_run_artifacts_url}", - headers={}, + headers=self._headers, data=_obj_parameters | {"storage": self.storage}, ) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 1b220679..0b732f29 100644 --- 
a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -242,6 +242,14 @@ def metrics( self._logger.warning(f"Uncommitted metrics found for run '{self.id}'") yield from self._get_attribute("metrics").items() + @property + def events( + self, + ) -> typing.Generator[tuple[str, dict[str, int | float | bool]], None, None]: + if self._staged_metrics: + self._logger.warning(f"Uncommitted metrics found for run '{self.id}'") + yield from self._get_attribute("events").items() + @pydantic.validate_call def log_entries( self, From 36bbd5a8a9fa554a246b1fc81a18572a745ec166 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 16 Dec 2024 22:43:59 +0000 Subject: [PATCH 019/163] Use lock files for python environment, support Poetry and UV --- simvue/metadata.py | 83 ++-- tests/example_data/pyproject.toml | 32 -- tests/example_data/python_poetry/poetry.lock | 376 ++++++++++++++++++ .../example_data/python_poetry/pyproject.toml | 17 + tests/example_data/python_uv/pyproject.toml | 11 + tests/example_data/python_uv/uv.lock | 179 +++++++++ tests/unit/test_metadata.py | 18 +- 7 files changed, 634 insertions(+), 82 deletions(-) delete mode 100644 tests/example_data/pyproject.toml create mode 100644 tests/example_data/python_poetry/poetry.lock create mode 100644 tests/example_data/python_poetry/pyproject.toml create mode 100644 tests/example_data/python_uv/pyproject.toml create mode 100644 tests/example_data/python_uv/uv.lock diff --git a/simvue/metadata.py b/simvue/metadata.py index 958240b8..673baac5 100644 --- a/simvue/metadata.py +++ b/simvue/metadata.py @@ -8,11 +8,9 @@ import contextlib import typing -import re import json import toml import logging -import importlib.metadata import pathlib import flatdict @@ -81,52 +79,45 @@ def git_info(repository: str) -> dict[str, typing.Any]: def _python_env(repository: pathlib.Path) -> dict[str, typing.Any]: - """Retrieve a dictionary of Python dependencies if a file is available""" - meta: dict[str, str] = {} 
- req_meta: dict[str, str] = {} - - if (reqfile := pathlib.Path(repository).joinpath("requirements.txt")).exists(): - with reqfile.open() as in_req: - requirement_lines = in_req.readlines() - req_meta = {} - - for line in requirement_lines: - dependency, version = line.split("=", 1) - req_meta[dependency] = version - if (pptoml := pathlib.Path(repository).joinpath("pyproject.toml")).exists(): - content = toml.load(pptoml) - - requirements = (project := content.get("project", {})).get("dependencies") - - if requirements: - requirements = [re.split("[=><]", dep, 1)[0] for dep in requirements] - - requirements = requirements or ( - project := content.get("tool", {}).get("poetry", {}) - ).get("dependencies") - - if version := project.get("version"): - meta |= {"python.project.version": version} - - if name := project.get("name"): - meta |= {"python.project.name": name} - - if not requirements: - return meta - - req_meta = {} + """Retrieve a dictionary of Python dependencies if lock file is available""" + python_meta: dict[str, str] = {} + + if (pyproject_file := pathlib.Path(repository).joinpath("pyproject.toml")).exists(): + content = toml.load(pyproject_file) + if poetry_content := content.get("tool", {}).get("poetry"): + python_meta |= { + "python.project.name": poetry_content["name"], + "python.project.version": poetry_content["version"], + } + elif other_content := content.get("project"): + python_meta |= { + "python.project.name": other_content["name"], + "python.project.version": other_content["version"], + } + + if (poetry_lock_file := pathlib.Path(repository).joinpath("poetry.lock")).exists(): + content = toml.load(poetry_lock_file).get("package", {}) + python_meta |= { + f"python.environment.{package['name']}": package["version"] + for package in content + } + elif (uv_lock_file := pathlib.Path(repository).joinpath("uv.lock")).exists(): + content = toml.load(uv_lock_file).get("package", {}) + python_meta |= { + f"python.environment.{package['name']}": 
package["version"] + for package in content + } + else: + with contextlib.suppress((KeyError, ImportError)): + from pip._internal.operations.freeze import freeze - for package in requirements: - if package == "python": - continue - # Cover case where package is an optional dependency and not installed - with contextlib.suppress(importlib.metadata.PackageNotFoundError): - req_meta[package] = importlib.metadata.version(package) + python_meta |= { + f"python.environment.{entry[0]}": entry[-1] + for line in freeze(local_only=True) + if (entry := line.split("==")) + } - return meta | { - f"python.environment.{dependency}": version - for dependency, version in req_meta.items() - } + return python_meta def _rust_env(repository: pathlib.Path) -> dict[str, typing.Any]: diff --git a/tests/example_data/pyproject.toml b/tests/example_data/pyproject.toml deleted file mode 100644 index 786bba83..00000000 --- a/tests/example_data/pyproject.toml +++ /dev/null @@ -1,32 +0,0 @@ -[project] -name = "spam-eggs" -version = "2020.0.0" -authors = [ - {name = "Pradyun Gedam", email = "pradyun@example.com"}, - {name = "Tzu-Ping Chung", email = "tzu-ping@example.com"}, - {name = "Another person"}, - {email = "different.person@example.com"}, -] -maintainers = [ - {name = "Brett Cannon", email = "brett@example.com"} -] -dependencies = [ - "dill>=0.3.7", - "requests>=2.31.0", - "msgpack>=1.0.7", - "tenacity>=8.2.3,<10.0.0", - "PyJWT>=2.8.0", - "psutil>=5.9.8", - "pydantic>=2.5.3", - "pandas>=2.2.0", - "plotly>=5.18.0", - "numpy>=1.26.3,<3.0.0", - "matplotlib>=3.8.2", - "typing_extensions>=4.11.0", - "toml>=0.10.2", - "click>=8.1.7", - "gitpython>=3.1.43", - "humanfriendly>=10.0", - "tabulate>=0.9.0", - "randomname>=0.2.1", -] diff --git a/tests/example_data/python_poetry/poetry.lock b/tests/example_data/python_poetry/poetry.lock new file mode 100644 index 00000000..28836471 --- /dev/null +++ b/tests/example_data/python_poetry/poetry.lock @@ -0,0 +1,376 @@ +# This file is automatically 
@generated by Poetry 1.8.4 and should not be changed by hand. + +[[package]] +name = "certifi" +version = "2024.12.14" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + 
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + 
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = 
"charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "numpy" +version = "2.2.0" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = 
"numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = 
"sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, +] + +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} +python-dateutil = ">=2.8.2" 
+pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = 
["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.13" +content-hash = "fabc966f49e79f5fc297c4ba1778f211db403b4125ea6224ada97c8664f88ea3" diff --git a/tests/example_data/python_poetry/pyproject.toml b/tests/example_data/python_poetry/pyproject.toml new file mode 100644 index 00000000..6fc8a220 --- /dev/null +++ b/tests/example_data/python_poetry/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "example-repo" +version = "0.1.0" +description = "" +authors = ["Kristian Zarębski "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.13" +numpy = "^2.2.0" +pandas = "^2.2.3" +requests = "^2.32.3" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/tests/example_data/python_uv/pyproject.toml b/tests/example_data/python_uv/pyproject.toml new file mode 100644 index 00000000..124f9f0c --- /dev/null +++ b/tests/example_data/python_uv/pyproject.toml @@ -0,0 +1,11 @@ +[project] +name = "example-repo" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + "numpy>=2.2.0", + "pandas>=2.2.3", + "requests>=2.32.3", +] diff --git a/tests/example_data/python_uv/uv.lock b/tests/example_data/python_uv/uv.lock new file mode 100644 index 00000000..5c947580 --- /dev/null +++ b/tests/example_data/python_uv/uv.lock @@ -0,0 +1,179 @@ +version = 1 +requires-python = ">=3.13" + +[[package]] +name = "certifi" +version = "2024.12.14" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = 
"https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, +] + +[[package]] +name = "example-repo" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "numpy" }, + { name = "pandas" }, + { name = "requests" }, +] + +[package.metadata] +requires-dist = [ + { name = "numpy", specifier = ">=2.2.0" }, + { name = "pandas", specifier = ">=2.2.3" }, + { name = "requests", specifier = ">=2.32.3" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "numpy" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1d565e0f6e156e1522ab564176b8b29d71e13d8caf003a08768df3d5cec5/numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0", size = 20225497 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/4c/0d1eef206545c994289e7a9de21b642880a11e0ed47a2b0c407c688c4f69/numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367", size = 20895707 }, + { url = "https://files.pythonhosted.org/packages/16/cb/88f6c1e6df83002c421d5f854ccf134aa088aa997af786a5dac3f32ec99b/numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae", size = 14110592 }, + { url = "https://files.pythonhosted.org/packages/b4/54/817e6894168a43f33dca74199ba0dd0f1acd99aa6323ed6d323d63d640a2/numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69", size = 5110858 }, + { url = "https://files.pythonhosted.org/packages/c7/99/00d8a1a8eb70425bba7880257ed73fed08d3e8d05da4202fb6b9a81d5ee4/numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13", size = 6645143 }, + { url = "https://files.pythonhosted.org/packages/34/86/5b9c2b7c56e7a9d9297a0a4be0b8433f498eba52a8f5892d9132b0f64627/numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671", size = 14042812 }, + { url = "https://files.pythonhosted.org/packages/df/54/13535f74391dbe5f479ceed96f1403267be302c840040700d4fd66688089/numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571", size = 16093419 }, + { url = "https://files.pythonhosted.org/packages/dd/37/dfb2056842ac61315f225aa56f455da369f5223e4c5a38b91d20da1b628b/numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d", size = 15238969 }, + { url = "https://files.pythonhosted.org/packages/5a/3d/d20d24ee313992f0b7e7b9d9eef642d9b545d39d5b91c4a2cc8c98776328/numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742", size = 17855705 }, + { url = "https://files.pythonhosted.org/packages/5b/40/944c9ee264f875a2db6f79380944fd2b5bb9d712bb4a134d11f45ad5b693/numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e", size = 6270078 }, + { url = "https://files.pythonhosted.org/packages/30/04/e1ee6f8b22034302d4c5c24e15782bdedf76d90b90f3874ed0b48525def0/numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2", size = 12605791 }, 
+ { url = "https://files.pythonhosted.org/packages/ef/fb/51d458625cd6134d60ac15180ae50995d7d21b0f2f92a6286ae7b0792d19/numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95", size = 20920160 }, + { url = "https://files.pythonhosted.org/packages/b4/34/162ae0c5d2536ea4be98c813b5161c980f0443cd5765fde16ddfe3450140/numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c", size = 14119064 }, + { url = "https://files.pythonhosted.org/packages/17/6c/4195dd0e1c41c55f466d516e17e9e28510f32af76d23061ea3da67438e3c/numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca", size = 5152778 }, + { url = "https://files.pythonhosted.org/packages/2f/47/ea804ae525832c8d05ed85b560dfd242d34e4bb0962bc269ccaa720fb934/numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d", size = 6667605 }, + { url = "https://files.pythonhosted.org/packages/76/99/34d20e50b3d894bb16b5374bfbee399ab8ff3a33bf1e1f0b8acfe7bbd70d/numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529", size = 14013275 }, + { url = "https://files.pythonhosted.org/packages/69/8f/a1df7bd02d434ab82539517d1b98028985700cfc4300bc5496fb140ca648/numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3", size = 16074900 }, + { url = "https://files.pythonhosted.org/packages/04/94/b419e7a76bf21a00fcb03c613583f10e389fdc8dfe420412ff5710c8ad3d/numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab", size = 15219122 }, + { url = 
"https://files.pythonhosted.org/packages/65/d9/dddf398b2b6c5d750892a207a469c2854a8db0f033edaf72103af8cf05aa/numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72", size = 17851668 }, + { url = "https://files.pythonhosted.org/packages/d4/dc/09a4e5819a9782a213c0eb4eecacdc1cd75ad8dac99279b04cfccb7eeb0a/numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066", size = 6325288 }, + { url = "https://files.pythonhosted.org/packages/ce/e1/e0d06ec34036c92b43aef206efe99a5f5f04e12c776eab82a36e00c40afc/numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881", size = 12692303 }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 }, + { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 }, + { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 }, + { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 }, + { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 }, + { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 }, + { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 }, + { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 }, + { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 }, + { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 }, + { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 }, + { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 }, + { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "tzdata" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, +] diff --git a/tests/unit/test_metadata.py b/tests/unit/test_metadata.py index 5a1070ff..6b2ea5f4 100644 --- a/tests/unit/test_metadata.py +++ b/tests/unit/test_metadata.py @@ -11,10 +11,20 @@ def test_cargo_env() -> None: assert metadata["rust.project.name"] == "example_project" @pytest.mark.metadata -def test_python_env() -> None: - metadata = sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) - assert re.findall(r"\d+\.\d+\.\d+", metadata["python.environment.click"]) - assert metadata["python.project.name"] == "spam-eggs" +@pytest.mark.parametrize( + "backend", ("poetry", "uv", None) +) +def test_python_env(backend: str | None) -> None: + if backend == "poetry": + metadata = sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data", "python_poetry")) + assert metadata["python.project.name"] == "example-repo" + elif backend == "uv": + metadata = sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data", "python_uv")) + assert metadata["python.project.name"] == "example-repo" + else: + metadata = 
sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) + + assert re.findall(r"\d+\.\d+\.\d+", metadata["python.environment.numpy"]) @pytest.mark.metadata From c36e39602381e65223c489844afd3e922fbdb524 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 17 Dec 2024 16:04:20 +0000 Subject: [PATCH 020/163] Fix more tests --- simvue/api/objects/alert/fetch.py | 7 +- simvue/api/objects/artifact.py | 99 +++++++++----------- simvue/api/objects/base.py | 12 +-- simvue/api/objects/run.py | 4 +- simvue/client.py | 23 +++-- simvue/config/parameters.py | 2 +- simvue/run.py | 9 +- simvue/serialization.py | 10 +- simvue/utilities.py | 14 ++- tests/conftest.py | 2 + tests/functional/test_client.py | 21 +++-- tests/functional/test_config.py | 11 +-- tests/functional/test_dispatch.py | 6 +- tests/functional/test_run_execute_process.py | 59 ++++++------ tests/functional/test_scenarios.py | 2 +- tests/unit/test_conversion_to_dataframe.py | 4 +- 16 files changed, 145 insertions(+), 140 deletions(-) diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 45fe989d..9df929f8 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -9,6 +9,8 @@ import typing import http +import pydantic + from simvue.api.objects.alert.user import UserAlert from simvue.api.request import get_json_from_response from simvue.api.request import get as sv_get @@ -22,8 +24,11 @@ class Alert: """Generic Simvue alert retrieval class""" - def __new__(cls, identifier: str | None = None, **kwargs) -> AlertType: + @pydantic.validate_call() + def __new__(cls, identifier: str, **kwargs) -> AlertType: """Retrieve an object representing an alert either locally or on the server by id""" + if identifier.startswith("offline_"): + raise ValueError("Cannot retrieve offline run from server") _alert_pre = AlertBase(identifier=identifier, **kwargs) if _alert_pre.source == "events": return 
EventsAlert(identifier=identifier, **kwargs) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 1dce7962..b71b42d8 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -19,7 +19,7 @@ from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 from simvue.api.objects.base import SimvueObject from simvue.serialization import serialize_object -from simvue.api.request import put as sv_put, get_json_from_response, get as sv_get +from simvue.api.request import put as sv_put, get_json_from_response, post as sv_post Category = typing.Literal["code", "input", "output"] @@ -104,7 +104,7 @@ def new_file( _artifact.offline_mode(offline) with open(file_path, "rb") as out_f: - _artifact._upload(artifact_data=out_f, run_id=run, **_upload_data) + _artifact._upload(artifact_data={"file": out_f}, run_id=run, **_upload_data) return _artifact @@ -172,16 +172,6 @@ def new_object( _artifact._upload(artifact_data=_serialized, run_id=run, **_upload_data) return _artifact - def _post(self, **kwargs) -> dict[str, typing.Any]: - # The ID is the checksum, set this only if the post - # to server was successful (else offline_ prefix kept) - _identifier = self._staging["checksum"] - _response = super()._post(**kwargs) - self._storage = _response.get("storage_id") - self._storage_url = _response.get("url") - self._identifier = _identifier - return _response - def commit(self) -> None: raise TypeError("Cannot call method 'commit' on write-once type 'Artifact'") @@ -194,10 +184,11 @@ def _upload( # NOTE: Assumes URL for Run artifacts is always same _run_artifacts_url: URL = ( - URL(self._user_config.server.url) / f"runs/{run_id}/artifacts" + URL(self._user_config.server.url) + / f"runs/{run_id}/artifacts/{self._identifier}" ) - _response = sv_put( + _response = sv_post( url=f"{self._storage_url}", headers={}, data=artifact_data, @@ -211,7 +202,7 @@ def _upload( ) get_json_from_response( - 
expected_status=[http.HTTPStatus.OK], + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], allow_parse_failure=True, # JSON response from S3 not parsible scenario=f"uploading artifact '{_obj_parameters['name']}' to object storage", response=_response, @@ -232,46 +223,44 @@ def _upload( def _get(self, storage: str | None = None, **kwargs) -> dict[str, typing.Any]: return super()._get(storage=self._storage, **kwargs) - @classmethod - def _get_all_objects( - cls, count: int | None, offset: int | None, **kwargs - ) -> list[dict[str, typing.Any]]: - _class_instance = cls(read_only=True) - _url = f"{_class_instance._base_url}" - - _response = sv_get( - _url, - headers=_class_instance._headers, - params={"start": offset, "count": count} | kwargs, - ) - - _json_response = get_json_from_response( - response=_response, - expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", - expected_type=list, - ) - - return _json_response - - @classmethod - def get( - cls, *, count: int | None = None, offset: int | None = None, **kwargs - ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: - _class_instance = cls(read_only=True) - if (_data := cls._get_all_objects(count, offset, **kwargs)) is None: - raise RuntimeError( - f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" - ) - - for _entry in _data: - _id = _entry.pop("id") - yield _id, cls(read_only=True, identifier=_id, **_entry) - - @property - def name(self) -> str: - """Retrieve the name for this artifact""" - return self._get_attribute("name") + # @classmethod + # def _get_all_objects( + # cls, count: int | None, offset: int | None, **kwargs + # ) -> list[dict[str, typing.Any]]: + # _class_instance = cls(read_only=True) + # _url = f"{_class_instance._base_url}" + + # _response = sv_get( + # _url, + # headers=_class_instance._headers, + # params={"start": offset, "count": count} | kwargs, + # ) + + # return 
get_json_from_response( + # response=_response, + # expected_status=[http.HTTPStatus.OK], + # scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + # expected_type=list, + # ) + + # @classmethod + # def get( + # cls, *, count: int | None = None, offset: int | None = None, **kwargs + # ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: + # _class_instance = cls(read_only=True) + # if (_data := cls._get_all_objects(count, offset, **kwargs)) is None: + # raise RuntimeError( + # f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + # ) + # import pdb;pdb.set_trace() + # for _entry in _data: + # _id = _entry.pop("id") + # yield _id, cls(read_only=True, identifier=_id, **_entry) + + # @property + # def name(self) -> str: + # """Retrieve the name for this artifact""" + # return self._get_attribute("name") @property def checksum(self) -> str: diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 082d072c..aba440fc 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -146,7 +146,7 @@ def __init__( ) self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token}", + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", "User-Agent": f"Simvue Python client {__version__}", } @@ -307,21 +307,19 @@ def _get_all_objects( headers=_class_instance._headers, params={"start": offset, "count": count} | kwargs, ) - _json_response = get_json_from_response( + return get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", ) - return _json_response - def read_only(self, is_read_only: bool) -> None: self._read_only = is_read_only # If using writable mode, clear the staging dictionary as # in this context it contains existing data retrieved - # from the server/local entry which we dont want token - # repush unnecessarily then read 
any locally staged changes + # from the server/local entry which we dont want to + # re-push unnecessarily, then read any locally staged changes if not self._read_only: self._staging = self._get_local_staged() @@ -379,7 +377,7 @@ def _post(self, **kwargs) -> dict[str, typing.Any]: _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Creation of {self._label} '{kwargs}'", + scenario=f"Creation of {self._label}", ) if _id := _json_response.get("id"): diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 0b732f29..6439e560 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -192,7 +192,9 @@ def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, Non @write_only @pydantic.validate_call def alerts(self, alerts: list[str]) -> None: - self._staging["alerts"] = alerts + self._staging["alerts"] = [ + alert for alert in alerts if alert not in self._staging.get("alerts", []) + ] @property @staging_check diff --git a/simvue/client.py b/simvue/client.py index 4a6a2bb7..32853936 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -20,6 +20,7 @@ import requests from simvue.api.objects.alert.base import AlertBase +from simvue.exception import ObjectNotFoundError from .converters import ( aggregated_metrics_to_dataframe, @@ -57,7 +58,7 @@ class Client: def __init__( self, *, - server_token: typing.Optional[str] = None, + server_token: typing.Optional[pydantic.SecretStr] = None, server_url: typing.Optional[str] = None, ) -> None: """Initialise an instance of the Simvue client @@ -81,7 +82,7 @@ def __init__( logger.warning(f"No {label} specified") self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token}" + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}" } @prettify_pydantic @@ -424,7 +425,7 @@ def delete_alert(self, alert_id: str) -> None: @prettify_pydantic @pydantic.validate_call - def 
list_artifacts(self, run_id: str) -> dict[str, Artifact]: + def list_artifacts(self, run_id: str) -> typing.Generator[Artifact, None, None]: """Retrieve artifacts for a given run Parameters @@ -432,17 +433,17 @@ def list_artifacts(self, run_id: str) -> dict[str, Artifact]: run_id : str unique identifier for the run - Returns - ------- - dict[str, Artifact] - list of relevant artifacts + Yields + ------ + str, Artifact + ID and artifact entry for relevant artifacts Raises ------ RuntimeError if retrieval of artifacts failed when communicating with the server """ - return dict(Artifact.get(runs=json.dumps([run_id]))) # type: ignore + return Artifact.get(runs=json.dumps([run_id])) # type: ignore def _retrieve_artifacts_from_server( self, run_id: str, name: str, count: int | None = None @@ -1019,7 +1020,9 @@ def get_alerts( return [alert.name if names_only else alert for _, alert in Alert.get()] # type: ignore return [ - alert.get("name") if names_only else Alert(**alert) + alert.get("name") + if names_only + else Alert(identifier=alert.get("id"), **alert) for alert in Run(identifier=run_id).get_alert_details() if not critical_only or alert["status"].get("current") == "critical" ] # type: ignore @@ -1093,5 +1096,5 @@ def get_tag(self, tag_id: str) -> Tag | None: """ try: return Tag(identifier=tag_id) - except ValueError: + except ObjectNotFoundError: return None diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 4e1d310b..cfb90bf4 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -39,7 +39,7 @@ def check_token(cls, v: typing.Any) -> str: raise AssertionError("Failed to parse Simvue token - invalid token form") if time.time() - expiry > 0: raise AssertionError("Simvue token has expired") - return value + return v class OfflineSpecifications(pydantic.BaseModel): diff --git a/simvue/run.py b/simvue/run.py index 28ef82a5..ac60fdbb 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -108,7 +108,7 @@ def __init__( self, 
mode: typing.Literal["online", "offline", "disabled"] = "online", abort_callback: typing.Optional[typing.Callable[[Self], None]] = None, - server_token: typing.Optional[str] = None, + server_token: typing.Optional[pydantic.SecretStr] = None, server_url: typing.Optional[str] = None, debug: bool = False, ) -> None: @@ -180,7 +180,7 @@ def __init__( else self._user_config.metrics.resources_metrics_interval ) self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token}" + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}" } self._sv_obj: typing.Optional[RunObject] = None self._pid: typing.Optional[int] = 0 @@ -1621,7 +1621,8 @@ def add_alerts( self._error("Need to provide alert ids or alert names") return False - self._sv_obj.alerts = self._sv_obj.alerts + [ids] + # Avoid duplication + self._sv_obj.alerts = list(set(self._sv_obj.alerts + [ids])) self._sv_obj.commit() return False @@ -1809,7 +1810,7 @@ def create_alert( _alert.commit() _alert_id = _alert.id - self._sv_obj.alerts = list(self._sv_obj.alerts) + [_alert_id] + self._sv_obj.alerts = [_alert_id] self._sv_obj.commit() diff --git a/simvue/serialization.py b/simvue/serialization.py index 7fcb7a22..5433cba8 100644 --- a/simvue/serialization.py +++ b/simvue/serialization.py @@ -46,9 +46,7 @@ def _is_torch_tensor(data: typing.Any) -> bool: return False -def serialize_object( - data: typing.Any, allow_pickle: bool -) -> typing.Optional[tuple[str, str]]: +def serialize_object(data: typing.Any, allow_pickle: bool) -> tuple[str, str] | None: """Determine which serializer to use for the given object Parameters @@ -85,13 +83,11 @@ def serialize_object( elif serialized := _serialize_json(data): return serialized - if allow_pickle: - return _serialize_pickle(data) - return None + return _serialize_pickle(data) if allow_pickle else None @check_extra("plot") -def _serialize_plotly_figure(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def 
_serialize_plotly_figure(data: typing.Any) -> tuple[str, str]: try: import plotly except ImportError: diff --git a/simvue/utilities.py b/simvue/utilities.py index 56455e8b..adc1438c 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -167,7 +167,19 @@ def parse_pydantic_error(error: pydantic.ValidationError) -> str: ) _type: str = data["type"] - if (_input_type := type(_input)) != _type and "error" not in _type: + _skip_type_compare_for = ( + "error", + "missing", + "unexpected", + "union_tag", + "parsing", + "scheme", + "syntax", + ) + + if (_input_type := type(_input)) != _type and all( + e not in _type for e in _skip_type_compare_for + ): _type = f"{_input_type.__name__} != {_type}" out_table.append( diff --git a/tests/conftest.py b/tests/conftest.py index d239d3a5..153ef9ac 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -160,6 +160,8 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur if create_objects: TEST_DATA["metrics"] = ("metric_counter", "metric_val") + + assert not run._id.startswith("offline_") TEST_DATA["run_id"] = run._id TEST_DATA["run_name"] = run._name TEST_DATA["url"] = run._user_config.server.url diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 7235f36a..5162cc3a 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -9,6 +9,7 @@ import time import tempfile import simvue.client as svc +from simvue.exception import ObjectNotFoundError import simvue.run as sv_run import simvue.api.objects as sv_api_obj @@ -33,7 +34,7 @@ def test_get_alerts(create_test_run: tuple[sv_run.Run, dict], from_run: bool) -> triggered_alerts_full = client.get_alerts(run_id=create_test_run[1]["run_id"], critical_only=False, names_only=False) assert len(triggered_alerts_full) == 7 for alert in triggered_alerts_full: - if alert["alert"].get("name") == "value_above_1": + if alert.name == "value_above_1": assert alert["alert"]["status"]["current"] == 
"critical" else: assert (triggered_alerts_full := client.get_alerts(names_only=True, critical_only=False)) @@ -70,12 +71,12 @@ def test_get_metric_values( _first_entry: dict = next(iter(_metrics_dict.values())) assert create_test_run[1]["metrics"][0] in _metrics_dict.keys() if aggregate: - _value_types = set(i[1] for i in _first_entry.keys()) + _value_types = {i[1] for i in _first_entry} assert all( i in _value_types for i in ("average", "min", "max") ), f"Expected ('average', 'min', 'max') in {_value_types}" else: - _runs = set(i[1] for i in _first_entry.keys()) + _runs = {i[1] for i in _first_entry} assert create_test_run[1]["run_id"] in _runs @@ -97,9 +98,9 @@ def test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None: @pytest.mark.dependency @pytest.mark.client -def test_get_artifacts(create_test_run: tuple[sv_run.Run, dict]) -> None: +def test_get_artifacts_entries(create_test_run: tuple[sv_run.Run, dict]) -> None: client = svc.Client() - assert client.list_artifacts(create_test_run[1]["run_id"]) + assert dict(client.list_artifacts(create_test_run[1]["run_id"])) assert client.get_artifact(create_test_run[1]["run_id"], name="test_attributes") @@ -251,13 +252,13 @@ def test_folder_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_run_folder_metadata_find(create_plain_run: tuple[sv_run.Run, dict]) -> None: run, run_data = create_plain_run rand_val = random.randint(0, 1000) - run.set_folder_details(path=run_data["folder"], metadata={'atest': rand_val}) + run.set_folder_details(metadata={'atest': rand_val}) run.close() time.sleep(1.0) client = svc.Client() data = client.get_folders(filters=[f'metadata.atest == {rand_val}']) - assert run_data["folder"] in [i["path"] for i in data] + assert run_data["folder"] in [i.path for _, i in data] @pytest.mark.client @@ -271,7 +272,7 @@ def test_tag_deletion(create_plain_run: tuple[sv_run.Run, dict]) -> None: tags = client.get_tags() client.delete_run(run.id) time.sleep(1.0) - tag_identifier = 
[tag["id"] for tag in tags if tag["name"] == f"delete_me_{unique_id}"][0] + tag_identifier = [identifier for identifier, tag in tags if tag.name == f"delete_me_{unique_id}"][0] client.delete_tag(tag_identifier) time.sleep(1.0) assert not client.get_tag(tag_identifier) @@ -323,7 +324,7 @@ def test_alert_deletion() -> None: time.sleep(1) _client.delete_alert(alert_id=_alert.id) - with pytest.raises(RuntimeError) as e: + with pytest.raises(ObjectNotFoundError) as e: sv_api_obj.Alert(identifier=_alert.id) @@ -341,6 +342,6 @@ def test_abort_run() -> None: time.sleep(1) assert _run.abort_trigger _run.delete() - _folder.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py index 07a0c803..29534e5e 100644 --- a/tests/functional/test_config.py +++ b/tests/functional/test_config.py @@ -92,10 +92,7 @@ def test_config_setup( def _mocked_find(file_names: list[str], *_, ppt_file=_ppt_file, conf_file=_config_file, **__) -> str: - if "pyproject.toml" in file_names: - return ppt_file - else: - return conf_file + return ppt_file if "pyproject.toml" in file_names else conf_file mocker.patch("simvue.config.user.sv_util.find_first_instance_of_file", _mocked_find) @@ -118,13 +115,13 @@ def _mocked_find(file_names: list[str], *_, ppt_file=_ppt_file, conf_file=_confi if use_env: assert _config.server.url == f"{_other_url}api" - assert _config.server.token == _other_token + assert _config.server.token.get_secret_value() == _other_token elif use_args: assert _config.server.url == f"{_arg_url}api" - assert _config.server.token == _arg_token + assert _config.server.token.get_secret_value() == _arg_token elif use_file and use_file != "pyproject.toml": assert _config.server.url == f"{_url}api" - assert _config.server.token == _token + assert _config.server.token.get_secret_value() == _token assert f"{_config.offline.cache}" == temp_d if use_file == "extended": diff --git 
a/tests/functional/test_dispatch.py b/tests/functional/test_dispatch.py index 2ac15cbb..02e0d99f 100644 --- a/tests/functional/test_dispatch.py +++ b/tests/functional/test_dispatch.py @@ -24,9 +24,9 @@ @pytest.mark.parametrize("multiple", (True, False), ids=("multiple", "single")) def test_queued_dispatcher(overload_buffer: bool, multiple: bool, append_during_dispatch: bool) -> None: buffer_size: int = 10 - n_elements: int = buffer_size - 1 if not overload_buffer else 2 * buffer_size + n_elements: int = 2 * buffer_size if overload_buffer else buffer_size - 1 max_read_rate: float = 0.2 - time_threshold: float = 1 if not overload_buffer else 1 + (1 / max_read_rate) + time_threshold: float = 1 + (1 / max_read_rate) if overload_buffer else 1 start_time = time.time() @@ -80,7 +80,7 @@ def test_nested_queued_dispatch(multi_queue: bool) -> None: buffer_size: int = 10 n_elements: int = 2 * buffer_size max_read_rate: float = 0.2 - variable: str | list[str] = "demo" if not multi_queue else ["events", "metrics"] + variable: str | list[str] = ["events", "metrics"] if multi_queue else "demo" result_queue = Queue() diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index 5cf0386e..7bd35093 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -49,34 +49,33 @@ def test_processes_cwd(create_plain_run: dict[Run, dict]) -> None: """ run, _ = create_plain_run with tempfile.TemporaryDirectory() as temp_dir: - with tempfile.NamedTemporaryFile(dir=temp_dir, suffix=".py") as temp_file: - with open(temp_file.name, "w") as out_f: - out_f.writelines([ - "import os\n", - "f = open('new_file.txt', 'w')\n", - "f.write('Test Line')\n", - "f.close()" - ]) - - run_id = run.id - run.add_process( - identifier="sleep_10_process", - executable="python", - script=temp_file.name, - cwd=temp_dir - ) - time.sleep(1) - run.save_file(os.path.join(temp_dir, "new_file.txt"), 'output') - - client = 
Client() - - # Check that the script was uploaded to the run correctly - os.makedirs(os.path.join(temp_dir, "downloaded")) - client.get_artifact_as_file(run_id, os.path.basename(temp_file.name), output_dir=os.path.join(temp_dir, "downloaded")) - assert filecmp.cmp(os.path.join(temp_dir, "downloaded", os.path.basename(temp_file.name)), temp_file.name) - - client.get_artifact_as_file(run_id, "new_file.txt", path=os.path.join(temp_dir, "downloaded")) - new_file = open(os.path.join(temp_dir, "downloaded", "new_file.txt"), "r") - assert new_file.read() == "Test Line" - new_file.close() + with tempfile.NamedTemporaryFile(dir=temp_dir, suffix=".py") as temp_file: + with open(temp_file.name, "w") as out_f: + out_f.writelines([ + "import os\n", + "f = open('new_file.txt', 'w')\n", + "f.write('Test Line')\n", + "f.close()" + ]) + + run_id = run.id + run.add_process( + identifier="sleep_10_process", + executable="python", + script=temp_file.name, + cwd=temp_dir + ) + time.sleep(1) + run.save_file(os.path.join(temp_dir, "new_file.txt"), 'output') + + client = Client() + + # Check that the script was uploaded to the run correctly + os.makedirs(os.path.join(temp_dir, "downloaded")) + client.get_artifact_as_file(run_id, os.path.basename(temp_file.name), output_dir=os.path.join(temp_dir, "downloaded")) + assert filecmp.cmp(os.path.join(temp_dir, "downloaded", os.path.basename(temp_file.name)), temp_file.name) + + client.get_artifact_as_file(run_id, "new_file.txt", path=os.path.join(temp_dir, "downloaded")) + with open(os.path.join(temp_dir, "downloaded", "new_file.txt"), "r") as new_file: + assert new_file.read() == "Test Line" diff --git a/tests/functional/test_scenarios.py b/tests/functional/test_scenarios.py index c3d266c0..84471376 100644 --- a/tests/functional/test_scenarios.py +++ b/tests/functional/test_scenarios.py @@ -59,7 +59,7 @@ def upload(name: str, values_per_run: int, shared_dict) -> None: def test_uploaded_data_immediately_accessible( values_per_run: int, 
processing: str, run_deleter ) -> None: - name = "Test-" + str(random.randint(0, 1000000000)) + name = f"Test-{str(random.randint(0, 1000000000))}" manager = Manager() shared_dict = manager.dict() diff --git a/tests/unit/test_conversion_to_dataframe.py b/tests/unit/test_conversion_to_dataframe.py index 205a4ea9..a2cfc82b 100644 --- a/tests/unit/test_conversion_to_dataframe.py +++ b/tests/unit/test_conversion_to_dataframe.py @@ -33,13 +33,13 @@ def test_run_conversion_to_dataframe(): 'metadata.b2']) data = runs_df.to_dict('records') - for i in range(0, len(runs)): + for i in range(len(runs)): assert(runs[i]['name'] == data[i]['name']) assert(runs[i]['folder'] == data[i]['folder']) assert(runs[i]['created'] == data[i]['created']) assert(runs[i]['started'] == data[i]['started']) assert(runs[i]['ended'] == data[i]['ended']) for item in runs[i]['metadata']: - index = 'metadata.%s' % item + index = f'metadata.{item}' assert(index in data[i]) assert(runs[i]['metadata'][item] == data[i][index]) From 57db61d12108ba1098a25c947c5c66bb5328cf8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 18 Dec 2024 08:48:30 +0000 Subject: [PATCH 021/163] Modified alert status setting --- simvue/api/objects/alert/base.py | 21 ++++++++++----- simvue/api/objects/alert/user.py | 23 ++++++++++------- simvue/api/objects/artifact.py | 44 ++++---------------------------- simvue/run.py | 8 +++++- tests/unit/test_event_alert.py | 1 + tests/unit/test_user_alert.py | 24 ++++++++++++++++- 6 files changed, 65 insertions(+), 56 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 2751e7cf..5bd245dc 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -6,9 +6,11 @@ """ +import http import pydantic import typing from simvue.api.objects.base import SimvueObject, staging_check +from simvue.api.request import get as sv_get, get_json_from_response from simvue.models import NAME_REGEX @@ -116,14 
+118,21 @@ def abort(self, abort: bool) -> None: """Configure alert to trigger aborts""" self._staging["abort"] = abort - @property - @staging_check - def state(self) -> typing.Literal["ok", "critical", "no_data"]: - return self._get_attribute("state") + def get_status(self, run_id: str) -> typing.Literal["ok", "critical", "no_data"]: + _response = sv_get( + url=self.url / "status", headers=self._headers, params={"run": run_id} + ) + + _json_response = get_json_from_response( + expected_status=[http.HTTPStatus.OK], + response=_response, + scenario=f"Retrieving status for alert '{self._identifier}'", + ) + + return _json_response["status"] - @state.setter @pydantic.validate_call - def state(self, state: typing.Literal["ok", "critical"]) -> None: + def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: raise AttributeError( f"Cannot update state for alert of type '{self.__class__.__name__}'" ) diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 7f60779f..74ce4052 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -8,8 +8,9 @@ import pydantic import typing +import http -from simvue.api.objects.base import staging_check +from simvue.api.request import get_json_from_response, put as sv_put from .base import AlertBase from simvue.models import NAME_REGEX @@ -59,13 +60,17 @@ def get( ) -> dict[str, typing.Any]: raise NotImplementedError("Retrieve of only user alerts is not yet supported") - @property - @staging_check - def state(self) -> typing.Literal["ok", "critical", "no_data"]: - return self._get_attribute("state") - - @state.setter @pydantic.validate_call - def state(self, state: typing.Literal["ok", "critical"]) -> None: + def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: """Set alert name""" - self._staging["state"] = state + _response = sv_put( + url=self.url / "status" / run_id, + data={"status": status}, + 
headers=self._headers, + ) + + get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Updating state of alert '{self._identifier}' to '{status}'", + ) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index b71b42d8..f45aaf2b 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -223,45 +223,6 @@ def _upload( def _get(self, storage: str | None = None, **kwargs) -> dict[str, typing.Any]: return super()._get(storage=self._storage, **kwargs) - # @classmethod - # def _get_all_objects( - # cls, count: int | None, offset: int | None, **kwargs - # ) -> list[dict[str, typing.Any]]: - # _class_instance = cls(read_only=True) - # _url = f"{_class_instance._base_url}" - - # _response = sv_get( - # _url, - # headers=_class_instance._headers, - # params={"start": offset, "count": count} | kwargs, - # ) - - # return get_json_from_response( - # response=_response, - # expected_status=[http.HTTPStatus.OK], - # scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", - # expected_type=list, - # ) - - # @classmethod - # def get( - # cls, *, count: int | None = None, offset: int | None = None, **kwargs - # ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: - # _class_instance = cls(read_only=True) - # if (_data := cls._get_all_objects(count, offset, **kwargs)) is None: - # raise RuntimeError( - # f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" - # ) - # import pdb;pdb.set_trace() - # for _entry in _data: - # _id = _entry.pop("id") - # yield _id, cls(read_only=True, identifier=_id, **_entry) - - # @property - # def name(self) -> str: - # """Retrieve the name for this artifact""" - # return self._get_attribute("name") - @property def checksum(self) -> str: """Retrieve the checksum for this artifact""" @@ -292,6 +253,11 @@ def storage_url(self) -> str | None: """Retrieve storage URL for the artifact""" return 
self._storage_url + @property + def name(self) -> str | None: + """Retrieve name for the artifact""" + return self._get_attribute("name") + def download_content(self) -> typing.Any: """Download content of artifact from storage""" _response = requests.get(f"{self.storage_url}", timeout=DOWNLOAD_TIMEOUT) diff --git a/simvue/run.py b/simvue/run.py index ac60fdbb..3a53497a 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1841,8 +1841,14 @@ def log_alert( return False _alert = Alert(identifier=identifier) + if not isinstance(_alert, UserAlert): + self._error( + f"Cannot update state for alert '{identifier}' " + f"of type '{_alert.__class__.__name__.lower()}'" + ) + return False _alert.read_only(False) - _alert.state = state + _alert.set_state(run_id=self._id, state=state) _alert.commit() return True diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index 908c86be..3934dbe3 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -60,6 +60,7 @@ def test_event_alert_modification_online() -> None: _alert.commit() time.sleep(1) _new_alert = Alert(_alert.id) + _new_alert.read_only(False) assert isinstance(_new_alert, EventsAlert) _new_alert.description = "updated!" assert _new_alert.description != "updated!" 
diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index b28bdcb6..26fb4c97 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -4,7 +4,8 @@ import pytest import uuid -from simvue.api.objects import Alert, UserAlert +from simvue.api.objects import Alert, UserAlert, Run +from simvue.api.objects.folder import Folder @pytest.mark.api def test_user_alert_creation_online() -> None: @@ -102,3 +103,24 @@ def test_user_alert_properties() -> None: if _failed: raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) + +@pytest.mark.api +def test_user_alert_status() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none" + ) + _alert.commit() + _folder = Folder.new(path=f"/simvue_unit_tests/{_uuid}") + _run = Run.new(folder=f"/simvue_unit_tests/{_uuid}") + _folder.commit() + _run.alerts = [_alert.id] + _run.commit() + _alert.set_status(_run.id, "critical") + time.sleep(1) + assert _alert.get_status(_run.id) == "critical" + _run.delete() + _folder.delete(recursive=True, runs_only=False, delete_runs=True) + _alert.delete() + From e96c69e687f372bdbfe59d3855b5fcd5863fb1a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 18 Dec 2024 14:58:03 +0000 Subject: [PATCH 022/163] Continue trying to address artifact object --- simvue/api/objects/artifact.py | 200 ++++++++++++++++++++++++--------- simvue/api/objects/run.py | 8 +- simvue/api/request.py | 8 +- simvue/client.py | 18 ++- simvue/exception.py | 6 +- tests/conftest.py | 14 ++- 6 files changed, 185 insertions(+), 69 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index f45aaf2b..a9c7c015 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -11,15 +11,22 @@ import typing import pydantic import os.path +import functools import sys import requests from simvue.api.url import URL +from 
simvue.exception import ObjectNotFoundError from simvue.models import NAME_REGEX from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 from simvue.api.objects.base import SimvueObject from simvue.serialization import serialize_object -from simvue.api.request import put as sv_put, get_json_from_response, post as sv_post +from simvue.api.request import ( + put as sv_put, + get_json_from_response, + post as sv_post, + get as sv_get, +) Category = typing.Literal["code", "input", "output"] @@ -31,10 +38,16 @@ class Artifact(SimvueObject): """Connect to/create an artifact locally or on the server""" - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__( + self, + identifier: str | None = None, + storage: str | None = None, + run_id: str | None = None, + **kwargs, + ) -> None: + self._storage_id: str | None = storage + self._run_id: str | None = run_id super().__init__(identifier, **kwargs) - self._storage_url: str | None = None - self._storage: str | None = None self._label = "artifact" @classmethod @@ -98,13 +111,29 @@ def new_file( } _artifact = Artifact(_read_only=False, **_upload_data) - _artifact._storage = storage - _artifact._post(**_artifact._staging) - _artifact.offline_mode(offline) + if offline: + return _artifact + + _response = _artifact._post(**_artifact._staging) + + # Either use existing storage ID if provided from this point onwards + # or use the new ID provided + _storage_id = storage or _response["storage_id"] + _artifact._storage_id = _storage_id + + _url = _response.get("url") + + _fields = _response.get("fields") + with open(file_path, "rb") as out_f: - _artifact._upload(artifact_data={"file": out_f}, run_id=run, **_upload_data) + _artifact._upload( + storage_url=_url, + artifact_data={"files": {"file": out_f}, "data": _fields}, + run_id=run, + **_upload_data | {"storage": _storage_id}, + ) return _artifact @@ -139,7 +168,7 @@ def new_object( object to serialize and upload 
allow_pickling : bool, optional whether to allow the object to be pickled if no other - serialiazation found. Default is True + serialization found. Default is True offline : bool, optional whether to define this artifact locally, default is False @@ -166,52 +195,67 @@ def new_object( } _artifact = Artifact(read_only=False, **_upload_data) - _artifact._storage = storage - _artifact._post(**_artifact._staging) _artifact.offline_mode(offline) - _artifact._upload(artifact_data=_serialized, run_id=run, **_upload_data) + + if offline: + return _artifact + + _response = _artifact._post(**_artifact._staging) + _url = _response.get("url") + + # Either use existing storage ID if provided from this point onwards + # or use the new ID provided + _storage_id = storage or _response["storage_id"] + _artifact._storage_id = _storage_id + + _artifact._upload( + storage_url=_url, + artifact_data=_serialized, + run_id=run, + **_upload_data | {"storage": _storage_id}, + ) return _artifact def commit(self) -> None: raise TypeError("Cannot call method 'commit' on write-once type 'Artifact'") def _upload( - self, artifact_data: typing.Any, run_id: str, **_obj_parameters + self, + artifact_data: typing.Any, + run_id: str, + storage_url: str | None, + **_obj_parameters, ) -> None: - # If local file store then do nothing - if not self.storage_url or self._offline: - return - # NOTE: Assumes URL for Run artifacts is always same _run_artifacts_url: URL = ( URL(self._user_config.server.url) / f"runs/{run_id}/artifacts/{self._identifier}" ) - _response = sv_post( - url=f"{self._storage_url}", - headers={}, - data=artifact_data, - is_json=False, - timeout=UPLOAD_TIMEOUT, - ) + if storage_url: + _response = sv_post( + url=storage_url, headers={}, is_json=False, **artifact_data + ) - self._logger.debug( - "Got status code %d when uploading artifact", - _response.status_code, - ) + self._logger.debug( + "Got status code %d when uploading artifact", + _response.status_code, + ) - 
get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], - allow_parse_failure=True, # JSON response from S3 not parsible - scenario=f"uploading artifact '{_obj_parameters['name']}' to object storage", - response=_response, - ) + get_json_from_response( + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], + allow_parse_failure=True, # JSON response from S3 not parsible + scenario=f"uploading artifact '{_obj_parameters['name']}' to object storage", + response=_response, + ) + + if not _obj_parameters.get("storage"): + return _response = sv_put( url=f"{_run_artifacts_url}", headers=self._headers, - data=_obj_parameters | {"storage": self.storage}, + data=_obj_parameters, ) get_json_from_response( @@ -221,7 +265,7 @@ def _upload( ) def _get(self, storage: str | None = None, **kwargs) -> dict[str, typing.Any]: - return super()._get(storage=self._storage, **kwargs) + return super()._get(storage=storage or self._storage_id, **kwargs) @property def checksum(self) -> str: @@ -241,39 +285,82 @@ def original_path(self) -> str: @property def storage(self) -> str | None: """Retrieve the storage identifier for this artifact""" - return self._storage + return self._get_attribute("storage") @property def type(self) -> str: """Retrieve the MIME type for this artifact""" return self._get_attribute("type") - @property - def storage_url(self) -> str | None: - """Retrieve storage URL for the artifact""" - return self._storage_url - @property def name(self) -> str | None: """Retrieve name for the artifact""" return self._get_attribute("name") - def download_content(self) -> typing.Any: - """Download content of artifact from storage""" - _response = requests.get(f"{self.storage_url}", timeout=DOWNLOAD_TIMEOUT) + @classmethod + def from_name( + cls, run_id: str, name: str, **kwargs + ) -> typing.Union["Artifact", None]: + _temp = Artifact(**kwargs) + _url = _temp._base_url / f"runs/{run_id}/artifacts" + _response = sv_get(url=_url, 
params={"name": name}, headers=_temp._headers) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", + ) - get_json_from_response( + if _response.status_code == http.HTTPStatus.NOT_FOUND: + raise ObjectNotFoundError(_temp._label, name, extra=f"for run '{run_id}'") + + return Artifact(run_id=run_id, **_json_response) + + @functools.lru_cache + def get_storage_url(self, run_id: str | None) -> typing.Any: + """Retrieve the storage location for a particular run""" + if not self._identifier: + raise ValueError("Cannot retrieve artifact, no ID specified") + + _run_id = run_id or self._run_id + + if not _run_id: + raise ValueError( + "A run identifier must be specified when downloading an artifact" + ) + + _url = self._base_url / "runs" / run_id / "artifacts" / self._identifier + + _response = sv_get( + f"{_url}", + headers=self._headers, + ) + + _json_response = get_json_from_response( response=_response, - expected_status=[http.HTTPStatus.OK], + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Retrieval of content for {self._label} '{self._identifier}'", ) - return _response.content + if _response.status_code == http.HTTPStatus.NOT_FOUND: + raise ObjectNotFoundError( + self._label, self.name, extra=f"for run '{run_id}'" + ) + + if not (_url := _json_response.get("url")): + raise RuntimeError( + f"Expected key 'url' for retrieval of artifact '{self.name}'" + ) + + return _url @pydantic.validate_call - def download(self, output_file: pathlib.Path) -> pathlib.Path | None: + def download( + self, output_file: pathlib.Path, run_id: str | None = None + ) -> pathlib.Path | None: + _storage_url = self.get_storage_url(run_id) + _response = requests.get( - f"{self.storage_url}", stream=True, timeout=DOWNLOAD_TIMEOUT + f"{_storage_url}", stream=True, timeout=DOWNLOAD_TIMEOUT ) get_json_from_response( @@ -298,3 
+385,16 @@ def download(self, output_file: pathlib.Path) -> pathlib.Path | None: out_f.write(data) return output_file if output_file.exists() else None + + def download_content(self, run_id: str | None = None) -> typing.Any: + """Download content of artifact from storage""" + _storage_url = self.get_storage_url(run_id) + _response = requests.get(_storage_url, timeout=DOWNLOAD_TIMEOUT) + + get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of content for {self._label} '{self._identifier}'", + ) + + return _response.content diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 6439e560..afbb02b0 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -225,10 +225,10 @@ def started(self, started: datetime.datetime) -> None: @property @staging_check def endtime(self) -> datetime.datetime | None: - _endtime: str | None = self._get_attribute("endtime") - if not _endtime: - return None - return datetime.datetime.strptime(_endtime, DATETIME_FORMAT) + _endtime: str | None = self._get_attribute("endtime", None) + return ( + datetime.datetime.strptime(_endtime, DATETIME_FORMAT) if _endtime else None + ) @endtime.setter @write_only diff --git a/simvue/api/request.py b/simvue/api/request.py index eb238c5b..41317595 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -62,7 +62,11 @@ def is_retryable_exception(exception: Exception) -> bool: reraise=True, ) def post( - url: str, headers: dict[str, str], data: typing.Any, is_json: bool = True + url: str, + headers: dict[str, str], + data: typing.Any, + is_json: bool = True, + files: dict[str, typing.Any] | None = None, ) -> requests.Response: """HTTP POST with retries @@ -91,7 +95,7 @@ def post( logging.debug(f"POST: {url}\n\tdata={data_sent}") response = requests.post( - url, headers=headers, data=data_sent, timeout=DEFAULT_API_TIMEOUT + url, headers=headers, data=data_sent, timeout=DEFAULT_API_TIMEOUT, files=files ) if 
response.status_code == http.HTTPStatus.UNPROCESSABLE_ENTITY: diff --git a/simvue/client.py b/simvue/client.py index 32853936..f996af32 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -120,13 +120,11 @@ def get_run_id_from_name( "Could not collect ID - no run found with this name." ) from e - try: + with contextlib.suppress(StopIteration): next(_runs) raise RuntimeError( "Could not collect ID - more than one run exists with this name." ) - except StopIteration: - pass return _id @@ -448,7 +446,7 @@ def list_artifacts(self, run_id: str) -> typing.Generator[Artifact, None, None]: def _retrieve_artifacts_from_server( self, run_id: str, name: str, count: int | None = None ) -> typing.Generator[tuple[str, Artifact], None, None]: - return Artifact.get(runs=[run_id], name=name, count=count) # type: ignore + return Artifact.get(runs=json.dumps([run_id]), name=name, count=count) # type: ignore @prettify_pydantic @pydantic.validate_call @@ -496,12 +494,12 @@ def get_artifact( RuntimeError if retrieval of artifact from the server failed """ - _artifacts = self._retrieve_artifacts_from_server(run_id, name, count=1) - - try: - _id, _artifact = next(_artifacts) - except StopIteration as e: - raise ValueError(f"No artifact '{name}' found for run '{run_id}'") from e + _artifact = Artifact.from_name( + run_id=run_id, + name=name, + server_url=self._user_config.server.url, + server_token=self._user_config.server.token, + ) _content = _artifact.download_content() diff --git a/simvue/exception.py b/simvue/exception.py index 58bafb10..3dc5e65e 100644 --- a/simvue/exception.py +++ b/simvue/exception.py @@ -10,9 +10,11 @@ class ObjectNotFoundError(Exception): """For failure retrieving Simvue object from server""" - def __init__(self, obj_type: str, name: str) -> None: + def __init__(self, obj_type: str, name: str, extra: str | None = None) -> None: super().__init__( - f"Failed to retrieve '{name}' of type '{obj_type}' " f"no such object" + f"Failed to retrieve '{name}' of type 
'{obj_type}' " + f"{f'{extra}, ' if extra else ''}" + "no such object" ) diff --git a/tests/conftest.py b/tests/conftest.py index 153ef9ac..5ea1ef28 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,9 @@ import json import pathlib import logging +from simvue.api.objects.artifact import Artifact import simvue.run as sv_run +import simvue.api.objects as sv_api_obj import simvue.utilities MAX_BUFFER_SIZE: int = 10 @@ -90,6 +92,17 @@ def create_plain_run_offline(mocker: pytest_mock.MockerFixture, request, monkeyp clear_out_files() +@pytest.fixture +def create_run_object() -> sv_api_obj.Run: + _fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0] + _folder = sv_api_obj.Folder.new(path=f"/simvue_unit_testing/{_fix_use_id}") + _folder.commit() + _run = sv_api_obj.Run.new(folder=f"/simvue_unit_testing/{_fix_use_id}") + yield _run + _run.delete() + _folder.delete(recursive=True, runs_only=False, delete_runs=True) + + def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.FixtureRequest, created_only: bool=False): fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0] TEST_DATA = { @@ -161,7 +174,6 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur if create_objects: TEST_DATA["metrics"] = ("metric_counter", "metric_val") - assert not run._id.startswith("offline_") TEST_DATA["run_id"] = run._id TEST_DATA["run_name"] = run._name TEST_DATA["url"] = run._user_config.server.url From a7f5b5f59d3f423d05dc51dad0c4c38c943854ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 19 Dec 2024 09:14:56 +0000 Subject: [PATCH 023/163] Refactor the object upload --- simvue/api/objects/artifact.py | 162 ++++++++++++++++----------------- 1 file changed, 80 insertions(+), 82 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index a9c7c015..daa5a1c8 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -12,6 +12,7 @@ import pydantic 
import os.path import functools +import io import sys import requests @@ -41,20 +42,48 @@ class Artifact(SimvueObject): def __init__( self, identifier: str | None = None, - storage: str | None = None, - run_id: str | None = None, **kwargs, ) -> None: - self._storage_id: str | None = storage - self._run_id: str | None = run_id super().__init__(identifier, **kwargs) + self._staging = {"server": kwargs, "storage": {}} self._label = "artifact" @classmethod - def new(cls, *_, **__) -> None: - raise NotImplementedError( - "No method 'new' for type 'artifact', use 'new_file' or 'new_object'" + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + run_id: str, + storage_id: str | None, + category: Category, + offline: bool = False, + **kwargs, + ) -> typing.Self: + _artifact = Artifact( + run=run_id, + name=name, + storage=storage_id, + category=category, + _read_only=False, + **kwargs, ) + _artifact.offline_mode(offline) + + if offline: + return _artifact + + # Firstly submit a request for a new artifact + _response = _artifact._post(**_artifact._staging["server"]) + + # If this artifact does not exist a URL will be returned + _artifact._staging["server"]["url"] = _response["url"] + + # If a storage ID has been provided store that else retrieve it + _artifact._staging["server"]["storage"] = storage_id or _response["storage_id"] + _artifact._staging["storage"]["data"] = _response.get("fields") + _artifact._staging["storage"]["files"] = None + + return _artifact @classmethod @pydantic.validate_call @@ -62,8 +91,8 @@ def new_file( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - run: str, - storage: str | None, + run_id: str, + storage_id: str | None, category: Category, file_path: pydantic.FilePath, file_type: str | None, @@ -77,9 +106,9 @@ def new_file( ---------- name : str the name for this artifact - run : str + run_id : str the identifier with which this artifact is associated - storage : str | None + 
storage_id : str | None the identifier for the storage location for this object category : "code" | "input" | "output" the category of this artifact @@ -100,40 +129,24 @@ def new_file( _file_orig_path = file_path.expanduser().absolute() _file_checksum = calculate_sha256(f"{file_path}", is_file=True) - _upload_data = { - "name": name, - "storage": storage, - "category": category, - "originalPath": os.path.expandvars(_file_orig_path), - "size": _file_size, - "type": _file_type, - "checksum": _file_checksum, - } - - _artifact = Artifact(_read_only=False, **_upload_data) - _artifact.offline_mode(offline) + _artifact = Artifact.new( + name=name, + run_id=run_id, + storage_id=storage_id, + category=category, + originalPath=os.path.expandvars(_file_orig_path), + size=_file_size, + type=_file_type, + checksum=_file_checksum, + offline=offline, + ) if offline: return _artifact - _response = _artifact._post(**_artifact._staging) - - # Either use existing storage ID if provided from this point onwards - # or use the new ID provided - _storage_id = storage or _response["storage_id"] - _artifact._storage_id = _storage_id - - _url = _response.get("url") - - _fields = _response.get("fields") - with open(file_path, "rb") as out_f: - _artifact._upload( - storage_url=_url, - artifact_data={"files": {"file": out_f}, "data": _fields}, - run_id=run, - **_upload_data | {"storage": _storage_id}, - ) + _artifact._staging["storage"]["files"] = {"file": out_f} + _artifact._upload() return _artifact @@ -143,7 +156,7 @@ def new_object( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - run: str, + run_id: str, storage: str | None, category: Category, obj: typing.Any, @@ -158,7 +171,7 @@ def new_object( ---------- name : str the name for this artifact - run : str + run_id : str the identifier with which this artifact is associated storage : str | None the identifier for the storage location for this object @@ -184,57 +197,42 @@ def new_object( ) _checksum = 
calculate_sha256(_serialized, is_file=False) - _upload_data = { - "name": name, - "storage": storage, - "category": category, - "originalPath": "", - "size": sys.getsizeof(obj), - "type": _data_type, - "checksum": _checksum, - } - - _artifact = Artifact(read_only=False, **_upload_data) + + _artifact = Artifact.new( + run_id=run_id, + name=name, + storage=storage, + category=category, + originalPath="", + size=sys.getsizeof(obj), + type=_data_type, + checksum=_checksum, + ) _artifact.offline_mode(offline) if offline: return _artifact - _response = _artifact._post(**_artifact._staging) - _url = _response.get("url") - - # Either use existing storage ID if provided from this point onwards - # or use the new ID provided - _storage_id = storage or _response["storage_id"] - _artifact._storage_id = _storage_id - - _artifact._upload( - storage_url=_url, - artifact_data=_serialized, - run_id=run, - **_upload_data | {"storage": _storage_id}, - ) + _artifact._staging["storage"]["files"] = {"file": io.BytesIO(_serialized)} + _artifact._upload() return _artifact def commit(self) -> None: raise TypeError("Cannot call method 'commit' on write-once type 'Artifact'") - def _upload( - self, - artifact_data: typing.Any, - run_id: str, - storage_url: str | None, - **_obj_parameters, - ) -> None: - # NOTE: Assumes URL for Run artifacts is always same + def _upload(self, data: typing.Any) -> None: + _run_id = self._staging["server"]["run"] + _files = self._staging["storage"]["files"] + _name = self._staging["server"]["name"] + _run_artifacts_url: URL = ( URL(self._user_config.server.url) - / f"runs/{run_id}/artifacts/{self._identifier}" + / f"runs/{_run_id}/artifacts/{self._identifier}" ) - if storage_url: + if _url := self._staging["server"]["url"]: _response = sv_post( - url=storage_url, headers={}, is_json=False, **artifact_data + url=_url, headers={}, is_json=False, files=_files, data=data ) self._logger.debug( @@ -245,22 +243,22 @@ def _upload( get_json_from_response( 
expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], allow_parse_failure=True, # JSON response from S3 not parsible - scenario=f"uploading artifact '{_obj_parameters['name']}' to object storage", + scenario=f"uploading artifact '{_name}' to object storage", response=_response, ) - if not _obj_parameters.get("storage"): + if not self._staging["server"].get("storage"): return _response = sv_put( url=f"{_run_artifacts_url}", headers=self._headers, - data=_obj_parameters, + data=self._staging["server"], ) get_json_from_response( expected_status=[http.HTTPStatus.OK], - scenario=f"adding artifact '{_obj_parameters['name']}' to run '{run_id}'", + scenario=f"adding artifact '{_name}' to run '{_run_id}'", response=_response, ) From ef0547c15b1c8608aa81e0fb409fbb916e9379e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 19 Dec 2024 10:15:11 +0000 Subject: [PATCH 024/163] Fix validation issue --- simvue/api/objects/artifact.py | 27 ++++++++++++-------- simvue/api/url.py | 14 +++++----- simvue/config/parameters.py | 2 +- simvue/config/user.py | 11 +++++--- simvue/run.py | 8 +++--- tests/functional/test_run_artifact_upload.py | 4 +-- 6 files changed, 38 insertions(+), 28 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index daa5a1c8..0738a288 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -76,10 +76,12 @@ def new( _response = _artifact._post(**_artifact._staging["server"]) # If this artifact does not exist a URL will be returned - _artifact._staging["server"]["url"] = _response["url"] + _artifact._staging["server"]["url"] = _response.get("url") # If a storage ID has been provided store that else retrieve it - _artifact._staging["server"]["storage"] = storage_id or _response["storage_id"] + _artifact._staging["server"]["storage"] = storage_id or _response.get( + "storage_id" + ) _artifact._staging["storage"]["data"] = _response.get("fields") 
_artifact._staging["storage"]["files"] = None @@ -141,8 +143,7 @@ def new_file( offline=offline, ) - if offline: - return _artifact + _artifact.offline_mode(offline) with open(file_path, "rb") as out_f: _artifact._staging["storage"]["files"] = {"file": out_f} @@ -210,9 +211,6 @@ def new_object( ) _artifact.offline_mode(offline) - if offline: - return _artifact - _artifact._staging["storage"]["files"] = {"file": io.BytesIO(_serialized)} _artifact._upload() return _artifact @@ -220,10 +218,15 @@ def new_object( def commit(self) -> None: raise TypeError("Cannot call method 'commit' on write-once type 'Artifact'") - def _upload(self, data: typing.Any) -> None: + def _upload(self) -> None: + if self._offline: + super().commit() + return + _run_id = self._staging["server"]["run"] _files = self._staging["storage"]["files"] _name = self._staging["server"]["name"] + _data = self._staging["storage"].get("data") _run_artifacts_url: URL = ( URL(self._user_config.server.url) @@ -232,7 +235,7 @@ def _upload(self, data: typing.Any) -> None: if _url := self._staging["server"]["url"]: _response = sv_post( - url=_url, headers={}, is_json=False, files=_files, data=data + url=_url, headers={}, is_json=False, files=_files, data=_data ) self._logger.debug( @@ -263,7 +266,9 @@ def _upload(self, data: typing.Any) -> None: ) def _get(self, storage: str | None = None, **kwargs) -> dict[str, typing.Any]: - return super()._get(storage=storage or self._storage_id, **kwargs) + return super()._get( + storage=storage or self._staging.get("server", {}).get("storage"), **kwargs + ) @property def checksum(self) -> str: @@ -301,7 +306,7 @@ def from_name( ) -> typing.Union["Artifact", None]: _temp = Artifact(**kwargs) _url = _temp._base_url / f"runs/{run_id}/artifacts" - _response = sv_get(url=_url, params={"name": name}, headers=_temp._headers) + _response = sv_get(url=f"{_url}", params={"name": name}, headers=_temp._headers) _json_response = get_json_from_response( response=_response, 
expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], diff --git a/simvue/api/url.py b/simvue/api/url.py index 443c4021..eb7754cd 100644 --- a/simvue/api/url.py +++ b/simvue/api/url.py @@ -2,11 +2,13 @@ import urllib.parse import copy +import pydantic + class URL: + @pydantic.validate_call def __init__(self, url: str) -> None: - if url.endswith("/"): - url = url[:-1] + url = url[:-1] if url.endswith("/") else url _url = urllib.parse.urlparse(url) self._scheme: str = _url.scheme @@ -20,11 +22,11 @@ def __truediv__(self, other: str) -> typing.Self: _new /= other return _new + @pydantic.validate_call def __itruediv__(self, other: str) -> typing.Self: - if other.startswith("/"): - other = other[1:] - if other.endswith("/"): - other = other[:-1] + other = other[1:] if other.startswith("/") else other + other = other[:-1] if other.endswith("/") else other + self._path = f"{self._path}/{other}" return self diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index cfb90bf4..a13d865f 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -28,7 +28,7 @@ class ServerSpecifications(pydantic.BaseModel): @classmethod def url_to_api_url(cls, v: typing.Any) -> str: if f"{v}".endswith("/api"): - return URL(f"{v}") + return f"{v}" _url = URL(f"{v}") / "api" return f"{_url}" diff --git a/simvue/config/user.py b/simvue/config/user.py index 9f28b553..014087a4 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -146,9 +146,9 @@ def check_valid_server(cls, values: "SimvueConfiguration") -> bool: @sv_util.prettify_pydantic def fetch( cls, - server_url: typing.Optional[str] = None, - server_token: typing.Optional[str] = None, - mode: typing.Optional[typing.Literal["offline", "online", "disabled"]] = None, + server_url: str | None = None, + server_token: str | None = None, + mode: typing.Literal["offline", "online", "disabled"] | None = None, ) -> "SimvueConfiguration": """Retrieve the Simvue configuration from this project 
@@ -157,7 +157,7 @@ def fetch( Parameters ---------- - server_url : str, optional + server_url : str | URL, optional override the URL used for this session server_token : str, optional override the token used for this session @@ -204,6 +204,9 @@ def fetch( "SIMVUE_URL", server_url or _config_dict["server"].get("url") ) + if isinstance(_server_url, URL): + _server_url = str(_server_url) + _server_token = os.environ.get( "SIMVUE_TOKEN", server_token or _config_dict["server"].get("token") ) diff --git a/simvue/run.py b/simvue/run.py index 3a53497a..8ab1b1ec 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1266,12 +1266,12 @@ def save_object( try: Artifact.new_object( - run=self.id, + run_id=self.id, name=_name, category=category, obj=obj, allow_pickling=allow_pickle, - storage=self._storage_id, + storage_id=self._storage_id, ) except (ValueError, RuntimeError) as e: self._error(f"Failed to save object '{_name}' to run '{self.id}': {e}") @@ -1331,8 +1331,8 @@ def save_file( # Register file Artifact.new_file( name=name or stored_file_name, - run=self.id, - storage=self._storage_id, + run_id=self.id, + storage_id=self._storage_id, file_path=file_path, offline=self._user_config.run.mode == "offline", file_type=filetype, diff --git a/tests/functional/test_run_artifact_upload.py b/tests/functional/test_run_artifact_upload.py index 2daeb326..45f6fd07 100644 --- a/tests/functional/test_run_artifact_upload.py +++ b/tests/functional/test_run_artifact_upload.py @@ -26,9 +26,9 @@ def test_add_artifact_to_run() -> None: _artifact = Artifact.new_file( name=f"test_{_uuid}", - run=_run.id, + run_id=_run.id, category="input", - storage=None, + storage_id=None, file_path=pathlib.Path(tempf.name), file_type=None ) From a3dc10ec0062e622fb798cd597420fcfbedff1a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 19 Dec 2024 15:51:59 +0000 Subject: [PATCH 025/163] Fixed artifact unit test --- simvue/api/objects/artifact.py | 60 
++++++++++++++++++++++------------ simvue/api/objects/base.py | 12 ++++--- tests/unit/test_artifact.py | 17 +++++----- 3 files changed, 56 insertions(+), 33 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 0738a288..2e4622bd 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -46,6 +46,7 @@ def __init__( ) -> None: super().__init__(identifier, **kwargs) self._staging = {"server": kwargs, "storage": {}} + self._run_id = kwargs.get("run") self._label = "artifact" @classmethod @@ -265,25 +266,32 @@ def _upload(self) -> None: response=_response, ) - def _get(self, storage: str | None = None, **kwargs) -> dict[str, typing.Any]: + def _get( + self, storage: str | None = None, url: str | None = None, **kwargs + ) -> dict[str, typing.Any]: return super()._get( - storage=storage or self._staging.get("server", {}).get("storage"), **kwargs + storage=storage or self._staging.get("server", {}).get("storage"), + url=url, + **kwargs, ) + def _get_from_run(self, attribute: str, *default) -> typing.Any: + return self._get_attribute(attribute, default, url=self.run_url) + @property def checksum(self) -> str: """Retrieve the checksum for this artifact""" return self._get_attribute("checksum") @property - def category(self) -> Category: - """Retrieve the category for this artifact""" - return self._get_attribute("category") + def category(self) -> Category | None: + """Retrieve the category for this artifact if applicable""" + return self._get_from_run("category") @property def original_path(self) -> str: """Retrieve the original path of the file associated with this artifact""" - return self._get_attribute("originalPath") + return self._get_from_run("originalPath") @property def storage(self) -> str | None: @@ -295,6 +303,11 @@ def type(self) -> str: """Retrieve the MIME type for this artifact""" return self._get_attribute("type") + @property + def run_id(self) -> str | None: + """Retrieve ID for run 
relating to this artifact""" + return self._run_id + @property def name(self) -> str | None: """Retrieve name for the artifact""" @@ -316,25 +329,30 @@ def from_name( if _response.status_code == http.HTTPStatus.NOT_FOUND: raise ObjectNotFoundError(_temp._label, name, extra=f"for run '{run_id}'") - return Artifact(run_id=run_id, **_json_response) + return Artifact(run=run_id, **_json_response) + + @property + def run_url(self) -> URL | None: + """If artifact is connected to a run return the run artifact endpoint""" + if not self.run_id: + return None + _url = URL(self._user_config.server.url) + _url /= f"runs/{self.run_id}/artifacts/{self._identifier}" + return _url @functools.lru_cache - def get_storage_url(self, run_id: str | None) -> typing.Any: + def get_storage_url(self) -> typing.Any: """Retrieve the storage location for a particular run""" if not self._identifier: raise ValueError("Cannot retrieve artifact, no ID specified") - _run_id = run_id or self._run_id - - if not _run_id: + if not self.run_id: raise ValueError( "A run identifier must be specified when downloading an artifact" ) - _url = self._base_url / "runs" / run_id / "artifacts" / self._identifier - _response = sv_get( - f"{_url}", + f"{self.run_url}", headers=self._headers, ) @@ -346,7 +364,9 @@ def get_storage_url(self, run_id: str | None) -> typing.Any: if _response.status_code == http.HTTPStatus.NOT_FOUND: raise ObjectNotFoundError( - self._label, self.name, extra=f"for run '{run_id}'" + self._label, + self.name, + extra=f"with id '{self._identifier}' for run '{self._run_id}'", ) if not (_url := _json_response.get("url")): @@ -357,10 +377,8 @@ def get_storage_url(self, run_id: str | None) -> typing.Any: return _url @pydantic.validate_call - def download( - self, output_file: pathlib.Path, run_id: str | None = None - ) -> pathlib.Path | None: - _storage_url = self.get_storage_url(run_id) + def download(self, output_file: pathlib.Path) -> pathlib.Path | None: + _storage_url = 
self.get_storage_url() _response = requests.get( f"{_storage_url}", stream=True, timeout=DOWNLOAD_TIMEOUT @@ -389,9 +407,9 @@ def download( return output_file if output_file.exists() else None - def download_content(self, run_id: str | None = None) -> typing.Any: + def download_content(self) -> typing.Any: """Download content of artifact from storage""" - _storage_url = self.get_storage_url(run_id) + _storage_url = self.get_storage_url() _response = requests.get(_storage_url, timeout=DOWNLOAD_TIMEOUT) get_json_from_response( diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index aba440fc..7405be09 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -196,7 +196,9 @@ def _stage_to_other(self, obj_label: str, key: str, value: typing.Any) -> None: with self._local_staging_file.open("w") as out_f: json.dump(_staged_data, out_f, indent=2) - def _get_attribute(self, attribute: str, *default) -> typing.Any: + def _get_attribute( + self, attribute: str, *default, url: str | None = None + ) -> typing.Any: # In the case where the object is read-only, staging is the data # already retrieved from the server _attribute_is_property: bool = attribute in self._properties @@ -215,7 +217,7 @@ def _get_attribute(self, attribute: str, *default) -> typing.Any: ) from e try: - return self._get()[attribute] + return self._get(url=url)[attribute] except KeyError as e: if default: return default[0] @@ -437,14 +439,16 @@ def delete( return _json_response def _get( - self, allow_parse_failure: bool = False, **kwargs + self, url: str | None = None, allow_parse_failure: bool = False, **kwargs ) -> dict[str, typing.Any]: if self._identifier.startswith("offline_"): return self._get_local_staged() if not self.url: raise RuntimeError(f"Identifier for instance of {self._label} Unknown") - _response = sv_get(url=f"{self.url}", headers=self._headers, params=kwargs) + _response = sv_get( + url=f"{url or self.url}", headers=self._headers, params=kwargs + ) if 
_response.status_code == http.HTTPStatus.NOT_FOUND: raise ObjectNotFoundError( diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index b5d80717..b6240c40 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -1,3 +1,4 @@ +import os import pytest import uuid import time @@ -21,13 +22,13 @@ def test_artifact_creation_online() -> None: with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: _path = pathlib.Path(temp_f.name) with _path.open("w") as out_f: - out_f.write("Hello World!") + out_f.write(f"Hello World! {_uuid}") _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", - run=_run.id, + run_id=_run.id, file_path=_path, category="input", - storage=None, + storage_id=None, file_type=None ) time.sleep(1) @@ -37,11 +38,12 @@ def test_artifact_creation_online() -> None: except Exception as e: _failed.append((member, f"{e}")) assert _artifact.name == f"test_artifact_{_uuid}" - _artifact.delete() + os.remove(temp_f.name) + _artifact.download(temp_f.name) if _failed: raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) _run.delete() - _folder.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) @pytest.mark.api @@ -57,10 +59,10 @@ def test_artifact_creation_offline() -> None: out_f.write("Hello World!") _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", - run=_run.id, + run_id=_run.id, file_path=_path, category="input", - storage=None, + storage_id=None, file_type=None, offline=True ) @@ -69,7 +71,6 @@ def test_artifact_creation_offline() -> None: _artifact.commit() time.sleep(1) assert _artifact.name == f"test_artifact_{_uuid}" - _artifact.delete() _run.delete() _folder.delete() From 93cefaa889d86538a506c3d56e58cbff13b53a53 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 2 Jan 2025 13:01:07 +0000 Subject: [PATCH 026/163] Fix artifact retrieval --- simvue/api/objects/artifact.py | 84 +++++++++++++++++----------------- 
tests/unit/test_artifact.py | 3 ++ 2 files changed, 45 insertions(+), 42 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 2e4622bd..7bfd17d5 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -6,19 +6,19 @@ """ +import datetime import http import pathlib import typing import pydantic import os.path -import functools import io import sys import requests from simvue.api.url import URL from simvue.exception import ObjectNotFoundError -from simvue.models import NAME_REGEX +from simvue.models import NAME_REGEX, DATETIME_FORMAT from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 from simvue.api.objects.base import SimvueObject from simvue.serialization import serialize_object @@ -283,6 +283,16 @@ def checksum(self) -> str: """Retrieve the checksum for this artifact""" return self._get_attribute("checksum") + @property + def uploaded(self) -> bool: + """Retrieve if the artifact has an upload""" + return self._get_attribute("uploaded") + + @property + def storage_url(self) -> URL | None: + """Retrieve upload URL for artifact""" + return URL(_url) if (_url := self._get_attribute("url")) else None + @property def category(self) -> Category | None: """Retrieve the category for this artifact if applicable""" @@ -291,7 +301,7 @@ def category(self) -> Category | None: @property def original_path(self) -> str: """Retrieve the original path of the file associated with this artifact""" - return self._get_from_run("originalPath") + return self._get_attribute("original_path") @property def storage(self) -> str | None: @@ -303,6 +313,11 @@ def type(self) -> str: """Retrieve the MIME type for this artifact""" return self._get_attribute("type") + @property + def size(self) -> int: + """Retrieve the size for this artifact in bytes""" + return self._get_attribute("size") + @property def run_id(self) -> str | None: """Retrieve ID for run relating to this artifact""" @@ -313,6 
+328,13 @@ def name(self) -> str | None: """Retrieve name for the artifact""" return self._get_attribute("name") + @property + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the artifact""" + _created: str | None = self._get_attribute("created") + _format = DATETIME_FORMAT.replace(" ", "T") + return datetime.datetime.strptime(_created, _format) if _created else None + @classmethod def from_name( cls, run_id: str, name: str, **kwargs @@ -340,48 +362,22 @@ def run_url(self) -> URL | None: _url /= f"runs/{self.run_id}/artifacts/{self._identifier}" return _url - @functools.lru_cache - def get_storage_url(self) -> typing.Any: - """Retrieve the storage location for a particular run""" - if not self._identifier: - raise ValueError("Cannot retrieve artifact, no ID specified") + @property + def download_url(self) -> URL | None: + """Retrieve the URL for downloading this artifact""" + return self.url / "download" if self._identifier else None - if not self.run_id: + @pydantic.validate_call + def download(self, output_file: pathlib.Path) -> pathlib.Path | None: + if not self.download_url: raise ValueError( - "A run identifier must be specified when downloading an artifact" + f"Could not retrieve URL for artifact '{self._identifier}'" ) - _response = sv_get( - f"{self.run_url}", + f"{self.download_url}", headers=self._headers, - ) - - _json_response = get_json_from_response( - response=_response, - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of content for {self._label} '{self._identifier}'", - ) - - if _response.status_code == http.HTTPStatus.NOT_FOUND: - raise ObjectNotFoundError( - self._label, - self.name, - extra=f"with id '{self._identifier}' for run '{self._run_id}'", - ) - - if not (_url := _json_response.get("url")): - raise RuntimeError( - f"Expected key 'url' for retrieval of artifact '{self.name}'" - ) - - return _url - - @pydantic.validate_call - def download(self, output_file: 
pathlib.Path) -> pathlib.Path | None: - _storage_url = self.get_storage_url() - - _response = requests.get( - f"{_storage_url}", stream=True, timeout=DOWNLOAD_TIMEOUT + timeout=DOWNLOAD_TIMEOUT, + params={"storage": self.storage}, ) get_json_from_response( @@ -409,8 +405,12 @@ def download(self, output_file: pathlib.Path) -> pathlib.Path | None: def download_content(self) -> typing.Any: """Download content of artifact from storage""" - _storage_url = self.get_storage_url() - _response = requests.get(_storage_url, timeout=DOWNLOAD_TIMEOUT) + if not self.storage_url: + raise ValueError( + f"Could not retrieve URL for artifact '{self._identifier}'" + ) + + _response = requests.get(self.storage_url, timeout=DOWNLOAD_TIMEOUT) get_json_from_response( response=_response, diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index b6240c40..ba0af789 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -40,6 +40,9 @@ def test_artifact_creation_online() -> None: assert _artifact.name == f"test_artifact_{_uuid}" os.remove(temp_f.name) _artifact.download(temp_f.name) + assert os.path.exists(temp_f.name) + with open(temp_f.name) as in_f: + assert in_f.readline() == f"Hello World! 
{_uuid}\n" if _failed: raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) _run.delete() From 7197a52abc30fbff8b377bac3ce9908c4c637fc9 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 6 Jan 2025 09:37:44 +0000 Subject: [PATCH 027/163] Fixed typing.Self so that it also works for 3.10 --- simvue/api/objects/administrator/tenant.py | 10 +++++----- simvue/api/objects/administrator/user.py | 8 ++++++-- simvue/api/objects/alert/events.py | 6 +++++- simvue/api/objects/alert/metrics.py | 9 +++++++-- simvue/api/objects/alert/user.py | 7 ++++++- simvue/api/objects/artifact.py | 11 ++++++++--- simvue/api/objects/run.py | 7 ++++++- simvue/api/objects/storage/file.py | 7 ++++++- simvue/api/objects/storage/s3.py | 7 ++++++- simvue/api/url.py | 9 ++++++--- 10 files changed, 61 insertions(+), 20 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index e111ae97..ca803a5c 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -1,5 +1,7 @@ -import typing - +try: + from typing import Self +except ImportError: + from typing_extensions import Self import pydantic from simvue.api.objects.base import write_only, SimvueObject, staging_check @@ -8,9 +10,7 @@ class Tenant(SimvueObject): @classmethod @pydantic.validate_call - def new( - cls, *, name: str, enabled: bool = True, offline: bool = False - ) -> typing.Self: + def new(cls, *, name: str, enabled: bool = True, offline: bool = False) -> Self: _tenant = Tenant(name=name, enabled=enabled, offline=offline, _read_only=False) _tenant.offline_mode(offline) return _tenant # type: ignore diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index c7925995..fc053d8c 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -1,5 +1,9 @@ import pydantic -import typing + +try: + from typing import Self +except 
ImportError: + from typing_extensions import Self from simvue.api.objects.base import SimvueObject, staging_check, write_only @@ -19,7 +23,7 @@ def new( tenant: str, enabled: bool = True, offline: bool = False, - ) -> typing.Self: + ) -> Self: _user_info: dict[str, str | bool] = { "username": username, "fullname": fullname, diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index ea6bbcac..4ccab804 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -9,6 +9,10 @@ import typing import pydantic +try: + from typing import Self +except ImportError: + from typing_extensions import Self from simvue.api.objects.base import write_only from .base import AlertBase, staging_check from simvue.models import NAME_REGEX @@ -39,7 +43,7 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new event-based alert Note parameters are keyword arguments only. diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 4725b48f..e91a41ac 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -10,6 +10,11 @@ import pydantic import typing +try: + from typing import Self +except ImportError: + from typing_extensions import Self + from simvue.api.objects.base import write_only from .base import AlertBase, staging_check from simvue.models import NAME_REGEX @@ -47,7 +52,7 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new metric threshold alert either locally or on the server Note all arguments are keyword arguments. @@ -123,7 +128,7 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new metric range alert either locally or on the server Note all arguments are keyword arguments. 
diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 74ce4052..1381fac6 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -8,6 +8,11 @@ import pydantic import typing + +try: + from typing import Self +except ImportError: + from typing_extensions import Self import http from simvue.api.request import get_json_from_response, put as sv_put @@ -27,7 +32,7 @@ def new( notification: typing.Literal["none", "email"], enabled: bool = True, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new user-defined alert Note all arguments are keyword arguments. diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 7bfd17d5..0846e5c8 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -16,6 +16,11 @@ import sys import requests +try: + from typing import Self +except ImportError: + from typing_extensions import Self + from simvue.api.url import URL from simvue.exception import ObjectNotFoundError from simvue.models import NAME_REGEX, DATETIME_FORMAT @@ -59,7 +64,7 @@ def new( category: Category, offline: bool = False, **kwargs, - ) -> typing.Self: + ) -> Self: _artifact = Artifact( run=run_id, name=name, @@ -100,7 +105,7 @@ def new_file( file_path: pydantic.FilePath, file_type: str | None, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new artifact either locally or on the server Note all arguments are keyword arguments @@ -164,7 +169,7 @@ def new_object( obj: typing.Any, allow_pickling: bool = True, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new artifact either locally or on the server Note all arguments are keyword arguments diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index afbb02b0..4fce381d 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -4,6 +4,11 @@ import pydantic import datetime +try: + from typing import Self +except ImportError: + 
from typing_extensions import Self + from .base import SimvueObject, staging_check, Visibility, write_only from simvue.api.request import ( get as sv_get, @@ -52,7 +57,7 @@ def new( *, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new Folder on the Simvue server with the given path""" _run = Run(folder=folder, system=None, status="created", _read_only=False) _run.offline_mode(offline) diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index fc1989bd..decd1288 100644 --- a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -1,4 +1,9 @@ import typing + +try: + from typing import Self +except ImportError: + from typing_extensions import Self import pydantic from .base import StorageBase @@ -16,7 +21,7 @@ def new( tenant_usable: bool, default: bool, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new file storage object""" _storage = FileStorage( name=name, diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index d2962f05..9907910e 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -1,4 +1,9 @@ import typing + +try: + from typing import Self +except ImportError: + from typing_extensions import Self import pydantic from simvue.api.objects.base import write_only @@ -27,7 +32,7 @@ def new( tenant_usable: bool, default: bool, offline: bool = False, - ) -> typing.Self: + ) -> Self: """Create a new S3 storage object""" _config: dict[str, str] = { "endpoint_url": endpoint_url.__str__(), diff --git a/simvue/api/url.py b/simvue/api/url.py index eb7754cd..a5f0a7de 100644 --- a/simvue/api/url.py +++ b/simvue/api/url.py @@ -1,4 +1,7 @@ -import typing +try: + from typing import Self +except ImportError: + from typing_extensions import Self import urllib.parse import copy @@ -17,13 +20,13 @@ def __init__(self, url: str) -> None: self._port: int | 
None = _url.port self._fragment: str = _url.fragment - def __truediv__(self, other: str) -> typing.Self: + def __truediv__(self, other: str) -> Self: _new = copy.deepcopy(self) _new /= other return _new @pydantic.validate_call - def __itruediv__(self, other: str) -> typing.Self: + def __itruediv__(self, other: str) -> Self: other = other[1:] if other.startswith("/") else other other = other[:-1] if other.endswith("/") else other From 66beb76e458cd6044acadc26ee30bdefcb4dece3 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 6 Jan 2025 13:22:18 +0000 Subject: [PATCH 028/163] Added online, offline and local tags to unit tests --- tests/unit/test_artifact.py | 2 ++ tests/unit/test_conversion_to_dataframe.py | 2 ++ tests/unit/test_event_alert.py | 5 +++++ tests/unit/test_file_storage.py | 2 ++ tests/unit/test_folder.py | 5 +++++ tests/unit/test_matplotlib_figure_mime_type.py | 1 + tests/unit/test_metadata.py | 4 ++++ tests/unit/test_metric_range_alert.py | 5 +++++ tests/unit/test_metric_threshold_alert.py | 5 +++++ tests/unit/test_numpy_array_mime_type.py | 2 ++ tests/unit/test_numpy_array_serialization.py | 2 ++ tests/unit/test_pandas_dataframe_mimetype.py | 1 + tests/unit/test_pandas_dataframe_serialization.py | 1 + tests/unit/test_pickle_serialization.py | 3 ++- tests/unit/test_plotly_figure_mime_type.py | 2 +- tests/unit/test_pytorch_tensor_mime_type.py | 3 ++- tests/unit/test_pytorch_tensor_serialization.py | 1 + tests/unit/test_run.py | 5 +++++ tests/unit/test_run_init_folder.py | 2 +- tests/unit/test_run_init_metadata.py | 1 + tests/unit/test_run_init_tags.py | 1 + tests/unit/test_s3_storage.py | 2 ++ tests/unit/test_stats.py | 1 + tests/unit/test_suppress_errors.py | 3 +++ tests/unit/test_tag.py | 5 +++++ tests/unit/test_tenant.py | 3 +++ tests/unit/test_user_alert.py | 6 ++++++ 27 files changed, 71 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index ba0af789..83b55c22 100644 --- 
a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -9,6 +9,7 @@ from simvue.api.objects.folder import Folder @pytest.mark.api +@pytest.mark.online def test_artifact_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" @@ -50,6 +51,7 @@ def test_artifact_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_artifact_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" diff --git a/tests/unit/test_conversion_to_dataframe.py b/tests/unit/test_conversion_to_dataframe.py index a2cfc82b..7c35e187 100644 --- a/tests/unit/test_conversion_to_dataframe.py +++ b/tests/unit/test_conversion_to_dataframe.py @@ -1,5 +1,7 @@ +import pytest from simvue.converters import to_dataframe +@pytest.mark.local def test_run_conversion_to_dataframe(): """ Check that runs can be successfully converted to a dataframe diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index 3934dbe3..775e9dab 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -6,6 +6,7 @@ from simvue.api.objects import Alert, EventsAlert @pytest.mark.api +@pytest.mark.online def test_event_alert_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = EventsAlert.new( @@ -24,6 +25,7 @@ def test_event_alert_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_event_alert_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = EventsAlert.new( @@ -49,6 +51,7 @@ def test_event_alert_creation_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_event_alert_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = EventsAlert.new( @@ -70,6 +73,7 @@ def test_event_alert_modification_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_event_alert_modification_offline() -> None: _uuid: 
str = f"{uuid.uuid4()}".split("-")[0] _alert = EventsAlert.new( @@ -94,6 +98,7 @@ def test_event_alert_modification_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_event_alert_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = EventsAlert.new( diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index db037cd5..91cb27e0 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -6,6 +6,7 @@ from simvue.api.objects import FileStorage @pytest.mark.api +@pytest.mark.online def test_create_file_storage_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False) @@ -16,6 +17,7 @@ def test_create_file_storage_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_create_file_storage_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False, offline=True) diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index b2dcb0ce..8124225a 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -8,6 +8,7 @@ from simvue.api.objects.folder import Folder @pytest.mark.api +@pytest.mark.online def test_folder_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _path = f"/simvue_unit_testing/objects/folder/{_uuid}" @@ -28,6 +29,7 @@ def test_folder_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_folder_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _path = f"/simvue_unit_testing/objects/folder/{_uuid}" @@ -48,6 +50,7 @@ def test_folder_creation_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_folder_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _path = f"/simvue_unit_testing/objects/folder/{_uuid}" @@ -70,6 +73,7 @@ def 
test_folder_modification_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_folder_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _path = f"/simvue_unit_testing/objects/folder/{_uuid}" @@ -92,6 +96,7 @@ def test_folder_modification_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_folder_get_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _path = f"/simvue_unit_testing/objects/folder/{_uuid}" diff --git a/tests/unit/test_matplotlib_figure_mime_type.py b/tests/unit/test_matplotlib_figure_mime_type.py index c0deaa2a..63ce5d5c 100644 --- a/tests/unit/test_matplotlib_figure_mime_type.py +++ b/tests/unit/test_matplotlib_figure_mime_type.py @@ -8,6 +8,7 @@ plt = None @pytest.mark.skipif(not plt, reason="Matplotlib is not installed") +@pytest.mark.local def test_matplotlib_figure_mime_type(): """ Check that a matplotlib figure has the correct mime-type diff --git a/tests/unit/test_metadata.py b/tests/unit/test_metadata.py index 6b2ea5f4..4d01e14f 100644 --- a/tests/unit/test_metadata.py +++ b/tests/unit/test_metadata.py @@ -5,12 +5,14 @@ @pytest.mark.metadata +@pytest.mark.local def test_cargo_env() -> None: metadata = sv_meta._rust_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) assert metadata["rust.environment.serde"] == "1.0.123" assert metadata["rust.project.name"] == "example_project" @pytest.mark.metadata +@pytest.mark.local @pytest.mark.parametrize( "backend", ("poetry", "uv", None) ) @@ -28,6 +30,7 @@ def test_python_env(backend: str | None) -> None: @pytest.mark.metadata +@pytest.mark.local def test_julia_env() -> None: metadata = sv_meta._julia_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) assert metadata["julia.project.name"] == "Julia Demo Project" @@ -35,6 +38,7 @@ def test_julia_env() -> None: @pytest.mark.metadata +@pytest.mark.local def test_js_env() -> None: metadata = 
sv_meta._node_js_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) assert metadata["javascript.project.name"] == "my-awesome-project" diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index f9dd1eb6..f0e8424b 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -7,6 +7,7 @@ from simvue.api.objects import MetricsRangeAlert, Alert @pytest.mark.api +@pytest.mark.online def test_metric_range_alert_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( @@ -29,6 +30,7 @@ def test_metric_range_alert_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_metric_range_alert_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( @@ -57,6 +59,7 @@ def test_metric_range_alert_creation_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_metric_range_alert_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( @@ -82,6 +85,7 @@ def test_metric_range_alert_modification_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_metric_range_alert_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( @@ -107,6 +111,7 @@ def test_metric_range_alert_modification_offline() -> None: _new_alert.delete() @pytest.mark.api +@pytest.mark.online def test_metric_range_alert_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index 85c8deca..0ed4f61a 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -8,6 +8,7 @@ @pytest.mark.api +@pytest.mark.online def test_metric_threshold_alert_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] 
_alert = MetricsThresholdAlert.new( @@ -29,6 +30,7 @@ def test_metric_threshold_alert_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_metric_threshold_alert_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsThresholdAlert.new( @@ -56,6 +58,7 @@ def test_metric_threshold_alert_creation_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_metric_threshold_alert_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsThresholdAlert.new( @@ -80,6 +83,7 @@ def test_metric_threshold_alert_modification_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_metric_threshold_alert_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsThresholdAlert.new( @@ -104,6 +108,7 @@ def test_metric_threshold_alert_modification_offline() -> None: _new_alert.delete() @pytest.mark.api +@pytest.mark.online def test_metric_range_alert_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsThresholdAlert.new( diff --git a/tests/unit/test_numpy_array_mime_type.py b/tests/unit/test_numpy_array_mime_type.py index 7523d30b..bcbd6e52 100644 --- a/tests/unit/test_numpy_array_mime_type.py +++ b/tests/unit/test_numpy_array_mime_type.py @@ -1,6 +1,8 @@ from simvue.serialization import serialize_object import numpy as np +import pytest +@pytest.mark.local def test_numpy_array_mime_type(): """ Check that the mimetype for numpy arrays is correct diff --git a/tests/unit/test_numpy_array_serialization.py b/tests/unit/test_numpy_array_serialization.py index 0f713cdd..ede5ccae 100644 --- a/tests/unit/test_numpy_array_serialization.py +++ b/tests/unit/test_numpy_array_serialization.py @@ -1,6 +1,8 @@ from simvue.serialization import serialize_object, deserialize_data import numpy as np +import pytest +@pytest.mark.local def test_numpy_array_serialization(): """ Check that a numpy array can be serialized then 
deserialized successfully diff --git a/tests/unit/test_pandas_dataframe_mimetype.py b/tests/unit/test_pandas_dataframe_mimetype.py index 57d5f775..ac645b48 100644 --- a/tests/unit/test_pandas_dataframe_mimetype.py +++ b/tests/unit/test_pandas_dataframe_mimetype.py @@ -7,6 +7,7 @@ except ImportError: pd = None +@pytest.mark.local @pytest.mark.skipif(not pd, reason="Pandas is not installed") def test_pandas_dataframe_mimetype(): """ diff --git a/tests/unit/test_pandas_dataframe_serialization.py b/tests/unit/test_pandas_dataframe_serialization.py index fde9262d..3dea46b0 100644 --- a/tests/unit/test_pandas_dataframe_serialization.py +++ b/tests/unit/test_pandas_dataframe_serialization.py @@ -7,6 +7,7 @@ except ImportError: pd = None +@pytest.mark.local @pytest.mark.skipif(not pd, reason="Pandas is not installed") def test_pandas_dataframe_serialization(): """ diff --git a/tests/unit/test_pickle_serialization.py b/tests/unit/test_pickle_serialization.py index 60833665..c3191c58 100644 --- a/tests/unit/test_pickle_serialization.py +++ b/tests/unit/test_pickle_serialization.py @@ -1,5 +1,6 @@ from simvue.serialization import deserialize_data, serialize_object - +import pytest +@pytest.mark.local def test_pickle_serialization(): """ Check that a dictionary can be serialized then deserialized successfully diff --git a/tests/unit/test_plotly_figure_mime_type.py b/tests/unit/test_plotly_figure_mime_type.py index 8cf8a479..f520f2dc 100644 --- a/tests/unit/test_plotly_figure_mime_type.py +++ b/tests/unit/test_plotly_figure_mime_type.py @@ -13,7 +13,7 @@ except ImportError: plotly = None - +@pytest.mark.local @pytest.mark.skipif(not plt, reason="Matplotlib is not installed") @pytest.mark.skipif(not plotly, reason="Plotly is not installed") def test_plotly_figure_mime_type(): diff --git a/tests/unit/test_pytorch_tensor_mime_type.py b/tests/unit/test_pytorch_tensor_mime_type.py index 011dd1b4..38aef668 100644 --- a/tests/unit/test_pytorch_tensor_mime_type.py +++ 
b/tests/unit/test_pytorch_tensor_mime_type.py @@ -6,7 +6,8 @@ import torch except ImportError: torch = None - + +@pytest.mark.local @pytest.mark.skipif(not torch, reason="Torch is not installed") def test_pytorch_tensor_mime_type(): """ diff --git a/tests/unit/test_pytorch_tensor_serialization.py b/tests/unit/test_pytorch_tensor_serialization.py index 07b46048..26022259 100644 --- a/tests/unit/test_pytorch_tensor_serialization.py +++ b/tests/unit/test_pytorch_tensor_serialization.py @@ -6,6 +6,7 @@ except ImportError: torch = None +@pytest.mark.local @pytest.mark.skipif(not torch, reason="Torch is not installed") def test_pytorch_tensor_serialization(): """ diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index cfcd7f23..dedd920b 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -8,6 +8,7 @@ from simvue.api.objects import Run, Folder @pytest.mark.api +@pytest.mark.online def test_run_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" @@ -21,6 +22,7 @@ def test_run_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_run_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" @@ -40,6 +42,7 @@ def test_run_creation_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_run_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" @@ -68,6 +71,7 @@ def test_run_modification_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_run_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" @@ -102,6 +106,7 @@ def test_run_modification_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_run_get_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" diff --git 
a/tests/unit/test_run_init_folder.py b/tests/unit/test_run_init_folder.py index 286f6251..0ca2c383 100644 --- a/tests/unit/test_run_init_folder.py +++ b/tests/unit/test_run_init_folder.py @@ -1,7 +1,7 @@ from simvue import Run import pytest - +@pytest.mark.local def test_run_init_folder(): """ Check that run.init throws an exception if folder input is not specified correctly diff --git a/tests/unit/test_run_init_metadata.py b/tests/unit/test_run_init_metadata.py index 1973f35a..0a5f4bda 100644 --- a/tests/unit/test_run_init_metadata.py +++ b/tests/unit/test_run_init_metadata.py @@ -1,6 +1,7 @@ from simvue import Run import pytest +@pytest.mark.local def test_run_init_metadata(): """ Check that run.init throws an exception if tuples are passed into metadata dictionary diff --git a/tests/unit/test_run_init_tags.py b/tests/unit/test_run_init_tags.py index 6e247446..7697250a 100644 --- a/tests/unit/test_run_init_tags.py +++ b/tests/unit/test_run_init_tags.py @@ -1,6 +1,7 @@ from simvue import Run import pytest +@pytest.mark.local def test_run_init_tags(): """ Check that run.init throws an exception if tags are not a list diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 47aad7ef..baac918d 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -7,6 +7,7 @@ from simvue.api.objects.storage.fetch import Storage @pytest.mark.api +@pytest.mark.online def test_create_s3_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = S3Storage.new( @@ -34,6 +35,7 @@ def test_create_s3_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_create_s3_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = S3Storage.new( diff --git a/tests/unit/test_stats.py b/tests/unit/test_stats.py index b11f1853..2e21871e 100644 --- a/tests/unit/test_stats.py +++ b/tests/unit/test_stats.py @@ -3,6 +3,7 @@ from simvue.api.objects import Stats @pytest.mark.api +@pytest.mark.local def test_stats() 
-> None: _statistics = Stats() assert isinstance(_statistics.runs.created, int) diff --git a/tests/unit/test_suppress_errors.py b/tests/unit/test_suppress_errors.py index 8a1b9f29..5a5f9e6d 100644 --- a/tests/unit/test_suppress_errors.py +++ b/tests/unit/test_suppress_errors.py @@ -2,6 +2,7 @@ import pytest import logging +@pytest.mark.local def test_suppress_errors_false(): """ Check that exceptions are thrown if suppress_errors disabled @@ -15,6 +16,7 @@ def test_suppress_errors_false(): ) assert "Input should be a valid boolean, unable to interpret input" in f"{e.value}" +@pytest.mark.local def test_suppress_errors_true(caplog): """ Check that no exceptions are thrown and messages are added to log if suppress_errors enabled @@ -30,6 +32,7 @@ def test_suppress_errors_true(caplog): assert "Input should be a valid boolean, unable to interpret input" in caplog.text +@pytest.mark.local def test_suppress_errors_default(caplog): """ Check that by default no exceptions are thrown and messages are added to log diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index 87b4a163..1927c87f 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -6,6 +6,7 @@ from simvue.api.objects.tag import Tag @pytest.mark.api +@pytest.mark.online def test_tag_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}") @@ -17,6 +18,7 @@ def test_tag_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_tag_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}", offline=True) @@ -34,6 +36,7 @@ def test_tag_creation_offline() -> None: assert not _local_data.get(_tag._label, {}).get(_tag.id) @pytest.mark.api +@pytest.mark.online def test_tag_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}") @@ -50,6 +53,7 @@ def test_tag_modification_online() -> None: @pytest.mark.api 
+@pytest.mark.offline def test_tag_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}", offline=True) @@ -66,6 +70,7 @@ def test_tag_modification_offline() -> None: _tag.delete() @pytest.mark.api +@pytest.mark.online def test_tag_get_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}") diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py index c3f64cf3..cd524942 100644 --- a/tests/unit/test_tenant.py +++ b/tests/unit/test_tenant.py @@ -8,6 +8,7 @@ @pytest.mark.api +@pytest.mark.online def test_create_tenant_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tenant = Tenant.new(name=_uuid) @@ -24,6 +25,7 @@ def test_create_tenant_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_create_tenant_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tenant = Tenant.new(name=_uuid, offline=True) @@ -36,6 +38,7 @@ def test_create_tenant_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_tag_get_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tenant = Tenant.new(name=_uuid) diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index 26fb4c97..77c58780 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -8,6 +8,7 @@ from simvue.api.objects.folder import Folder @pytest.mark.api +@pytest.mark.online def test_user_alert_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( @@ -23,6 +24,7 @@ def test_user_alert_creation_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_user_alert_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( @@ -43,6 +45,7 @@ def test_user_alert_creation_offline() -> None: @pytest.mark.api +@pytest.mark.online def test_user_alert_modification_online() -> None: _uuid: str = 
f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( @@ -61,6 +64,7 @@ def test_user_alert_modification_online() -> None: @pytest.mark.api +@pytest.mark.offline def test_user_alert_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( @@ -82,6 +86,7 @@ def test_user_alert_modification_offline() -> None: _new_alert.delete() @pytest.mark.api +@pytest.mark.online def test_user_alert_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( @@ -105,6 +110,7 @@ def test_user_alert_properties() -> None: @pytest.mark.api +@pytest.mark.online def test_user_alert_status() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( From 954d787117c0a31d4abc192826369602eb1e7421 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 6 Jan 2025 13:24:43 +0000 Subject: [PATCH 029/163] Changed _tag to _tenant --- tests/unit/test_tenant.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py index cd524942..1cb6b2f2 100644 --- a/tests/unit/test_tenant.py +++ b/tests/unit/test_tenant.py @@ -49,13 +49,13 @@ def test_tag_get_properties() -> None: return _failed = [] - for member in _tag._properties: + for member in _tenant._properties: try: - getattr(_tag, member) + getattr(_tenant, member) except Exception as e: _failed.append((member, f"{e}")) with contextlib.suppress(Exception): - _tag.delete() + _tenant.delete() if _failed: raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) From 75b50e3609fafd0b6f1a98c8e8ab320db3143df9 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 6 Jan 2025 17:05:51 +0000 Subject: [PATCH 030/163] Added write_only decorator to alert classes and fixed test --- simvue/api/objects/alert/base.py | 8 +++++++- simvue/api/objects/base.py | 8 ++++---- tests/unit/test_metric_range_alert.py | 2 +- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git 
a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 5bd245dc..cc14e4e5 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -9,7 +9,7 @@ import http import pydantic import typing -from simvue.api.objects.base import SimvueObject, staging_check +from simvue.api.objects.base import SimvueObject, staging_check, write_only from simvue.api.request import get as sv_get, get_json_from_response from simvue.models import NAME_REGEX @@ -46,6 +46,7 @@ def name(self) -> str: return self._get_attribute("name") @name.setter + @write_only @pydantic.validate_call def name( self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] @@ -60,6 +61,7 @@ def description(self) -> str | None: return self._get_attribute("description") @description.setter + @write_only @pydantic.validate_call def description(self, description: str | None) -> None: """Set alert description""" @@ -72,6 +74,7 @@ def tags(self) -> list[str]: return self._get_attribute("tags") @tags.setter + @write_only @pydantic.validate_call def tags(self, tags: list[str]) -> None: """Set alert tags""" @@ -84,6 +87,7 @@ def notification(self) -> typing.Literal["none", "email"]: return self._get_attribute("notification") @notification.setter + @write_only @pydantic.validate_call def notification(self, notification: typing.Literal["none", "email"]) -> None: """Configure alert notification setting""" @@ -101,6 +105,7 @@ def enabled(self) -> bool: return self._get_attribute("enabled") @enabled.setter + @write_only @pydantic.validate_call def enabled(self, enabled: str) -> None: """Enable/disable alert""" @@ -113,6 +118,7 @@ def abort(self) -> bool: return self._get_attribute("abort") @abort.setter + @write_only @pydantic.validate_call def abort(self, abort: bool) -> None: """Configure alert to trigger aborts""" diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 7405be09..55875e0e 100644 --- a/simvue/api/objects/base.py +++ 
b/simvue/api/objects/base.py @@ -150,10 +150,10 @@ def __init__( "User-Agent": f"Simvue Python client {__version__}", } - self._staging: dict[str, typing.Any] = {} + if not self._identifier.startswith("offline_"): + self._cached: dict[str, typing.Any] = self._get() - if not self._identifier.startswith("offline_") and self._read_only: - self._staging = self._get() + self._staging: dict[str, typing.Any] = {} # Recover any locally staged changes if not read-only self._staging |= {} if _read_only else self._get_local_staged() @@ -209,7 +209,7 @@ def _get_attribute( if (_attribute_is_property and _state_is_read_only) or _offline_state: try: - return self._staging[attribute] + return self._cached[attribute] except KeyError as e: raise AttributeError( f"Could not retrieve attribute '{attribute}' " diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index f0e8424b..7f724647 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -75,7 +75,7 @@ def test_metric_range_alert_modification_online() -> None: ) _alert.commit() time.sleep(1) - _new_alert = Alert(_alert.id) + _new_alert = Alert(_alert.id, _read_only=False) assert isinstance(_new_alert, MetricsRangeAlert) _new_alert.description = "updated!" assert _new_alert.description != "updated!" 
From 4297b5e41227a8557149c8558f448f545452f504 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 7 Jan 2025 09:23:45 +0000 Subject: [PATCH 031/163] Reset changes to add cached instead of staged --- simvue/api/objects/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 55875e0e..7405be09 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -150,11 +150,11 @@ def __init__( "User-Agent": f"Simvue Python client {__version__}", } - if not self._identifier.startswith("offline_"): - self._cached: dict[str, typing.Any] = self._get() - self._staging: dict[str, typing.Any] = {} + if not self._identifier.startswith("offline_") and self._read_only: + self._staging = self._get() + # Recover any locally staged changes if not read-only self._staging |= {} if _read_only else self._get_local_staged() @@ -209,7 +209,7 @@ def _get_attribute( if (_attribute_is_property and _state_is_read_only) or _offline_state: try: - return self._cached[attribute] + return self._staging[attribute] except KeyError as e: raise AttributeError( f"Could not retrieve attribute '{attribute}' " From a35ce36e6d002854e67bd079b294e475ce2c681b Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 7 Jan 2025 13:50:45 +0000 Subject: [PATCH 032/163] Fix folder modification test --- simvue/api/objects/folder.py | 8 ++++++-- tests/unit/test_folder.py | 3 +-- tests/unit/test_metric_range_alert.py | 3 ++- tests/unit/test_run.py | 3 +++ 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 7d974843..a59990d0 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -144,8 +144,12 @@ def ttl(self, time_seconds: int) -> None: """Update the retention period for this folder""" self._staging["ttl"] = time_seconds - def delete( - self, *, recursive: bool, delete_runs: bool, runs_only: bool + def delete( # should 
params to this be optional and default to False? + self, + *, + recursive: typing.Optional[bool] = False, + delete_runs: typing.Optional[bool] = False, + runs_only: typing.Optional[bool] = False, ) -> dict[str, typing.Any]: return super().delete( recursive=recursive, runs=delete_runs, runs_only=runs_only diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 8124225a..faa326c2 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -60,15 +60,14 @@ def test_folder_modification_online() -> None: _folder.commit() time.sleep(1) _folder_new = Folder(identifier=_folder.id) + _folder_new.read_only(False) _folder_new.tags = _tags _folder_new.description = _description - _folder_new.visibility.tenant = True _folder_new.commit() assert _folder_new.tags == _tags assert _folder.tags == _tags assert _folder_new.description == _description assert _folder.description == _description - assert _folder_new.visibility.tenant _folder.delete() diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index 7f724647..20c5fc04 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -75,8 +75,9 @@ def test_metric_range_alert_modification_online() -> None: ) _alert.commit() time.sleep(1) - _new_alert = Alert(_alert.id, _read_only=False) + _new_alert = Alert(_alert.id) assert isinstance(_new_alert, MetricsRangeAlert) + _new_alert.read_only(False) _new_alert.description = "updated!" assert _new_alert.description != "updated!" 
_new_alert.commit() diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index dedd920b..685c9b40 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -54,6 +54,7 @@ def test_run_modification_online() -> None: time.sleep(1) _now = datetime.datetime.now() _new_run = Run(identifier=_run.id) + _new_run.read_only(False) _new_run.name = "simvue_test_run" _new_run.description = "Simvue test run" _new_run.created = _now @@ -61,6 +62,8 @@ def test_run_modification_online() -> None: _new_run.ttl = 120 assert _new_run.ttl != 120 _new_run.commit() + print(_new_run.staged) + time.sleep(1) assert _new_run.ttl == 120 assert _new_run.description == "Simvue test run" assert _new_run.created == _now From 10da62369271ca27033e036dc3a968839ddacc4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 7 Jan 2025 14:50:37 +0000 Subject: [PATCH 033/163] Fix dataframe conversion --- simvue/converters.py | 77 ++++++++++------------ tests/unit/test_conversion_to_dataframe.py | 25 ++++--- 2 files changed, 50 insertions(+), 52 deletions(-) diff --git a/simvue/converters.py b/simvue/converters.py index ff1ca7ce..705392ac 100644 --- a/simvue/converters.py +++ b/simvue/converters.py @@ -44,14 +44,12 @@ def aggregated_metrics_to_dataframe( """ _all_steps: list[float] = sorted( - set( - ( - d[xaxis] - for sublist in request_response_data.values() - for d in sublist - if xaxis in d - ) - ) + { + d[xaxis] + for sublist in request_response_data.values() + for d in sublist + if xaxis in d + } ) # Get the keys from the aggregate which are not the xaxis label @@ -61,7 +59,7 @@ def aggregated_metrics_to_dataframe( _value_types.remove(xaxis) result_dict: dict[str, dict[tuple[float, str], typing.Optional[float]]] = { - metric_name: {} for metric_name in request_response_data.keys() + metric_name: {} for metric_name in request_response_data } for metric_name, metrics in request_response_data.items(): @@ -123,31 +121,19 @@ def parse_run_set_metrics( if an 
unrecognised parse format is specified """ if not request_response_data: - if parse_to == "dataframe": - return pandas.DataFrame({}) - else: - return {} - + return pandas.DataFrame({}) if parse_to == "dataframe" else {} _all_steps: list[float] = sorted( - set( - ( - d[xaxis] - for run_data in request_response_data.values() - for sublist in run_data.values() - for d in sublist - if xaxis in d - ) - ) + { + d[xaxis] + for run_data in request_response_data.values() + for sublist in run_data.values() + for d in sublist + if xaxis in d + } ) _all_metrics: list[str] = sorted( - set( - ( - key - for run_data in request_response_data.values() - for key in run_data.keys() - ) - ) + {key for run_data in request_response_data.values() for key in run_data.keys()} ) # Get the keys from the aggregate which are not the xaxis label @@ -178,26 +164,34 @@ def parse_run_set_metrics( result_dict[metric_name][step, run_label] = next_item.get("value") if parse_to == "dataframe": - _data_frame = pandas.DataFrame( + return pandas.DataFrame( result_dict, index=pandas.MultiIndex.from_product( [_all_steps, run_labels], names=(xaxis, "run") ), ) - return _data_frame elif parse_to == "dict": return result_dict else: raise ValueError(f"Unrecognised parse format '{parse_to}'") -def to_dataframe(data): +def to_dataframe(data) -> pandas.DataFrame: """ Convert runs to dataframe """ metadata = [] system_columns = [] + columns = { + "name": [], + "status": [], + "folder": [], + "created": [], + "started": [], + "ended": [], + } + for run in data: for item in run.get("metadata", []): if item not in metadata: @@ -209,18 +203,17 @@ def to_dataframe(data): for sub_item in value.keys() if (col_name := f"system.{item}.{sub_item}") not in system_columns ] - else: - if f"system.{item}" not in system_columns: - system_columns.append(f"system.{item}") - - columns = {f"metadata.{column}": [] for column in metadata} - columns |= {column: [] for column in system_columns} + elif f"system.{item}" not in 
system_columns: + system_columns.append(f"system.{item}") + columns |= {f"metadata.{column}": [] for column in metadata} | { + column: [] for column in system_columns + } for run in data: - run_info = flatdict.FlatDict(run, delimiter=".").as_dict() + run_info = flatdict.FlatDict(run, delimiter=".") - for column in columns: - columns[column].append(run_info.get(column)) + for column, value_ in columns.items(): + value_.append(run_info.get(column)) return pandas.DataFrame(data=columns) diff --git a/tests/unit/test_conversion_to_dataframe.py b/tests/unit/test_conversion_to_dataframe.py index a2cfc82b..df419ef7 100644 --- a/tests/unit/test_conversion_to_dataframe.py +++ b/tests/unit/test_conversion_to_dataframe.py @@ -1,3 +1,4 @@ +from numpy import exp from simvue.converters import to_dataframe def test_run_conversion_to_dataframe(): @@ -21,16 +22,20 @@ def test_run_conversion_to_dataframe(): runs_df = to_dataframe(runs) - assert(runs_df.columns.to_list() == ['name', - 'status', - 'folder', - 'created', - 'started', - 'ended', - 'metadata.a1', - 'metadata.b1', - 'metadata.a2', - 'metadata.b2']) + expected_columns = [ + 'name', + 'status', + 'folder', + 'created', + 'started', + 'ended', + 'metadata.a1', + 'metadata.b1', + 'metadata.a2', + 'metadata.b2' + ] + + assert sorted(runs_df.columns.to_list()) == sorted(expected_columns) data = runs_df.to_dict('records') for i in range(len(runs)): From a5432dee32d3beddb0aebcae0420552ffcca2918 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 7 Jan 2025 15:31:57 +0000 Subject: [PATCH 034/163] Removed erronous get_status for alerts --- simvue/api/objects/alert/base.py | 15 --------------- tests/unit/test_user_alert.py | 2 +- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index cc14e4e5..93af1360 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -6,11 +6,9 @@ """ -import http import pydantic import typing from 
simvue.api.objects.base import SimvueObject, staging_check, write_only -from simvue.api.request import get as sv_get, get_json_from_response from simvue.models import NAME_REGEX @@ -124,19 +122,6 @@ def abort(self, abort: bool) -> None: """Configure alert to trigger aborts""" self._staging["abort"] = abort - def get_status(self, run_id: str) -> typing.Literal["ok", "critical", "no_data"]: - _response = sv_get( - url=self.url / "status", headers=self._headers, params={"run": run_id} - ) - - _json_response = get_json_from_response( - expected_status=[http.HTTPStatus.OK], - response=_response, - scenario=f"Retrieving status for alert '{self._identifier}'", - ) - - return _json_response["status"] - @pydantic.validate_call def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: raise AttributeError( diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index 77c58780..14d9e1cd 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -56,6 +56,7 @@ def test_user_alert_modification_online() -> None: time.sleep(1) _new_alert = Alert(_alert.id) assert isinstance(_new_alert, UserAlert) + _new_alert.read_only(False) _new_alert.description = "updated!" assert _new_alert.description != "updated!" 
_new_alert.commit() @@ -125,7 +126,6 @@ def test_user_alert_status() -> None: _run.commit() _alert.set_status(_run.id, "critical") time.sleep(1) - assert _alert.get_status(_run.id) == "critical" _run.delete() _folder.delete(recursive=True, runs_only=False, delete_runs=True) _alert.delete() From 17fa181e1e6c0d05eddd168711651fe2882830af Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 7 Jan 2025 17:02:13 +0000 Subject: [PATCH 035/163] Updated validation error formatter --- simvue/api/objects/administrator/tenant.py | 4 ++-- simvue/utilities.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index ca803a5c..46b90b90 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -23,12 +23,12 @@ def name(self) -> str: @property @staging_check def enabled(self) -> bool: - """Retrieve if alert is enabled""" + """Retrieve if tenant is enabled""" return self._get_attribute("enabled") @enabled.setter @write_only @pydantic.validate_call def enabled(self, enabled: str) -> None: - """Enable/disable alert""" + """Enable/disable tenant""" self._staging["enabled"] = enabled diff --git a/simvue/utilities.py b/simvue/utilities.py index adc1438c..2ef12bb5 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -110,7 +110,10 @@ def parse_validation_response( input_arg = body for loc in location: try: - input_arg = input_arg[loc] + if obj_type == "missing": + input_arg = None + else: + input_arg = input_arg[loc] except TypeError: break information.append(input_arg) From af9226e1da29d436fee6fe58e3055464040fd170 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 8 Jan 2025 08:18:49 +0000 Subject: [PATCH 036/163] Remove deprecated visibility for folders, and updated storage class --- simvue/api/objects/folder.py | 3 +-- simvue/api/objects/storage/base.py | 42 ++++++++++++------------------ 
simvue/api/objects/storage/file.py | 6 +++-- tests/unit/test_file_storage.py | 5 ++-- tests/unit/test_folder.py | 8 +----- 5 files changed, 25 insertions(+), 39 deletions(-) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 7d974843..7708e299 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -15,7 +15,7 @@ from simvue.exception import ObjectNotFoundError -from .base import SimvueObject, Visibility, staging_check, write_only +from .base import SimvueObject, staging_check, write_only from simvue.models import FOLDER_REGEX @@ -45,7 +45,6 @@ def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: **kwargs : dict any additional arguments to be passed to the object initialiser """ - self.visibility = Visibility(self) super().__init__(identifier, **kwargs) @classmethod diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 872f2131..26939619 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -16,15 +16,11 @@ def __init__( self._label = "storage" self._endpoint = self._label super().__init__(identifier, _read_only=_read_only, **kwargs) - self.status = Status(self) @classmethod def new(cls, **kwargs): pass - def get_status(self) -> dict[str, typing.Any]: - return {} if self._offline else self._get_attribute("status") - @property @staging_check def name(self) -> str: @@ -49,20 +45,20 @@ def type(self) -> str: @staging_check def default(self) -> bool: """Retrieve if this is the default storage for the user""" - return self._get_attribute("default") + return self._get_attribute("is_default") @default.setter @write_only @pydantic.validate_call def default(self, is_default: bool) -> None: """Set this storage to be the default""" - self._staging["default"] = is_default + self._staging["is_default"] = is_default @property @staging_check def tenant_usable(self) -> bool: """Retrieve if this is usable by the current user 
tenant""" - return self._get_attribute("tenant_usable") + return self._get_attribute("is_tenant_usable") @tenant_usable.setter @write_only @@ -71,6 +67,19 @@ def tenant_usable(self, is_tenant_usable: bool) -> None: """Set this storage to be usable by the current user tenant""" self._staging["tenant_usable"] = is_tenant_usable + @property + @staging_check + def enabled(self) -> bool: + """Retrieve if this is enabled""" + return self._get_attribute("is_enabled") + + @enabled.setter + @write_only + @pydantic.validate_call + def enabled(self, is_enabled: bool) -> None: + """Set this storage to be usable by the current user tenant""" + self._staging["is_enabled"] = is_enabled + @property def usage(self) -> int | None: return None if self._offline else self._get_attribute("usage") @@ -78,22 +87,3 @@ def usage(self) -> int | None: @property def user(self) -> str | None: return None if self._offline else self._get_attribute("user") - - -class Status: - def __init__(self, storage: StorageBase) -> None: - self._sv_obj = storage - - @property - def status(self) -> str: - try: - return self._sv_obj.get_status()["status"] - except KeyError as e: - raise RuntimeError("Expected key 'status' in status retrieval") from e - - @property - def timestamp(self) -> str: - try: - return self._sv_obj.get_status()["timestamp"] - except KeyError as e: - raise RuntimeError("Expected key 'timestamp' in status retrieval") from e diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index fc1989bd..9fe0c64e 100644 --- a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -14,6 +14,7 @@ def new( name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], disable_check: bool, tenant_usable: bool, + enabled: bool, default: bool, offline: bool = False, ) -> typing.Self: @@ -22,8 +23,9 @@ def new( name=name, type="File", disable_check=disable_check, - tenant_useable=tenant_usable, - default=default, + is_tenant_useable=tenant_usable, + 
is_default=default, + is_enabled=enabled, _read_only=False, ) _storage.offline_mode(offline) diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index db037cd5..0db05784 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -8,9 +8,10 @@ @pytest.mark.api def test_create_file_storage_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False) + _storage = FileStorage.new( + name=_uuid, disable_check=False, tenant_usable=False, default=False, enabled=True) _storage.commit() - assert _storage.status.status + assert _storage.enabled assert _storage.name == _uuid _storage.delete() diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index b2dcb0ce..2bf7f341 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -15,10 +15,7 @@ def test_folder_creation_online() -> None: _folder.commit() assert _folder.id assert _folder.path == _path - assert not _folder.visibility.public - assert not _folder.visibility.tenant - assert not _folder.visibility.users - _folders = Folder.get(count=10) + _folders = dict(Folder.get(count=10)) assert _folders assert _folders[_folder.id] assert _folders[_folder.id]._read_only @@ -36,9 +33,6 @@ def test_folder_creation_offline() -> None: assert _folder.id assert _folder.path == _path - with pytest.raises(AttributeError): - _folder.visibility.public - _folder.delete() with _folder._local_staging_file.open() as in_f: From 158a5363df97a414956567efd8b6d41648b587b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 8 Jan 2025 08:20:42 +0000 Subject: [PATCH 037/163] Fix online folder test --- tests/unit/test_folder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 2bf7f341..78b8f1e8 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ 
-21,7 +21,7 @@ def test_folder_creation_online() -> None: assert _folders[_folder.id]._read_only with pytest.raises(AssertionError): _folders[_folder.id].name = "hello" - _folder.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) @pytest.mark.api From f852b4ac2eead9b65f33124bff403bb1fbb6e9fd Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 09:45:45 +0000 Subject: [PATCH 038/163] Added new arguments for tenant --- simvue/api/objects/administrator/tenant.py | 77 ++++++++++++++++++++-- 1 file changed, 70 insertions(+), 7 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 46b90b90..8ea8389b 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -10,8 +10,25 @@ class Tenant(SimvueObject): @classmethod @pydantic.validate_call - def new(cls, *, name: str, enabled: bool = True, offline: bool = False) -> Self: - _tenant = Tenant(name=name, enabled=enabled, offline=offline, _read_only=False) + def new( + cls, + *, + name: str, + is_enabled: bool = True, + max_request_rate: int = 0, + max_runs: int = 0, + max_data_volume: int = 0, + offline: bool = False, + ) -> Self: + _tenant = Tenant( + name=name, + is_enabled=is_enabled, + max_request_rate=max_request_rate, + max_runs=max_runs, + max_data_volume=max_data_volume, + offline=offline, + _read_only=False, + ) _tenant.offline_mode(offline) return _tenant # type: ignore @@ -20,15 +37,61 @@ def name(self) -> str: """Retrieve the name of the tenant""" return self._get_attribute("name") + @name.setter + @write_only + @pydantic.validate_call + def name(self, name: str) -> None: + """Change name of tenant""" + self._staging["name"] = name + @property @staging_check - def enabled(self) -> bool: + def is_enabled(self) -> bool: """Retrieve if tenant is enabled""" - return self._get_attribute("enabled") + return self._get_attribute("is_enabled") - @enabled.setter + 
@is_enabled.setter @write_only @pydantic.validate_call - def enabled(self, enabled: str) -> None: + def is_enabled(self, is_enabled: str) -> None: """Enable/disable tenant""" - self._staging["enabled"] = enabled + self._staging["is_enabled"] = is_enabled + + @property + @staging_check + def max_request_rate(self) -> bool: + """Retrieve the tenant's maximum request rate""" + return self._get_attribute("max_request_rate") + + @max_request_rate.setter + @write_only + @pydantic.validate_call + def max_request_rate(self, max_request_rate: str) -> None: + """Update tenant's maximum request rate""" + self._staging["max_request_rate"] = max_request_rate + + @property + @staging_check + def max_runs(self) -> bool: + """Retrieve the tenant's maximum runs""" + return self._get_attribute("max_runs") + + @max_runs.setter + @write_only + @pydantic.validate_call + def max_runs(self, max_runs: str) -> None: + """Update tenant's maximum runs""" + self._staging["max_runs"] = max_runs + + @property + @staging_check + def max_data_volume(self) -> bool: + """Retrieve the tenant's maximum data volume""" + return self._get_attribute("max_data_volume") + + @max_data_volume.setter + @write_only + @pydantic.validate_call + def max_data_volume(self, max_data_volume: str) -> None: + """Update tenant's maximum data volume""" + self._staging["max_data_volume"] = max_data_volume From b36103820d2400f87385eb3c9b90fe947bd2f3d6 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 09:50:08 +0000 Subject: [PATCH 039/163] Fixed typing --- simvue/api/objects/administrator/tenant.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 8ea8389b..a4173dc1 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -53,45 +53,45 @@ def is_enabled(self) -> bool: @is_enabled.setter @write_only @pydantic.validate_call - def 
is_enabled(self, is_enabled: str) -> None: + def is_enabled(self, is_enabled: bool) -> None: """Enable/disable tenant""" self._staging["is_enabled"] = is_enabled @property @staging_check - def max_request_rate(self) -> bool: + def max_request_rate(self) -> int: """Retrieve the tenant's maximum request rate""" return self._get_attribute("max_request_rate") @max_request_rate.setter @write_only @pydantic.validate_call - def max_request_rate(self, max_request_rate: str) -> None: + def max_request_rate(self, max_request_rate: int) -> None: """Update tenant's maximum request rate""" self._staging["max_request_rate"] = max_request_rate @property @staging_check - def max_runs(self) -> bool: + def max_runs(self) -> int: """Retrieve the tenant's maximum runs""" return self._get_attribute("max_runs") @max_runs.setter @write_only @pydantic.validate_call - def max_runs(self, max_runs: str) -> None: + def max_runs(self, max_runs: int) -> None: """Update tenant's maximum runs""" self._staging["max_runs"] = max_runs @property @staging_check - def max_data_volume(self) -> bool: + def max_data_volume(self) -> int: """Retrieve the tenant's maximum data volume""" return self._get_attribute("max_data_volume") @max_data_volume.setter @write_only @pydantic.validate_call - def max_data_volume(self, max_data_volume: str) -> None: + def max_data_volume(self, max_data_volume: int) -> None: """Update tenant's maximum data volume""" self._staging["max_data_volume"] = max_data_volume From d81dff80ead4c9540d6c1007e27b0b828cd69a16 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 10:20:23 +0000 Subject: [PATCH 040/163] Addressed MR comments --- pyproject.toml | 4 +++- simvue/api/objects/folder.py | 6 +++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7e0658c9..c9510c44 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,9 @@ markers = [ "unix: tests for UNIX systems only", "metadata: tests of metadata gathering 
functions", "proxies: tests for remote/offline Simvue proxies", - "offline: tests for offline functionality" + "online: tests for online functionality", + "offline: tests for offline functionality", + "local: tests of functionality which do not involve a server or writing to an offline cache file" ] [tool.interrogate] diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 932dd1dd..2e16a471 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -146,9 +146,9 @@ def ttl(self, time_seconds: int) -> None: def delete( # should params to this be optional and default to False? self, *, - recursive: typing.Optional[bool] = False, - delete_runs: typing.Optional[bool] = False, - runs_only: typing.Optional[bool] = False, + recursive: bool | None = False, + delete_runs: bool | None = False, + runs_only: bool | None = False, ) -> dict[str, typing.Any]: return super().delete( recursive=recursive, runs=delete_runs, runs_only=runs_only From d99e840e01b8ed2757c89f7c3a7a46ac75e92f41 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 10:48:23 +0000 Subject: [PATCH 041/163] Changed is_enabled to enabled --- simvue/api/objects/administrator/tenant.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index a4173dc1..7cba2352 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -46,16 +46,16 @@ def name(self, name: str) -> None: @property @staging_check - def is_enabled(self) -> bool: + def enabled(self) -> bool: """Retrieve if tenant is enabled""" return self._get_attribute("is_enabled") - @is_enabled.setter + @enabled.setter @write_only @pydantic.validate_call - def is_enabled(self, is_enabled: bool) -> None: + def enabled(self, enabled: bool) -> None: """Enable/disable tenant""" - self._staging["is_enabled"] = is_enabled + self._staging["is_enabled"] = 
enabled @property @staging_check From e82d0a09283e7c7ccd28b445d8f249e1fde2d111 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 8 Jan 2025 10:48:53 +0000 Subject: [PATCH 042/163] Fix use of Folder.delete in tests and ensure | used for typing --- simvue/api/objects/alert/base.py | 12 +++--------- simvue/api/objects/base.py | 8 ++++---- simvue/api/objects/run.py | 2 +- tests/unit/test_folder.py | 5 ++--- tests/unit/test_metric_range_alert.py | 1 + tests/unit/test_run.py | 7 ++++--- 6 files changed, 15 insertions(+), 20 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 5bd245dc..7d0d1a90 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -67,15 +67,9 @@ def description(self, description: str | None) -> None: @property @staging_check - def tags(self) -> list[str]: - """Retrieve alert tags""" - return self._get_attribute("tags") - - @tags.setter - @pydantic.validate_call - def tags(self, tags: list[str]) -> None: - """Set alert tags""" - self._staging["tags"] = tags + def auto_tags(self) -> list[str]: + """Retrieve automatically assigned tags from runs""" + return self._get_attribute("auto_tags") @property @staging_check diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 7405be09..69e6d8d7 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -116,13 +116,13 @@ def tenant(self, tenant: bool) -> None: class SimvueObject(abc.ABC): def __init__( - self, identifier: typing.Optional[str] = None, _read_only: bool = True, **kwargs + self, identifier: str | None = None, _read_only: bool = True, **kwargs ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) self._read_only: bool = _read_only self._endpoint: str = getattr(self, "_endpoint", f"{self._label}s") - self._identifier: typing.Optional[str] = ( + 
self._identifier: str | None = ( identifier if identifier is not None else f"offline_{uuid.uuid1()}" ) self._properties = [ @@ -355,7 +355,7 @@ def commit(self) -> None: self._clear_staging() @property - def id(self) -> typing.Optional[str]: + def id(self) -> str | None: return self._identifier @property @@ -363,7 +363,7 @@ def _base_url(self) -> URL: return URL(self._user_config.server.url) / self._endpoint @property - def url(self) -> typing.Optional[URL]: + def url(self) -> URL | None: return None if self._identifier is None else self._base_url / self._identifier def _post(self, **kwargs) -> dict[str, typing.Any]: diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index afbb02b0..0b791ec7 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -106,7 +106,7 @@ def ttl(self) -> int: @ttl.setter @write_only @pydantic.validate_call - def ttl(self, time_seconds: int | None) -> None: + def ttl(self, time_seconds: pydantic.NonNegativeInt) -> None: """Update the retention period for this run""" self._staging["ttl"] = time_seconds diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 78b8f1e8..2f9c6009 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -51,16 +51,15 @@ def test_folder_modification_online() -> None: _folder.commit() time.sleep(1) _folder_new = Folder(identifier=_folder.id) + _folder_new.read_only(False) _folder_new.tags = _tags _folder_new.description = _description - _folder_new.visibility.tenant = True _folder_new.commit() assert _folder_new.tags == _tags assert _folder.tags == _tags assert _folder_new.description == _description assert _folder.description == _description - assert _folder_new.visibility.tenant - _folder.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) @pytest.mark.api diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index f9dd1eb6..639e0436 100644 --- a/tests/unit/test_metric_range_alert.py 
+++ b/tests/unit/test_metric_range_alert.py @@ -73,6 +73,7 @@ def test_metric_range_alert_modification_online() -> None: _alert.commit() time.sleep(1) _new_alert = Alert(_alert.id) + _new_alert.read_only(False) assert isinstance(_new_alert, MetricsRangeAlert) _new_alert.description = "updated!" assert _new_alert.description != "updated!" diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index cfcd7f23..6edb6855 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -17,7 +17,7 @@ def test_run_creation_online() -> None: _run.commit() assert _run.folder == _folder_name _run.delete() - _folder.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) @pytest.mark.api @@ -30,7 +30,7 @@ def test_run_creation_offline() -> None: _run.commit() assert _run.folder == _folder_name _run.delete() - _folder.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) with _run._local_staging_file.open() as in_f: _local_data = json.load(in_f) @@ -51,6 +51,7 @@ def test_run_modification_online() -> None: time.sleep(1) _now = datetime.datetime.now() _new_run = Run(identifier=_run.id) + _new_run.read_only(False) _new_run.name = "simvue_test_run" _new_run.description = "Simvue test run" _new_run.created = _now @@ -64,7 +65,7 @@ def test_run_modification_online() -> None: assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) assert _new_run.name == "simvue_test_run" _run.delete() - _folder.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) @pytest.mark.api From 3ca0f14b77a7749ef0db5c3a0b81952f6fed865d Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 10:51:17 +0000 Subject: [PATCH 043/163] Changed is_enabled to enabled --- simvue/api/objects/administrator/tenant.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 7cba2352..85620d5a 100644 --- 
a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -14,7 +14,7 @@ def new( cls, *, name: str, - is_enabled: bool = True, + enabled: bool = True, max_request_rate: int = 0, max_runs: int = 0, max_data_volume: int = 0, @@ -22,7 +22,7 @@ def new( ) -> Self: _tenant = Tenant( name=name, - is_enabled=is_enabled, + enabled=enabled, max_request_rate=max_request_rate, max_runs=max_runs, max_data_volume=max_data_volume, From 8fdc571a7c500803803b1a0fad1d7d79370ddf29 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 10:58:11 +0000 Subject: [PATCH 044/163] Changed enabled to is_enabled in init --- simvue/api/objects/administrator/tenant.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index 85620d5a..cd5b05f3 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -22,7 +22,7 @@ def new( ) -> Self: _tenant = Tenant( name=name, - enabled=enabled, + is_enabled=enabled, max_request_rate=max_request_rate, max_runs=max_runs, max_data_volume=max_data_volume, From b83a4a6a35db04b27edea047f05a8d64728fa314 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 8 Jan 2025 11:06:02 +0000 Subject: [PATCH 045/163] Fix S3 storage test --- simvue/api/objects/storage/base.py | 15 ++++++++------- simvue/api/objects/storage/fetch.py | 4 ++-- tests/unit/test_s3_storage.py | 5 +---- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 26939619..452d9edf 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -1,8 +1,10 @@ import typing import pydantic +import datetime + from simvue.api.objects.base import SimvueObject, staging_check, write_only -from simvue.models import NAME_REGEX +from simvue.models import NAME_REGEX, 
DATETIME_FORMAT class StorageBase(SimvueObject): @@ -81,9 +83,8 @@ def enabled(self, is_enabled: bool) -> None: self._staging["is_enabled"] = is_enabled @property - def usage(self) -> int | None: - return None if self._offline else self._get_attribute("usage") - - @property - def user(self) -> str | None: - return None if self._offline else self._get_attribute("user") + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the artifact""" + _created: str | None = self._get_attribute("created") + _format = DATETIME_FORMAT.replace(" ", "T") + return datetime.datetime.strptime(_created, _format) if _created else None diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 1fd6d8e0..4bc6cfd5 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -58,10 +58,10 @@ def get( for _entry in _json_response: _id = _entry.pop("id") if _entry["type"] == "S3": - yield _entry["id"], S3Storage(read_only=True, identifier=_id, **_entry) + yield _id, S3Storage(read_only=True, identifier=_id, **_entry) elif _entry["type"] == "File": yield ( - _entry["id"], + _id, FileStorage(read_only=True, identifier=_id, **_entry), ) else: diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 47aad7ef..bbab22e7 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -25,10 +25,7 @@ def test_create_s3_online() -> None: assert _storage.config.endpoint_url == "https://not_a_real_url.io/" assert _storage.config.region_name == "fictionsville" assert _storage.config.bucket == "dummy_bucket" - assert not _storage.usage - assert _storage.status.status - assert _storage.status.timestamp - assert _storage.user + assert _storage.created assert dict(Storage.get()) _storage.delete() From a46ed6ae530601257a4c3d1cc43e53aaa85c7451 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 8 Jan 2025 11:36:20 +0000 Subject: [PATCH 046/163] Fix 
Stats --- simvue/api/objects/stats.py | 2 +- tests/unit/test_stats.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/simvue/api/objects/stats.py b/simvue/api/objects/stats.py index 3664be79..dcb10f7d 100644 --- a/simvue/api/objects/stats.py +++ b/simvue/api/objects/stats.py @@ -8,8 +8,8 @@ class Stats(SimvueObject): def __init__(self) -> None: self.runs = RunStatistics(self) - super().__init__() self._label = "stat" + super().__init__() # Stats is a singular object (i.e. identifier is not applicable) # set it to empty string so not None diff --git a/tests/unit/test_stats.py b/tests/unit/test_stats.py index 2e21871e..fb766e8b 100644 --- a/tests/unit/test_stats.py +++ b/tests/unit/test_stats.py @@ -6,6 +6,7 @@ @pytest.mark.local def test_stats() -> None: _statistics = Stats() + assert f"{_statistics.url}" == f"{_statistics._base_url}/" assert isinstance(_statistics.runs.created, int) assert isinstance(_statistics.runs.running, int) assert isinstance(_statistics.runs.completed, int) From aac65f6e02f6b6526f263b9a65ab18795ea9603e Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 11:51:27 +0000 Subject: [PATCH 047/163] Set read only to false in alert modification test --- tests/unit/test_metric_threshold_alert.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index 0ed4f61a..d59d5a11 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -75,6 +75,7 @@ def test_metric_threshold_alert_modification_online() -> None: time.sleep(1) _new_alert = Alert(_alert.id) assert isinstance(_new_alert, MetricsThresholdAlert) + _new_alert.read_only(False) _new_alert.description = "updated!" assert _new_alert.description != "updated!" 
_new_alert.commit() From e52f96a1aca985f56e80eefee16a6012bca7fd31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 8 Jan 2025 12:50:39 +0000 Subject: [PATCH 048/163] Updated user object --- simvue/api/objects/administrator/user.py | 38 ++++++++++++++---------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index fc053d8c..3702ba56 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -28,11 +28,11 @@ def new( "username": username, "fullname": fullname, "email": email, - "manager": manager, - "readonly": readonly, + "is_manager": manager, + "is_readonly": readonly, "welcome": welcome, - "admin": admin, - "enabled": enabled, + "is_admin": admin, + "is_enabled": enabled, } _user = User(user=_user_info, tenant=tenant, offline=offline, _read_only=False) _user.offline_mode(offline) @@ -76,37 +76,43 @@ def fullname(self, fullname: str) -> None: @staging_check def manager(self) -> bool: if self.id and self.id.startswith("offline_"): - return self._get_attribute("user")["manager"] - return self._get_attribute("manager") + return self._get_attribute("user")["is_manager"] + return self._get_attribute("is_manager") @manager.setter @write_only @pydantic.validate_call - def manager(self, manager: bool) -> None: - self._staging["manager"] = manager + def manager(self, is_manager: bool) -> None: + self._staging["is_manager"] = is_manager @property @staging_check def admin(self) -> bool: if self.id and self.id.startswith("offline_"): - return self._get_attribute("user")["admin"] - return self._get_attribute("admin") + return self._get_attribute("user")["is_admin"] + return self._get_attribute("is_admin") @admin.setter @write_only @pydantic.validate_call - def admin(self, admin: bool) -> None: - self._staging["admin"] = admin + def admin(self, is_admin: bool) -> None: + self._staging["is_admin"] = is_admin 
+ + @property + def deleted(self) -> bool: + if self.id and self.id.startswith("offline_"): + return self._get_attribute("user")["is_deleted"] + return self._get_attribute("is_deleted") @property @staging_check def readonly(self) -> bool: if self.id and self.id.startswith("offline_"): - return self._get_attribute("user")["readonly"] - return self._get_attribute("readonly") + return self._get_attribute("user")["is_readonly"] + return self._get_attribute("is_readonly") @readonly.setter @write_only @pydantic.validate_call - def readonly(self, readonly: bool) -> None: - self._staging["readonly"] = readonly + def readonly(self, is_readonly: bool) -> None: + self._staging["is_readonly"] = is_readonly From 4e08934f6c9be055e2f1b2ea500db4dad2701107 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 13:25:17 +0000 Subject: [PATCH 049/163] Set read-only to false in tag modification test --- tests/unit/test_tag.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index 1927c87f..06a3c9aa 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -43,6 +43,7 @@ def test_tag_modification_online() -> None: _tag.commit() time.sleep(1) _new_tag = Tag(_tag.id) + _new_tag.read_only(False) _new_tag.name = _tag.name.replace("test", "test_modified") _new_tag.color = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) _new_tag.description = "modified test tag" From 0ee6075c303a17d076bceac26a96dad3ebe8f447 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 16:24:43 +0000 Subject: [PATCH 050/163] Made created immutable --- simvue/api/objects/run.py | 6 ------ tests/unit/test_run.py | 4 ---- 2 files changed, 10 deletions(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 2dc66f70..21dc2eba 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -208,12 +208,6 @@ def created(self) -> datetime.datetime: self._get_attribute("created"), DATETIME_FORMAT ) - @created.setter - 
@write_only - @pydantic.validate_call - def created(self, created: datetime.datetime) -> None: - self._staging["created"] = created.strftime(DATETIME_FORMAT) - @property @staging_check def started(self) -> datetime.datetime: diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 2ad7d1c3..ef9f3109 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -57,7 +57,6 @@ def test_run_modification_online() -> None: _new_run.read_only(False) _new_run.name = "simvue_test_run" _new_run.description = "Simvue test run" - _new_run.created = _now _new_run.tags = ["simvue", "test", "tag"] _new_run.ttl = 120 assert _new_run.ttl != 120 @@ -66,7 +65,6 @@ def test_run_modification_online() -> None: time.sleep(1) assert _new_run.ttl == 120 assert _new_run.description == "Simvue test run" - assert _new_run.created == _now assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) assert _new_run.name == "simvue_test_run" _run.delete() @@ -88,7 +86,6 @@ def test_run_modification_offline() -> None: _new_run = Run(identifier=_run.id) _new_run.name = "simvue_test_run" _new_run.description = "Simvue test run" - _new_run.created = _now _new_run.tags = ["simvue", "test", "tag"] _new_run.ttl = 120 @@ -101,7 +98,6 @@ def test_run_modification_offline() -> None: assert _new_run.ttl == 120 assert _new_run.description == "Simvue test run" - assert _new_run.created == _now assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) assert _new_run.name == "simvue_test_run" _run.delete() From 4c6238c4a32ed5e51cbded003e85497dcfdeed17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 8 Jan 2025 16:37:41 +0000 Subject: [PATCH 051/163] Fix datetime format when retrieving times for run --- simvue/api/objects/run.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 2dc66f70..86f7f6c7 100644 --- a/simvue/api/objects/run.py +++ 
b/simvue/api/objects/run.py @@ -203,10 +203,11 @@ def alerts(self, alerts: list[str]) -> None: @property @staging_check - def created(self) -> datetime.datetime: - return datetime.datetime.strptime( - self._get_attribute("created"), DATETIME_FORMAT - ) + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the run""" + _created: str | None = self._get_attribute("created") + _format = DATETIME_FORMAT.replace(" ", "T") + return datetime.datetime.strptime(_created, _format) if _created else None @created.setter @write_only @@ -216,10 +217,11 @@ def created(self, created: datetime.datetime) -> None: @property @staging_check - def started(self) -> datetime.datetime: - return datetime.datetime.strptime( - self._get_attribute("started"), DATETIME_FORMAT - ) + def started(self) -> datetime.datetime | None: + """Retrieve started datetime for the run""" + _started: str | None = self._get_attribute("started") + _format = DATETIME_FORMAT.replace(" ", "T") + return datetime.datetime.strptime(_started, _format) if _started else None @started.setter @write_only @@ -230,10 +232,10 @@ def started(self, started: datetime.datetime) -> None: @property @staging_check def endtime(self) -> datetime.datetime | None: - _endtime: str | None = self._get_attribute("endtime", None) - return ( - datetime.datetime.strptime(_endtime, DATETIME_FORMAT) if _endtime else None - ) + """Retrieve endtime datetime for the run""" + _endtime: str | None = self._get_attribute("endtime") + _format = DATETIME_FORMAT.replace(" ", "T") + return datetime.datetime.strptime(_endtime, _format) if _endtime else None @endtime.setter @write_only From c644b23b9935eea203a9e32c6778ebed8af772d6 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 8 Jan 2025 16:55:35 +0000 Subject: [PATCH 052/163] Changed datetime format of S3 storage and updated test --- simvue/api/objects/storage/base.py | 5 +++-- tests/unit/test_s3_storage.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff 
--git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 452d9edf..71bd566c 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -86,5 +86,6 @@ def enabled(self, is_enabled: bool) -> None: def created(self) -> datetime.datetime | None: """Retrieve created datetime for the artifact""" _created: str | None = self._get_attribute("created") - _format = DATETIME_FORMAT.replace(" ", "T") - return datetime.datetime.strptime(_created, _format) if _created else None + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index bc4f5fb5..d6f937d8 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -12,7 +12,7 @@ def test_create_s3_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = S3Storage.new( name=_uuid, - endpoint_url="https://not_a_real_url.io", + endpoint_url="https://not-a-real-url.io", disable_check=True, tenant_usable=False, default=False, @@ -23,7 +23,7 @@ def test_create_s3_online() -> None: ) _storage.commit() assert _storage.name == _uuid - assert _storage.config.endpoint_url == "https://not_a_real_url.io/" + assert _storage.config.endpoint_url == "https://not-a-real-url.io/" assert _storage.config.region_name == "fictionsville" assert _storage.config.bucket == "dummy_bucket" assert _storage.created From bfa85ea9cfe3c2c7dbf08c35ec4be3e1891bb48f Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 9 Jan 2025 09:17:03 +0000 Subject: [PATCH 053/163] Remove Ts from date formats --- simvue/api/objects/artifact.py | 5 +++-- simvue/api/objects/run.py | 15 +++++++++------ 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 0846e5c8..d2566207 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -337,8 +337,9 @@ def name(self) -> str 
| None: def created(self) -> datetime.datetime | None: """Retrieve created datetime for the artifact""" _created: str | None = self._get_attribute("created") - _format = DATETIME_FORMAT.replace(" ", "T") - return datetime.datetime.strptime(_created, _format) if _created else None + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) @classmethod def from_name( diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 290f10c7..c59779e9 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -206,16 +206,18 @@ def alerts(self, alerts: list[str]) -> None: def created(self) -> datetime.datetime | None: """Retrieve created datetime for the run""" _created: str | None = self._get_attribute("created") - _format = DATETIME_FORMAT.replace(" ", "T") - return datetime.datetime.strptime(_created, _format) if _created else None + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) @property @staging_check def started(self) -> datetime.datetime | None: """Retrieve started datetime for the run""" _started: str | None = self._get_attribute("started") - _format = DATETIME_FORMAT.replace(" ", "T") - return datetime.datetime.strptime(_started, _format) if _started else None + return ( + datetime.datetime.strptime(_started, DATETIME_FORMAT) if _started else None + ) @started.setter @write_only @@ -228,8 +230,9 @@ def started(self, started: datetime.datetime) -> None: def endtime(self) -> datetime.datetime | None: """Retrieve endtime datetime for the run""" _endtime: str | None = self._get_attribute("endtime") - _format = DATETIME_FORMAT.replace(" ", "T") - return datetime.datetime.strptime(_endtime, _format) if _endtime else None + return ( + datetime.datetime.strptime(_endtime, DATETIME_FORMAT) if _endtime else None + ) @endtime.setter @write_only From 09b7492fefcfb0c86afe086e1eed6da0e0f24956 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 9 Jan 2025 15:53:02 +0000 
Subject: [PATCH 054/163] Remove url slash at end if identifier is blank --- simvue/api/url.py | 2 +- tests/unit/test_stats.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/simvue/api/url.py b/simvue/api/url.py index a5f0a7de..43a83ef6 100644 --- a/simvue/api/url.py +++ b/simvue/api/url.py @@ -30,7 +30,7 @@ def __itruediv__(self, other: str) -> Self: other = other[1:] if other.startswith("/") else other other = other[:-1] if other.endswith("/") else other - self._path = f"{self._path}/{other}" + self._path = f"{self._path}/{other}" if other else self._path return self @property diff --git a/tests/unit/test_stats.py b/tests/unit/test_stats.py index fb766e8b..2139cf3d 100644 --- a/tests/unit/test_stats.py +++ b/tests/unit/test_stats.py @@ -3,10 +3,10 @@ from simvue.api.objects import Stats @pytest.mark.api -@pytest.mark.local +@pytest.mark.online def test_stats() -> None: _statistics = Stats() - assert f"{_statistics.url}" == f"{_statistics._base_url}/" + assert f"{_statistics.url}" == f"{_statistics._base_url}" assert isinstance(_statistics.runs.created, int) assert isinstance(_statistics.runs.running, int) assert isinstance(_statistics.runs.completed, int) From 97ced62dbfab8b3d9ddd5ebe5081c94fc8402cb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 10 Jan 2025 08:12:09 +0000 Subject: [PATCH 055/163] Started update of artifact class --- simvue/api/objects/artifact.py | 48 ++++++++++++++++++---------------- tests/unit/test_artifact.py | 3 ++- 2 files changed, 27 insertions(+), 24 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 0846e5c8..bcae4efa 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -12,7 +12,6 @@ import typing import pydantic import os.path -import io import sys import requests @@ -44,34 +43,30 @@ class Artifact(SimvueObject): """Connect to/create an artifact locally or on the server""" - def __init__( - self, - 
identifier: str | None = None, - **kwargs, - ) -> None: - super().__init__(identifier, **kwargs) - self._staging = {"server": kwargs, "storage": {}} - self._run_id = kwargs.get("run") - self._label = "artifact" - @classmethod def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - run_id: str, + run_id: str | None, storage_id: str | None, + checksum: str, + size: int, + file_type: str, category: Category, + original_path: pathlib.Path | None, + metadata: dict[str, typing.Any] | None, offline: bool = False, **kwargs, ) -> Self: _artifact = Artifact( - run=run_id, name=name, - storage=storage_id, - category=category, + checksum=checksum, + size=size, + type=file_type, + originalPath=f"{original_path or ''}", + metadata=metadata, _read_only=False, - **kwargs, ) _artifact.offline_mode(offline) @@ -79,7 +74,7 @@ def new( return _artifact # Firstly submit a request for a new artifact - _response = _artifact._post(**_artifact._staging["server"]) + _response = _artifact._post(**_artifact._staging) # If this artifact does not exist a URL will be returned _artifact._staging["server"]["url"] = _response.get("url") @@ -104,6 +99,7 @@ def new_file( category: Category, file_path: pydantic.FilePath, file_type: str | None, + metadata: dict[str, typing.Any] | None, offline: bool = False, ) -> Self: """Create a new artifact either locally or on the server @@ -124,6 +120,8 @@ def new_file( path to the file this artifact represents file_type : str | None the mime type for this file, else this is determined + metadata : dict[str, Any] | None + supply metadata information for this artifact offline : bool, optional whether to define this artifact locally, default is False @@ -142,11 +140,12 @@ def new_file( run_id=run_id, storage_id=storage_id, category=category, - originalPath=os.path.expandvars(_file_orig_path), + original_path=os.path.expandvars(_file_orig_path), size=_file_size, - type=_file_type, + file_type=_file_type, checksum=_file_checksum, 
offline=offline, + metadata=metadata, ) _artifact.offline_mode(offline) @@ -167,6 +166,7 @@ def new_object( storage: str | None, category: Category, obj: typing.Any, + metadata: dict[str, typing.Any] | None, allow_pickling: bool = True, offline: bool = False, ) -> Self: @@ -186,6 +186,8 @@ def new_object( the category of this artifact obj : Any object to serialize and upload + metadata : dict[str, Any] | None + supply metadata information for this artifact allow_pickling : bool, optional whether to allow the object to be pickled if no other serialization found. Default is True @@ -210,15 +212,15 @@ def new_object( name=name, storage=storage, category=category, - originalPath="", size=sys.getsizeof(obj), - type=_data_type, + file_type=_data_type, checksum=_checksum, + metadata=metadata, ) _artifact.offline_mode(offline) - _artifact._staging["storage"]["files"] = {"file": io.BytesIO(_serialized)} - _artifact._upload() + # _artifact._staging["storage"]["files"] = {"file": io.BytesIO(_serialized)} + # _artifact._upload() return _artifact def commit(self) -> None: diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 83b55c22..592c0a7a 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -30,7 +30,8 @@ def test_artifact_creation_online() -> None: file_path=_path, category="input", storage_id=None, - file_type=None + file_type=None, + metadata=None ) time.sleep(1) for member in _artifact._properties: From 00afcc0cc696feee6c34ce1fd4cc3996d582610b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 10 Jan 2025 08:12:27 +0000 Subject: [PATCH 056/163] Fixed wrong fake URL for storage test --- tests/unit/test_s3_storage.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index bc4f5fb5..4181d630 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -12,7 +12,7 @@ def 
test_create_s3_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = S3Storage.new( name=_uuid, - endpoint_url="https://not_a_real_url.io", + endpoint_url="https://not-a-real-url.io", disable_check=True, tenant_usable=False, default=False, @@ -23,7 +23,7 @@ def test_create_s3_online() -> None: ) _storage.commit() assert _storage.name == _uuid - assert _storage.config.endpoint_url == "https://not_a_real_url.io/" + assert _storage.config.endpoint_url == "https://not-a-real-url.io/" assert _storage.config.region_name == "fictionsville" assert _storage.config.bucket == "dummy_bucket" assert _storage.created @@ -37,7 +37,7 @@ def test_create_s3_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = S3Storage.new( name=_uuid, - endpoint_url="https://not_a_real_url.io", + endpoint_url="https://not-a-real-url.io", disable_check=False, region_name="fictionsville", access_key_id="dummy_key", @@ -49,7 +49,7 @@ def test_create_s3_offline() -> None: ) _storage.commit() assert _storage.name == _uuid - assert _storage.config.endpoint_url == "https://not_a_real_url.io" + assert _storage.config.endpoint_url == "https://not-a-real-url.io" assert _storage.config.region_name == "fictionsville" assert _storage.config.bucket == "dummy_bucket" assert not _storage.status From d7762683780b39e44b7137a3195458ce1e4c5501 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 10 Jan 2025 11:33:15 +0000 Subject: [PATCH 057/163] Modify Artifact to new specification --- simvue/api/objects/artifact.py | 160 +++++++++++++++------------------ simvue/api/request.py | 23 +++-- tests/unit/test_artifact.py | 7 +- 3 files changed, 90 insertions(+), 100 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 7d39f224..57967ff8 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -8,6 +8,7 @@ import datetime import http +import io import pathlib import typing import pydantic 
@@ -43,17 +44,25 @@ class Artifact(SimvueObject): """Connect to/create an artifact locally or on the server""" + def __init__( + self, identifier: str | None = None, _read_only: bool = True, **kwargs + ) -> None: + super().__init__(identifier=identifier, _read_only=_read_only, **kwargs) + + # If the artifact is an online instance, need a place to store the response + # from the initial creation + self._init_data: dict[str, dict] | None = None + self._staging |= {"runs": []} + @classmethod def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - run_id: str | None, storage_id: str | None, checksum: str, size: int, file_type: str, - category: Category, original_path: pathlib.Path | None, metadata: dict[str, typing.Any] | None, offline: bool = False, @@ -63,8 +72,9 @@ def new( name=name, checksum=checksum, size=size, - type=file_type, originalPath=f"{original_path or ''}", + storage=storage_id, + type=file_type, metadata=metadata, _read_only=False, ) @@ -73,18 +83,11 @@ def new( if offline: return _artifact - # Firstly submit a request for a new artifact - _response = _artifact._post(**_artifact._staging) - - # If this artifact does not exist a URL will be returned - _artifact._staging["server"]["url"] = _response.get("url") - - # If a storage ID has been provided store that else retrieve it - _artifact._staging["server"]["storage"] = storage_id or _response.get( - "storage_id" - ) - _artifact._staging["storage"]["data"] = _response.get("fields") - _artifact._staging["storage"]["files"] = None + # Firstly submit a request for a new artifact, remove the run IDs + # as these are not an argument for artifact creation + _post_args = _artifact._staging.copy() + _post_args.pop("runs", None) + _artifact._init_data = _artifact._post(**_post_args) return _artifact @@ -94,9 +97,7 @@ def new_file( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - run_id: str, storage_id: str | None, - category: Category, file_path: 
pydantic.FilePath, file_type: str | None, metadata: dict[str, typing.Any] | None, @@ -110,8 +111,6 @@ def new_file( ---------- name : str the name for this artifact - run_id : str - the identifier with which this artifact is associated storage_id : str | None the identifier for the storage location for this object category : "code" | "input" | "output" @@ -137,9 +136,7 @@ def new_file( _artifact = Artifact.new( name=name, - run_id=run_id, storage_id=storage_id, - category=category, original_path=os.path.expandvars(_file_orig_path), size=_file_size, file_type=_file_type, @@ -148,11 +145,8 @@ def new_file( metadata=metadata, ) - _artifact.offline_mode(offline) - with open(file_path, "rb") as out_f: - _artifact._staging["storage"]["files"] = {"file": out_f} - _artifact._upload() + _artifact._upload(file=out_f) return _artifact @@ -162,7 +156,6 @@ def new_object( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - run_id: str, storage: str | None, category: Category, obj: typing.Any, @@ -178,8 +171,6 @@ def new_object( ---------- name : str the name for this artifact - run_id : str - the identifier with which this artifact is associated storage : str | None the identifier for the storage location for this object category : "code" | "input" | "output" @@ -208,42 +199,60 @@ def new_object( _checksum = calculate_sha256(_serialized, is_file=False) _artifact = Artifact.new( - run_id=run_id, name=name, storage=storage, - category=category, size=sys.getsizeof(obj), file_type=_data_type, checksum=_checksum, metadata=metadata, ) - _artifact.offline_mode(offline) - # _artifact._staging["storage"]["files"] = {"file": io.BytesIO(_serialized)} - # _artifact._upload() + _artifact._upload(file=io.BytesIO(_serialized)) return _artifact def commit(self) -> None: raise TypeError("Cannot call method 'commit' on write-once type 'Artifact'") - def _upload(self) -> None: + def attach_to_run(self, run_id: str, category: Category) -> None: + """Attach this artifact to a 
given run""" + self._staging["runs"].append({"id": run_id, "category": category}) + if self._offline: super().commit() return - _run_id = self._staging["server"]["run"] - _files = self._staging["storage"]["files"] - _name = self._staging["server"]["name"] - _data = self._staging["storage"].get("data") - - _run_artifacts_url: URL = ( + _name = self._staging["name"] + _run_artifacts_url = ( URL(self._user_config.server.url) - / f"runs/{_run_id}/artifacts/{self._identifier}" + / f"runs/{run_id}/artifacts/{self._init_data['id']}" + ) + + _response = sv_put( + url=f"{_run_artifacts_url}", + headers=self._headers, + json={"category": category}, ) - if _url := self._staging["server"]["url"]: + get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario=f"adding artifact '{_name}' to run '{run_id}'", + response=_response, + ) + + def _upload(self, file: io.BytesIO) -> None: + if self._offline: + super().commit() + return + + if _url := self._init_data.get("url"): + _name = self._staging["name"] + _response = sv_post( - url=_url, headers={}, is_json=False, files=_files, data=_data + url=_url, + headers={}, + is_json=False, + files={"file": file}, + data=self._init_data.get("fields"), ) self._logger.debug( @@ -258,21 +267,6 @@ def _upload(self) -> None: response=_response, ) - if not self._staging["server"].get("storage"): - return - - _response = sv_put( - url=f"{_run_artifacts_url}", - headers=self._headers, - data=self._staging["server"], - ) - - get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"adding artifact '{_name}' to run '{_run_id}'", - response=_response, - ) - def _get( self, storage: str | None = None, url: str | None = None, **kwargs ) -> dict[str, typing.Any]: @@ -282,33 +276,20 @@ def _get( **kwargs, ) - def _get_from_run(self, attribute: str, *default) -> typing.Any: - return self._get_attribute(attribute, default, url=self.run_url) - @property def checksum(self) -> str: """Retrieve the checksum for this artifact""" 
return self._get_attribute("checksum") - @property - def uploaded(self) -> bool: - """Retrieve if the artifact has an upload""" - return self._get_attribute("uploaded") - @property def storage_url(self) -> URL | None: """Retrieve upload URL for artifact""" - return URL(_url) if (_url := self._get_attribute("url")) else None - - @property - def category(self) -> Category | None: - """Retrieve the category for this artifact if applicable""" - return self._get_from_run("category") + return URL(_url) if (_url := self._init_data.get("url")) else None @property def original_path(self) -> str: """Retrieve the original path of the file associated with this artifact""" - return self._get_attribute("original_path") + return self._get_attribute("originalPath") @property def storage(self) -> str | None: @@ -325,11 +306,6 @@ def size(self) -> int: """Retrieve the size for this artifact in bytes""" return self._get_attribute("size") - @property - def run_id(self) -> str | None: - """Retrieve ID for run relating to this artifact""" - return self._run_id - @property def name(self) -> str | None: """Retrieve name for the artifact""" @@ -361,20 +337,30 @@ def from_name( return Artifact(run=run_id, **_json_response) - @property - def run_url(self) -> URL | None: - """If artifact is connected to a run return the run artifact endpoint""" - if not self.run_id: - return None - _url = URL(self._user_config.server.url) - _url /= f"runs/{self.run_id}/artifacts/{self._identifier}" - return _url - @property def download_url(self) -> URL | None: """Retrieve the URL for downloading this artifact""" return self.url / "download" if self._identifier else None + def get_category(self, run_id: str) -> Category: + """Retrieve the category of this artifact with respect to a given run""" + _run_url = ( + URL(self._user_config.server.url) + / f"runs/{run_id}/artifacts/{self._identifier}" + ) + _response = sv_get(url=_run_url, header=self._headers) + _json_response = get_json_from_response( + 
response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + scenario=f"Retrieval of category for artifact '{self._identifier}' with respect to run '{run_id}'", + ) + if _response.status_code == http.HTTPStatus.NOT_FOUND: + raise ObjectNotFoundError( + self._label, self._identifier, extra=f"for run '{run_id}'" + ) + + return _json_response["category"] + @pydantic.validate_call def download(self, output_file: pathlib.Path) -> pathlib.Path | None: if not self.download_url: diff --git a/simvue/api/request.py b/simvue/api/request.py index 41317595..1c63a149 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -8,7 +8,7 @@ """ import copy -import json +import json as json_module import typing import http @@ -88,7 +88,7 @@ def post( """ if is_json: - data_sent: typing.Union[str, dict[str, typing.Any]] = json.dumps(data) + data_sent: typing.Union[str, dict[str, typing.Any]] = json_module.dumps(data) headers = set_json_header(headers) else: data_sent = data @@ -116,7 +116,8 @@ def post( def put( url: str, headers: dict[str, str], - data: dict[str, typing.Any], + data: dict[str, typing.Any] | None = None, + json: dict[str, typing.Any] | None = None, is_json: bool = True, timeout: int = DEFAULT_API_TIMEOUT, ) -> requests.Response: @@ -130,6 +131,8 @@ def put( headers for the post request data : dict[str, typing.Any] data to put + json : dict | None + json data to send is_json : bool, optional send as JSON string, by default True timeout : int, optional @@ -140,15 +143,17 @@ def put( requests.Response response from executing PUT """ - if is_json: - data_sent: typing.Union[str, dict[str, typing.Any]] = json.dumps(data) + if is_json and data: + data_sent: typing.Union[str, dict[str, typing.Any]] = json_module.dumps(data) headers = set_json_header(headers) else: data_sent = data - logging.debug(f"PUT: {url}\n\tdata={data_sent}") + logging.debug(f"PUT: {url}\n\tdata={data_sent}\n\tjson={json}") - return requests.put(url, headers=headers, 
data=data_sent, timeout=timeout) + return requests.put( + url, headers=headers, data=data_sent, timeout=timeout, json=json + ) @retry( @@ -222,13 +227,13 @@ def get_json_from_response( scenario: str, response: requests.Response, allow_parse_failure: bool = False, - expected_type: typing.Literal[list, dict] = dict, + expected_type: list | dict = dict, ) -> typing.Union[dict, list]: try: json_response = response.json() json_response = json_response or ({} if expected_type is dict else []) decode_error = "" - except json.JSONDecodeError as e: + except json_module.JSONDecodeError as e: json_response = {} if allow_parse_failure else None decode_error = f"{e}" diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 592c0a7a..7244109c 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -26,13 +26,12 @@ def test_artifact_creation_online() -> None: out_f.write(f"Hello World! {_uuid}") _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", - run_id=_run.id, file_path=_path, - category="input", storage_id=None, file_type=None, metadata=None ) + _artifact.attach_to_run(_run.id, "input") time.sleep(1) for member in _artifact._properties: try: @@ -45,10 +44,10 @@ def test_artifact_creation_online() -> None: assert os.path.exists(temp_f.name) with open(temp_f.name) as in_f: assert in_f.readline() == f"Hello World! 
{_uuid}\n" - if _failed: - raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) + if _failed: + raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) @pytest.mark.api From efce7dea97a31a81eb2eb293c9f846f7b40f21ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 10 Jan 2025 15:31:22 +0000 Subject: [PATCH 058/163] Continue test fixes --- simvue/api/objects/artifact.py | 7 ++++++ simvue/api/objects/base.py | 5 ++++- simvue/config/user.py | 2 -- simvue/run.py | 23 ++++++++++++++------ tests/functional/test_run_artifact_upload.py | 6 ++--- tests/functional/test_run_class.py | 7 ------ tests/unit/test_artifact.py | 5 ++--- tests/unit/test_run_init_metadata.py | 23 -------------------- 8 files changed, 32 insertions(+), 46 deletions(-) delete mode 100644 tests/unit/test_run_init_metadata.py diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 57967ff8..1fcd6cd2 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -26,6 +26,7 @@ from simvue.models import NAME_REGEX, DATETIME_FORMAT from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 from simvue.api.objects.base import SimvueObject +from simvue.api.objects.run import Run from simvue.serialization import serialize_object from simvue.api.request import ( put as sv_put, @@ -342,6 +343,12 @@ def download_url(self) -> URL | None: """Retrieve the URL for downloading this artifact""" return self.url / "download" if self._identifier else None + @property + def runs(self) -> typing.Generator[str, None, None]: + """Retrieve all runs for which this artifact is related""" + for _id, _ in Run.get(filters=[f"artifact.id == {self.id}"]): + yield _id + def get_category(self, run_id: str) -> Category: """Retrieve the category of this artifact with respect to a given run""" _run_url = ( diff --git 
a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 69e6d8d7..45db73c3 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -282,7 +282,10 @@ def get( ) for _entry in _data: - _id = _entry.pop("id") + if not (_id := _entry.pop("id", None)): + raise RuntimeError( + f"Expected key 'id' for {_class_instance.__class__.__name__.lower()}" + ) yield _id, cls(read_only=True, identifier=_id, **_entry) @classmethod diff --git a/simvue/config/user.py b/simvue/config/user.py index 014087a4..53ce59fa 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -173,8 +173,6 @@ def fetch( _config_dict: dict[str, dict[str, str]] = cls._load_pyproject_configs() or {} try: - logger.info(f"Using config file '{cls.config_file()}'") - # NOTE: Legacy INI support has been removed _config_dict |= toml.load(cls.config_file()) diff --git a/simvue/run.py b/simvue/run.py index 8ab1b1ec..8b1fa7e4 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -682,7 +682,10 @@ def init( self._data = self._sv_obj._staging self._sv_obj.commit() - name, self._id = self._sv_obj.name, self._sv_obj.id + if self._user_config.run.mode == "online": + name = self._sv_obj.name + + self._id = self._sv_obj.id if not name: return False @@ -1239,6 +1242,7 @@ def save_object( typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] ] = None, allow_pickle: bool = False, + metadata: dict[str, typing.Any] = None, ) -> bool: """Save an object to the Simvue server @@ -1252,6 +1256,8 @@ def save_object( name to associate with this object, by default None allow_pickle : bool, optional whether to allow pickling if all other serialization types fail, by default False + metadata : str | None, optional + any metadata to attach to the artifact Returns ------- @@ -1265,14 +1271,14 @@ def save_object( _name: str = name or f"{obj.__class__.__name__.lower()}_{id(obj)}" try: - Artifact.new_object( - run_id=self.id, + _artifact = Artifact.new_object( name=_name, - category=category, obj=obj, 
allow_pickling=allow_pickle, storage_id=self._storage_id, + metadata=metadata, ) + _artifact.attach_to_run(self.id, category) except (ValueError, RuntimeError) as e: self._error(f"Failed to save object '{_name}' to run '{self.id}': {e}") return False @@ -1291,6 +1297,7 @@ def save_file( name: typing.Optional[ typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] ] = None, + metadata: dict[str, typing.Any] = None, ) -> bool: """Upload file to the server @@ -1306,6 +1313,8 @@ def save_file( whether to preserve the path during storage, by default False name : str, optional name to associate with this file, by default None + metadata : str | None, optional + any metadata to attach to the artifact Returns ------- @@ -1329,15 +1338,15 @@ def save_file( try: # Register file - Artifact.new_file( + _artifact = Artifact.new_file( name=name or stored_file_name, - run_id=self.id, storage_id=self._storage_id, file_path=file_path, offline=self._user_config.run.mode == "offline", file_type=filetype, - category=category, + metadata=metadata, ) + _artifact.attach_to_run(self.id, category) except (ValueError, RuntimeError) as e: self._error(f"Failed to save file: {e}") return False diff --git a/tests/functional/test_run_artifact_upload.py b/tests/functional/test_run_artifact_upload.py index 45f6fd07..9d571dbd 100644 --- a/tests/functional/test_run_artifact_upload.py +++ b/tests/functional/test_run_artifact_upload.py @@ -26,12 +26,12 @@ def test_add_artifact_to_run() -> None: _artifact = Artifact.new_file( name=f"test_{_uuid}", - run_id=_run.id, - category="input", storage_id=None, file_path=pathlib.Path(tempf.name), - file_type=None + file_type=None, + metadata=None ) + _artifact.attach_to_run(_run.id, "input") _run.status = "completed" _run.commit() assert _run.artifacts diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 758e711e..67aa28c8 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -515,13 
+515,6 @@ def test_save_file_online( return variable = capfd.readouterr() - with capfd.disabled(): - if empty_file: - assert ( - variable.out - == "[simvue] WARNING: saving zero-sized files not currently supported\n" - ) - return simvue_run.close() time.sleep(1.0) os.remove(out_name) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 7244109c..d2de0b04 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -64,12 +64,11 @@ def test_artifact_creation_offline() -> None: out_f.write("Hello World!") _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", - run_id=_run.id, file_path=_path, - category="input", storage_id=None, file_type=None, - offline=True + offline=True, + metadata=None ) _folder.commit() _run.commit() diff --git a/tests/unit/test_run_init_metadata.py b/tests/unit/test_run_init_metadata.py deleted file mode 100644 index 0a5f4bda..00000000 --- a/tests/unit/test_run_init_metadata.py +++ /dev/null @@ -1,23 +0,0 @@ -from simvue import Run -import pytest - -@pytest.mark.local -def test_run_init_metadata(): - """ - Check that run.init throws an exception if tuples are passed into metadata dictionary - """ - - x1_lower = 2, - x1_upper = 6, - - run = Run(mode='offline') - - with pytest.raises(RuntimeError) as exc_info: - run.init(metadata={'dataset.x1_lower': x1_lower, 'dataset.x1_upper': x1_upper}, - description="A test to validate inputs passed into metadata dictionary", - retention_period="1 hour", - folder="/simvue_unit_testing", - tags=["simvue_client_unit_tests", "test_run_init_metadata"] - ) - - assert "Input should be a valid integer" in str(exc_info.value) From 24cbbe8961753c216eea8e3c1f9bbdda157a7112 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 10 Jan 2025 15:56:09 +0000 Subject: [PATCH 059/163] Fixed bug where GET is called even if instance constructed from arguments --- simvue/api/objects/alert/fetch.py | 23 ++++++++++++++++++----- simvue/api/objects/base.py | 
25 +++++++++++++++++-------- simvue/api/objects/storage/fetch.py | 9 ++++++--- 3 files changed, 41 insertions(+), 16 deletions(-) diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 9df929f8..f87905b1 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -48,7 +48,7 @@ def get( # Currently no alert filters kwargs.pop("filters", None) - _class_instance = AlertBase(read_only=True, **kwargs) + _class_instance = AlertBase(_local=True, _read_only=True, **kwargs) _url = f"{_class_instance._base_url}" _response = sv_get( _url, @@ -72,19 +72,32 @@ def get( for _entry in _json_response["data"]: _id = _entry.pop("id") if _entry["source"] == "events": - yield _id, EventsAlert(read_only=True, identifier=_id, **_entry) + yield ( + _id, + EventsAlert(_read_only=True, identifier=_id, _local=True, **_entry), + ) elif _entry["source"] == "user": - yield _id, UserAlert(read_only=True, identifier=_id, **_entry) + yield ( + _id, + UserAlert(_read_only=True, identifier=_id, _local=True, **_entry), + ) elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( "threshold" ): yield ( _id, - MetricsThresholdAlert(read_only=True, identifier=_id, **_entry), + MetricsThresholdAlert( + _local=True, _read_only=True, identifier=_id, **_entry + ), ) elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( "range_low" ): - yield _id, MetricsRangeAlert(read_only=True, identifier=_id, **_entry) + yield ( + _id, + MetricsRangeAlert( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) else: raise RuntimeError(f"Unrecognised alert source '{_entry['source']}'") diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 45db73c3..83da3df5 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -116,7 +116,11 @@ def tenant(self, tenant: bool) -> None: class SimvueObject(abc.ABC): def __init__( - self, identifier: str | None = None, _read_only: bool = True, 
**kwargs + self, + identifier: str | None = None, + _read_only: bool = True, + _local: bool = False, + **kwargs, ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) @@ -152,7 +156,12 @@ def __init__( self._staging: dict[str, typing.Any] = {} - if not self._identifier.startswith("offline_") and self._read_only: + # If this object is read-only, but not a local construction, make an API call + if ( + not self._identifier.startswith("offline_") + and self._read_only + and not _local + ): self._staging = self._get() # Recover any locally staged changes if not read-only @@ -264,7 +273,7 @@ def ids( cls, count: int | None = None, offset: int | None = None, **kwargs ) -> list[str]: """Retrieve a list of all object identifiers""" - _class_instance = cls(read_only=True) + _class_instance = cls(_read_only=True, _local=True) if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: raise RuntimeError( f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" @@ -274,8 +283,8 @@ def ids( @classmethod def get( cls, *, count: int | None = None, offset: int | None = None, **kwargs - ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: - _class_instance = cls(read_only=True) + ) -> typing.Generator[tuple[str, typing.Optional["SimvueObject"]], None, None]: + _class_instance = cls(_read_only=True, _local=True) if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: raise RuntimeError( f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" @@ -286,11 +295,11 @@ def get( raise RuntimeError( f"Expected key 'id' for {_class_instance.__class__.__name__.lower()}" ) - yield _id, cls(read_only=True, identifier=_id, **_entry) + yield _id, cls(_read_only=True, identifier=_id, _local=True, **_entry) @classmethod def count(cls, **kwargs) -> int: - _class_instance = 
cls(read_only=True) + _class_instance = cls(_read_only=True) if ( _count := cls._get_all_objects(count=None, offset=None, **kwargs).get( "count" @@ -305,7 +314,7 @@ def count(cls, **kwargs) -> int: def _get_all_objects( cls, count: int | None, offset: int | None, **kwargs ) -> dict[str, typing.Any]: - _class_instance = cls(read_only=True) + _class_instance = cls(_read_only=True) _url = f"{_class_instance._base_url}" _response = sv_get( _url, diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 4bc6cfd5..180d5303 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -39,7 +39,7 @@ def get( # Currently no storage filters kwargs.pop("filters", None) - _class_instance = StorageBase(read_only=True, **kwargs) + _class_instance = StorageBase(_local=True, _read_only=True, **kwargs) _url = f"{_class_instance._base_url}" _response = sv_get( _url, @@ -58,11 +58,14 @@ def get( for _entry in _json_response: _id = _entry.pop("id") if _entry["type"] == "S3": - yield _id, S3Storage(read_only=True, identifier=_id, **_entry) + yield ( + _id, + S3Storage(_local=True, _read_only=True, identifier=_id, **_entry), + ) elif _entry["type"] == "File": yield ( _id, - FileStorage(read_only=True, identifier=_id, **_entry), + FileStorage(_local=True, _read_only=True, identifier=_id, **_entry), ) else: raise RuntimeError(f"Unrecognised storage type '{_entry['type']}'") From 50d5dc14d150c60f0811fe8994af35d2a12d30f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 13 Jan 2025 12:28:10 +0000 Subject: [PATCH 060/163] Fix data streaming to download --- simvue/api/objects/artifact.py | 56 ++++++++++++---------------------- simvue/client.py | 12 ++++++-- 2 files changed, 29 insertions(+), 39 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 1fcd6cd2..01bf27a0 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -14,7 
+14,6 @@ import pydantic import os.path import sys -import requests try: from typing import Self @@ -52,7 +51,7 @@ def __init__( # If the artifact is an online instance, need a place to store the response # from the initial creation - self._init_data: dict[str, dict] | None = None + self._init_data: dict[str, dict] = {} self._staging |= {"runs": []} @classmethod @@ -325,18 +324,28 @@ def from_name( cls, run_id: str, name: str, **kwargs ) -> typing.Union["Artifact", None]: _temp = Artifact(**kwargs) - _url = _temp._base_url / f"runs/{run_id}/artifacts" + _url = URL(_temp._user_config.server.url) / f"runs/{run_id}/artifacts" _response = sv_get(url=f"{_url}", params={"name": name}, headers=_temp._headers) _json_response = get_json_from_response( + expected_type=list, response=_response, expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", ) - if _response.status_code == http.HTTPStatus.NOT_FOUND: + if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: raise ObjectNotFoundError(_temp._label, name, extra=f"for run '{run_id}'") - return Artifact(run=run_id, **_json_response) + _first_result: dict[str, typing.Any] = _json_response[0] + _artifact_id: str = _first_result.pop("id") + + return Artifact( + identifier=_artifact_id, + run=run_id, + **_first_result, + _read_only=True, + _local=True, + ) @property def download_url(self) -> URL | None: @@ -369,7 +378,8 @@ def get_category(self, run_id: str) -> Category: return _json_response["category"] @pydantic.validate_call - def download(self, output_file: pathlib.Path) -> pathlib.Path | None: + def download_content(self) -> typing.Generator[bytes, None, None]: + """Stream artifact content""" if not self.download_url: raise ValueError( f"Could not retrieve URL for artifact '{self._identifier}'" @@ -390,33 +400,7 @@ def download(self, output_file: pathlib.Path) -> pathlib.Path | None: _total_length: str | None = 
_response.headers.get("content-length") - if not output_file.parent.is_dir(): - raise ValueError( - f"Cannot write to '{output_file.parent}', not a directory." - ) - - with output_file.open("wb") as out_f: - if _total_length is None: - out_f.write(_response.content) - else: - for data in _response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE): - out_f.write(data) - - return output_file if output_file.exists() else None - - def download_content(self) -> typing.Any: - """Download content of artifact from storage""" - if not self.storage_url: - raise ValueError( - f"Could not retrieve URL for artifact '{self._identifier}'" - ) - - _response = requests.get(self.storage_url, timeout=DOWNLOAD_TIMEOUT) - - get_json_from_response( - response=_response, - expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of content for {self._label} '{self._identifier}'", - ) - - return _response.content + if _total_length is None: + yield _response.content + else: + yield from _response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE) diff --git a/simvue/client.py b/simvue/client.py index f996af32..d012f8a2 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -45,9 +45,15 @@ def _download_artifact_to_file( artifact: Artifact, output_dir: pathlib.Path | None ) -> None: - _file_name = os.path.basename(artifact.name) + try: + _file_name = os.path.basename(artifact.name) + except AttributeError: + _file_name = os.path.basename(artifact) _output_file = (output_dir or pathlib.Path.cwd()).joinpath(_file_name) - artifact.download(_output_file) + + with _output_file.open("wb") as out_f: + for content in artifact.download_content(): + out_f.write(content) class Client: @@ -501,7 +507,7 @@ def get_artifact( server_token=self._user_config.server.token, ) - _content = _artifact.download_content() + _content = b"".join(_artifact.download_content()) _deserialized_content: typing.Optional[DeserializedContent] = deserialize_data( _content, _artifact.type, allow_pickle From 
c12358bc12afa80a3dc11ec71f9935e15360e613 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 13 Jan 2025 18:14:00 +0000 Subject: [PATCH 061/163] Fix artifact file test --- simvue/api/objects/artifact.py | 6 ++++++ simvue/client.py | 2 +- tests/functional/test_client.py | 8 ++++---- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 01bf27a0..fc91ec06 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -336,6 +336,12 @@ def from_name( if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: raise ObjectNotFoundError(_temp._label, name, extra=f"for run '{run_id}'") + if (_n_res := len(_json_response)) > 1: + raise RuntimeError( + f"Expected single result for artifact '{name}' for run '{run_id}'" + f" but got {_n_res}" + ) + _first_result: dict[str, typing.Any] = _json_response[0] _artifact_id: str = _first_result.pop("id") diff --git a/simvue/client.py b/simvue/client.py index d012f8a2..37b6510b 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -587,7 +587,7 @@ def get_artifacts_as_files( if there was a failure retrieving artifacts from the server """ _artifacts: typing.Generator[tuple[str, Artifact], None, None] = Artifact.get( - runs=[run_id], category=category + runs=json.dumps([run_id]), category=category ) # type: ignore with ThreadPoolExecutor(CONCURRENT_DOWNLOADS) as executor: diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 5162cc3a..e09428ce 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -6,6 +6,7 @@ import os.path import typing import glob +import pathlib import time import tempfile import simvue.client as svc @@ -112,14 +113,13 @@ def test_get_artifact_as_file( ) -> None: with tempfile.TemporaryDirectory() as tempd: client = svc.Client() + _file_name = create_test_run[1][f"file_{file_id}"] client.get_artifact_as_file( 
create_test_run[1]["run_id"], - name=create_test_run[1][f"file_{file_id}"], + name=_file_name, output_dir=tempd, ) - assert create_test_run[1][f"file_{file_id}"] in [ - os.path.basename(i) for i in glob.glob(os.path.join(tempd, "*")) - ] + assert pathlib.Path(tempd).joinpath(_file_name).exists(), f"Failed to download '{_file_name}'" @pytest.mark.dependency From 50d118710bf57f387fa364765e8913c3550a00b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 14 Jan 2025 09:34:56 +0000 Subject: [PATCH 062/163] Try using 'url' for download --- simvue/api/objects/artifact.py | 2 +- tests/unit/test_artifact.py | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index fc91ec06..f7c06f75 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -356,7 +356,7 @@ def from_name( @property def download_url(self) -> URL | None: """Retrieve the URL for downloading this artifact""" - return self.url / "download" if self._identifier else None + return self._get_attribute("url") @property def runs(self) -> typing.Generator[str, None, None]: diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index d2de0b04..98999c66 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -39,11 +39,8 @@ def test_artifact_creation_online() -> None: except Exception as e: _failed.append((member, f"{e}")) assert _artifact.name == f"test_artifact_{_uuid}" - os.remove(temp_f.name) - _artifact.download(temp_f.name) - assert os.path.exists(temp_f.name) - with open(temp_f.name) as in_f: - assert in_f.readline() == f"Hello World! {_uuid}\n" + _content = b"".join(_artifact.download_content()).decode("UTF-8") + assert _content == f"Hello World! 
{_uuid}\n" _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) if _failed: From 2a3a15586c6df934b3c4d928c40aa26d6a3de145 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 14 Jan 2025 16:29:31 +0000 Subject: [PATCH 063/163] Break down alert creation function --- README.md | 17 +- simvue/api/objects/alert/events.py | 4 + simvue/api/objects/alert/metrics.py | 8 + simvue/api/objects/alert/user.py | 4 + simvue/api/objects/artifact.py | 5 +- simvue/executor.py | 4 +- simvue/metadata.py | 31 ++- simvue/run.py | 356 ++++++++++++++++------------ tests/conftest.py | 21 +- tests/functional/test_run_class.py | 2 +- tests/unit/test_artifact.py | 2 +- 11 files changed, 266 insertions(+), 188 deletions(-) diff --git a/README.md b/README.md index ce8a1f59..364e85a0 100644 --- a/README.md +++ b/README.md @@ -76,14 +76,15 @@ if __name__ == "__main__": run.save_file('params.in', 'input') # Add an alert (the alert definition will be created if necessary) - run.create_alert(name='loss-too-high', # Name - source='metrics', # Source - rule='is above', # Rule - metric='loss', # Metric - frequency=1, # Frequency - window=1, # Window - threshold=10, # Threshold - notification='email') # Notification type + run.create_metric_threshold_alert( + name='loss-too-high', # Name + rule='is above', # Rule + metric='loss', # Metric + frequency=1, # Frequency + window=1, # Window + threshold=10, # Threshold + notification='email' # Notification type + ) ... 
diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 4ccab804..844ab5de 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -38,6 +38,7 @@ def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + description: str, notification: typing.Literal["none", "email"], pattern: str, frequency: pydantic.PositiveInt, @@ -52,6 +53,8 @@ def new( ---------- name : str name of the alert + description : str + description for this alert notification : "none" | "email" configure notifications sent by this alert pattern : str @@ -68,6 +71,7 @@ def new( _alert_definition = {"pattern": pattern, "frequency": frequency} _alert = EventsAlert( name=name, + description=description, notification=notification, source="events", alert=_alert_definition, diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index e91a41ac..080900b9 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -44,6 +44,7 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], metric: str, + description: str, notification: typing.Literal["none", "email"], aggregation: Aggregate, rule: typing.Literal["is above", "is below"], @@ -61,6 +62,8 @@ def new( ---------- name : str name to assign to this alert + description : str + description for this alert metric : str the metric to monitor notification : "none" | "email" @@ -91,6 +94,7 @@ def new( } _alert = MetricsThresholdAlert( name=name, + description=description, notification=notification, source="metrics", alert=_alert_definition, @@ -119,6 +123,7 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], metric: str, + description: str, notification: typing.Literal["none", "email"], aggregation: Aggregate, rule: typing.Literal["is inside range", "is outside range"], @@ -139,6 +144,8 @@ def new( name to assign to this alert metric : str the metric to 
monitor + description : str + description for this alert notification : "none" | "email" the notification settings for this alert aggregation : "average" | "sum" | "at least one" | "all" @@ -173,6 +180,7 @@ def new( } _alert = MetricsThresholdAlert( name=name, + description=description, notification=notification, source="metrics", enabled=enabled, diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 1381fac6..6cd0843d 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -29,6 +29,7 @@ def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + description: str, notification: typing.Literal["none", "email"], enabled: bool = True, offline: bool = False, @@ -41,6 +42,8 @@ def new( ---------- name : str the name to assign to this alert + description : str + description for this alert notification : "none" | "email" configure notification settings for this alert enabled : bool, optional @@ -51,6 +54,7 @@ def new( """ _alert = UserAlert( name=name, + description=description, notification=notification, source="user", enabled=enabled, diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index f7c06f75..2ff7e7c0 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -391,10 +391,7 @@ def download_content(self) -> typing.Generator[bytes, None, None]: f"Could not retrieve URL for artifact '{self._identifier}'" ) _response = sv_get( - f"{self.download_url}", - headers=self._headers, - timeout=DOWNLOAD_TIMEOUT, - params={"storage": self.storage}, + f"{self.download_url}", timeout=DOWNLOAD_TIMEOUT, headers=None ) get_json_from_response( diff --git a/simvue/executor.py b/simvue/executor.py index e73284b8..090b4d79 100644 --- a/simvue/executor.py +++ b/simvue/executor.py @@ -261,8 +261,8 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: ) ) - self._alert_ids[identifier] = self._runner.create_alert( - 
name=f"{identifier}_exit_status", source="user" + self._alert_ids[identifier] = self._runner.create_user_alert( + name=f"{identifier}_exit_status" ) if not self._alert_ids[identifier]: diff --git a/simvue/metadata.py b/simvue/metadata.py index 673baac5..eeae8448 100644 --- a/simvue/metadata.py +++ b/simvue/metadata.py @@ -44,14 +44,12 @@ def git_info(repository: str) -> dict[str, typing.Any]: try: git_repo = git.Repo(repository, search_parent_directories=True) current_commit: git.Commit = git_repo.head.commit - author_list: set[str] = set( + author_list: set[str] = { email for commit in git_repo.iter_commits("--all") if "noreply" not in (email := (commit.author.email or "")) and "[bot]" not in (commit.author.name or "") - ) - - ref: str = current_commit.hexsha + } # In the case where the repository is dirty blame should point to the # current developer, not the person responsible for the latest commit @@ -60,19 +58,20 @@ def git_info(repository: str) -> dict[str, typing.Any]: else: blame = current_commit.committer.email - for tag in git_repo.tags: - if tag.commit == current_commit: - ref = tag.name - break - + ref: str = next( + (tag.name for tag in git_repo.tags if tag.commit == current_commit), + current_commit.hexsha, + ) return { - "git.authors": json.dumps(list(author_list)), - "git.ref": ref, - "git.msg": current_commit.message.strip(), - "git.time_stamp": simvue_timestamp(current_commit.committed_datetime), - "git.blame": blame, - "git.url": git_repo.remote().url, - "git.dirty": dirty, + "git": { + "authors": json.dumps(list(author_list)), + "ref": ref, + "msg": current_commit.message.strip(), + "time_stamp": simvue_timestamp(current_commit.committed_datetime), + "blame": blame, + "url": git_repo.remote().url, + "dirty": dirty, + } } except (git.InvalidGitRepositoryError, ValueError): return {} diff --git a/simvue/run.py b/simvue/run.py index 8b1fa7e4..4eaf651d 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -29,9 +29,11 @@ import click import psutil +from 
simvue.api.objects.alert.base import AlertBase from simvue.api.objects.alert.fetch import Alert from simvue.api.objects.folder import Folder, get_folder_from_path from simvue.exception import ObjectNotFoundError, SimvueRunError +from simvue.utilities import prettify_pydantic from .config.user import SimvueConfiguration @@ -78,6 +80,9 @@ def check_run_initialised( ) -> typing.Callable[..., typing.Any]: @functools.wraps(function) def _wrapper(self: Self, *args: typing.Any, **kwargs: typing.Any) -> typing.Any: + # Tidy pydantic errors + _function = prettify_pydantic(function) + if self._user_config.run.mode == "disabled": return True @@ -90,7 +95,7 @@ def _wrapper(self: Self, *args: typing.Any, **kwargs: typing.Any) -> typing.Any: "Simvue Run must be initialised before calling " f"'{function.__name__}'" ) - return function(self, *args, **kwargs) + return _function(self, *args, **kwargs) return _wrapper @@ -1636,194 +1641,251 @@ def add_alerts( return False + def _attach_alert_to_run(self, alert: AlertBase) -> str | None: + # Check if the alert already exists + _alert_id: typing.Optional[str] = None + + for _, _existing_alert in Alert.get(): + if _existing_alert.compare(alert): + _alert_id = _existing_alert.id + logger.info("Existing alert found with id: %s", _existing_alert.id) + break + + if not _alert_id: + alert.commit() + _alert_id = alert.id + + self._sv_obj.alerts = [_alert_id] + + self._sv_obj.commit() + + return _alert_id + @skip_if_failed("_aborted", "_suppress_errors", None) @check_run_initialised @pydantic.validate_call - def create_alert( + def create_metric_range_alert( self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - source: typing.Literal["events", "metrics", "user"] = "metrics", - description: typing.Optional[str] = None, - frequency: typing.Optional[pydantic.PositiveInt] = None, + metric: str, + range_low: float, + range_high: float, + rule: typing.Literal["is inside range", "is outside range"], + *, + description: str | None = 
None, window: pydantic.PositiveInt = 5, - rule: typing.Optional[ - typing.Literal[ - "is above", "is below", "is inside range", "is outside range" - ] - ] = None, - metric: typing.Optional[str] = None, - threshold: typing.Optional[float] = None, - range_low: typing.Optional[float] = None, - range_high: typing.Optional[float] = None, - aggregation: typing.Optional[ - typing.Literal["average", "sum", "at least one", "all"] + frequency: pydantic.PositiveInt = 1, + aggregation: typing.Literal[ + "average", "sum", "at least one", "all" ] = "average", notification: typing.Literal["email", "none"] = "none", - pattern: typing.Optional[str] = None, trigger_abort: bool = False, - ) -> typing.Optional[str]: - """Creates an alert with the specified name (if it doesn't exist) + ) -> str | None: + """Creates a metric range alert with the specified name (if it doesn't exist) and applies it to the current run. If alert already exists it will not be duplicated. - Note available arguments depend on the alert source: - - Event - ===== - - Alerts triggered based on the contents of an event message, arguments are: - - frequency - - pattern - - Metrics - ======= + Parameters + ---------- + name : str + name of alert + metric : str + metric to monitor + range_low : float + the lower bound value + range_high : float, optional + the upper bound value + rule : Literal['is inside range', 'is outside range'] + rule defining range alert conditions + description : str, optional + description for this alert, default None + window : PositiveInt, optional + time period in seconds over which metrics are averaged, by default 5 + frequency : PositiveInt, optional + frequency at which to check alert condition in seconds, by default 1 + aggregation : Literal['average', 'sum', 'at least one', 'all'], optional + method to use when aggregating metrics within time window, default 'average'. 
+ notification : Literal['email', 'none'], optional + whether to notify on trigger, by default "none" + trigger_abort : bool, optional + whether this alert can trigger a run abort, default False - Alerts triggered based on metric value condictions, arguments are: - - frequency - - rule - - window - - aggregation - - metric - - threshold / (range_low, range_high) + Returns + ------- + str | None + returns the created alert ID if successful - User - ==== + """ + _alert = MetricsRangeAlert.new( + name=name, + description=description, + metric=metric, + window=window, + aggregation=aggregation, + notification=notification, + rule=rule, + range_low=range_low, + range_high=range_high, + frequency=frequency or 60, + offline=self._user_config.run.mode == "offline", + ) + _alert.abort = trigger_abort + return self._attach_alert_to_run(_alert) - User defined alerts, manually triggered. + @skip_if_failed("_aborted", "_suppress_errors", None) + @check_run_initialised + @pydantic.validate_call + def create_metric_threshold_alert( + self, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + metric: str, + threshold: float, + rule: typing.Literal["is above", "is below"], + *, + description: str | None = None, + window: pydantic.PositiveInt = 5, + frequency: pydantic.PositiveInt = 1, + aggregation: typing.Literal[ + "average", "sum", "at least one", "all" + ] = "average", + notification: typing.Literal["email", "none"] = "none", + trigger_abort: bool = False, + ) -> str | None: + """Creates a metric threshold alert with the specified name (if it doesn't exist) + and applies it to the current run. If alert already exists it will + not be duplicated. Parameters ---------- name : str name of alert - source : Literal['events', 'metrics', 'user'], optional - the source which triggers this alert based on status, either - event based, metric values or manual user defined trigger. By default "metrics". 
+        metric : str
+            metric to monitor
+        threshold : float
+            the threshold value
+        rule : Literal['is above', 'is below']
+            rule defining threshold alert conditions
         description : str, optional
-            description for this alert
+            description for this alert, default None
         window : PositiveInt, optional
             time period in seconds over which metrics are averaged, by default 5
-        rule : Literal['is above', 'is below', 'is inside', 'is outside range'], optional
-            rule defining metric based alert conditions, by default None
-        metric : str, optional
-            metric to monitor, by default None
-        threshold : float, optional
-            the threshold value if 'rule' is 'is below' or 'is above', by default None
-        range_low : float, optional
-            the lower bound value if 'rule' is 'is inside range' or 'is outside range', by default None
-        range_high : float, optional
-            the upper bound value if 'rule' is 'is inside range' or 'is outside range', by default None
+        frequency : PositiveInt, optional
+            frequency at which to check alert condition in seconds, by default 1
         aggregation : Literal['average', 'sum', 'at least one', 'all'], optional
             method to use when aggregating metrics within time window, default 'average'. 
notification : Literal['email', 'none'], optional whether to notify on trigger, by default "none" - pattern : str, optional - for event based alerts pattern to look for, by default None trigger_abort : bool, optional - whether this alert can trigger a run abort + whether this alert can trigger a run abort, default False Returns ------- str | None returns the created alert ID if successful - """ - if not self._sv_obj: - self._error("Cannot add alert, run not initialised") - return None - if rule in ("is below", "is above") and threshold is None: - self._error("threshold must be defined for the specified alert type") - return None + """ + _alert = MetricsThresholdAlert.new( + name=name, + metric=metric, + description=description, + threshold=threshold, + rule=rule, + window=window, + frequency=frequency, + aggregation=aggregation, + notification=notification, + offline=self._user_config.run.mode == "offline", + ) + _alert.abort = trigger_abort + return self._attach_alert_to_run(_alert) - if rule in ("is outside range", "is inside range") and ( - range_low is None or range_high is None - ): - self._error( - "range_low and range_high must be defined for the specified alert type" - ) - return None - - _alert: EventsAlert | MetricsRangeAlert | MetricsThresholdAlert | UserAlert - - if source == "metrics" and threshold: - if not metric or not aggregation or not rule: - self._error("Missing arguments for alert of type 'metric threshold'") - return None - - _alert = MetricsThresholdAlert.new( - name=name, - metric=metric, - window=window, - aggregation=aggregation, - rule=rule, - notification=notification, - threshold=threshold, - frequency=frequency or 60, - offline=self._user_config.run.mode == "offline", - ) - elif source == "metrics": - if ( - not metric - or not aggregation - or not rule - or not range_low - or not range_high - ): - self._error("Missing arguments for alert of type 'metric range'") - return None - - _alert = MetricsRangeAlert.new( - name=name, - 
metric=metric, - window=window, - aggregation=aggregation, - notification=notification, - rule=rule, - range_low=range_low, - range_high=range_high, - frequency=frequency or 60, - offline=self._user_config.run.mode == "offline", - ) - elif source == "events": - if not pattern: - self._error("Missing arguments for alert of type 'events'") - return None - - _alert = EventsAlert.new( - name=name, - pattern=pattern, - notification=notification, - frequency=frequency or 60, - offline=self._user_config.run.mode == "offline", - ) - else: - _alert = UserAlert.new( - name=name, - notification=notification, - offline=self._user_config.run.mode == "offline", - ) + @skip_if_failed("_aborted", "_suppress_errors", None) + @check_run_initialised + @pydantic.validate_call + def create_event_alert( + self, + name: str, + pattern: str, + *, + description: str | None = None, + frequency: pydantic.PositiveInt = 1, + notification: typing.Literal["email", "none"] = "none", + trigger_abort: bool = False, + ) -> str | None: + """Creates an events alert with the specified name (if it doesn't exist) + and applies it to the current run. If alert already exists it will + not be duplicated. 
-        _alert.abort = trigger_abort
+        Parameters
+        ----------
+        name : str
+            name of alert
+        pattern : str
+            pattern to look for in events for this alert
+        frequency : PositiveInt, optional
+            frequency at which to check alert condition in seconds, by default 1
+        notification : Literal['email', 'none'], optional
+            whether to notify on trigger, by default "none"
+        trigger_abort : bool, optional
+            whether this alert can trigger a run abort

-        # Check if the alert already exists
-        _alert_id: typing.Optional[str] = None
+        Returns
+        -------
+        str | None
+            returns the created alert ID if successful

-        for _, _existing_alert in Alert.get():
-            if _existing_alert.compare(_alert):
-                _alert_id = _existing_alert.id
-                logger.info("Existing alert found with id: %s", _existing_alert.id)
-                break
+        """
+        _alert = EventsAlert.new(
+            name=name,
+            description=description,
+            pattern=pattern,
+            notification=notification,
+            frequency=frequency,
+            offline=self._user_config.run.mode == "offline",
+        )
+        _alert.abort = trigger_abort
+        return self._attach_alert_to_run(_alert)

-        if not _alert_id:
-            _alert.commit()
-            _alert_id = _alert.id
+    @skip_if_failed("_aborted", "_suppress_errors", None)
+    @check_run_initialised
+    @pydantic.validate_call
+    def create_user_alert(
+        self,
+        name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)],
+        *,
+        description: str | None = None,
+        notification: typing.Literal["email", "none"] = "none",
+        trigger_abort: bool = False,
+    ) -> str | None:
+        """Creates a user alert with the specified name (if it doesn't exist)
+        and applies it to the current run. If alert already exists it will
+        not be duplicated.
- self._sv_obj.alerts = [_alert_id] + Parameters + ---------- + name : str + name of alert + description : str, optional + description for this alert, default None + notification : Literal['email', 'none'], optional + whether to notify on trigger, by default "none" + trigger_abort : bool, optional + whether this alert can trigger a run abort, default False - self._sv_obj.commit() + Returns + ------- + str | None + returns the created alert ID if successful - return _alert_id + """ + _alert = UserAlert.new( + name=name, + notification=notification, + description=description, + offline=self._user_config.run.mode == "offline", + ) + _alert.abort = trigger_abort + return self._attach_alert_to_run(_alert) @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised diff --git a/tests/conftest.py b/tests/conftest.py index 5ea1ef28..b9e7845f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -140,30 +140,33 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur TEST_DATA['created_alerts'] = [] for i in range(5): - run.create_alert(name=f"test_alert/alert_{i}", source="events", frequency=1, pattern=TEST_DATA['event_contains']) + run.create_event_alert( + name=f"test_alert/alert_{i}", + frequency=1, + pattern=TEST_DATA['event_contains'] + ) TEST_DATA['created_alerts'].append(f"test_alert/alert_{i}") - run.create_alert( + run.create_metric_threshold_alert( name='test_alert/value_below_1', - source='metrics', frequency=1, rule='is below', threshold=1, metric='metric_counter', window=2 ) - run.create_alert( - name='test_alert/value_above_1', - source='metrics', + run.create_metric_range_alert( + name='test_alert/value_within_1', frequency=1, - rule='is above', - threshold=1, + rule = "is inside range", + range_low = 2, + range_high = 5, metric='metric_counter', window=2 ) TEST_DATA['created_alerts'] += [ "test_alert/value_above_1", - "test_alert/value_below_1" + "test_alert/value_within_1" ] for i in range(5): diff --git 
a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 67aa28c8..1451f335 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -726,7 +726,7 @@ def testing_exit(status: int) -> None: run.config(resources_metrics_interval=1) run._heartbeat_interval = 1 run._testing = True - alert_id = run.create_alert("abort_test", source="user", trigger_abort=True) + alert_id = run.create_user_alert("abort_test", trigger_abort=True) run.add_process(identifier="forever_long", executable="bash", c="sleep 10") time.sleep(2) run.log_alert(alert_id, "critical") diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 98999c66..693478ce 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -40,7 +40,7 @@ def test_artifact_creation_online() -> None: _failed.append((member, f"{e}")) assert _artifact.name == f"test_artifact_{_uuid}" _content = b"".join(_artifact.download_content()).decode("UTF-8") - assert _content == f"Hello World! {_uuid}\n" + assert _content == f"Hello World! 
{_uuid}" _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) if _failed: From cdfc572984324e13b31be58db1a1b3159f41786b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 14 Jan 2025 17:30:38 +0000 Subject: [PATCH 064/163] Few more test fixes and using correct query for retrieving an artifact --- simvue/api/objects/alert/events.py | 15 +++++++++++++-- simvue/api/objects/alert/metrics.py | 14 ++++++++++---- simvue/api/objects/alert/user.py | 4 ++-- simvue/client.py | 10 +++++++--- tests/conftest.py | 2 +- tests/functional/test_client.py | 5 ++++- 6 files changed, 37 insertions(+), 13 deletions(-) diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 844ab5de..3489dbd1 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -38,7 +38,7 @@ def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - description: str, + description: str | None, notification: typing.Literal["none", "email"], pattern: str, frequency: pydantic.PositiveInt, @@ -53,7 +53,7 @@ def new( ---------- name : str name of the alert - description : str + description : str | None description for this alert notification : "none" | "email" configure notifications sent by this alert @@ -89,6 +89,17 @@ def __init__(self, alert: EventsAlert) -> None: """Initialise an alert definition with its parent alert""" self._sv_obj = alert + def compare(self, other: "EventAlertDefinition") -> bool: + if not isinstance(other, EventAlertDefinition): + return False + + return all( + [ + self.frequency == other.frequency, + self.pattern == other.pattern, + ] + ) + @property def pattern(self) -> str: """Retrieve the event log pattern monitored by this alert""" diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 080900b9..b41ee1d7 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -44,7 
+44,7 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], metric: str, - description: str, + description: str | None, notification: typing.Literal["none", "email"], aggregation: Aggregate, rule: typing.Literal["is above", "is below"], @@ -62,7 +62,7 @@ def new( ---------- name : str name to assign to this alert - description : str + description : str | None description for this alert metric : str the metric to monitor @@ -123,7 +123,7 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], metric: str, - description: str, + description: str | None, notification: typing.Literal["none", "email"], aggregation: Aggregate, rule: typing.Literal["is inside range", "is outside range"], @@ -144,7 +144,7 @@ def new( name to assign to this alert metric : str the metric to monitor - description : str + description : str | None description for this alert notification : "none" | "email" the notification settings for this alert @@ -255,6 +255,9 @@ class MetricThresholdAlertDefinition(MetricsAlertDefinition): """Alert definition for metric threshold alerts""" def compare(self, other: "MetricThresholdAlertDefinition") -> bool: + if not isinstance(other, MetricThresholdAlertDefinition): + return False + return all([super().compare(other), self.threshold == other.threshold]) @property @@ -269,6 +272,9 @@ class MetricRangeAlertDefinition(MetricsAlertDefinition): """Alert definition for metric range alerts""" def compare(self, other: "MetricRangeAlertDefinition") -> bool: + if not isinstance(other, MetricRangeAlertDefinition): + return False + return all( [ super().compare(other), diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 6cd0843d..5e94a814 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -29,7 +29,7 @@ def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - description: str, + description: str | None, notification: 
typing.Literal["none", "email"], enabled: bool = True, offline: bool = False, @@ -42,7 +42,7 @@ def new( ---------- name : str the name to assign to this alert - description : str + description : str | None description for this alert notification : "none" | "email" configure notification settings for this alert diff --git a/simvue/client.py b/simvue/client.py index 37b6510b..6a1a2be3 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -452,7 +452,11 @@ def list_artifacts(self, run_id: str) -> typing.Generator[Artifact, None, None]: def _retrieve_artifacts_from_server( self, run_id: str, name: str, count: int | None = None ) -> typing.Generator[tuple[str, Artifact], None, None]: - return Artifact.get(runs=json.dumps([run_id]), name=name, count=count) # type: ignore + return Artifact.get( + runs=json.dumps([run_id]), + filters=json.dumps([f"name == {name}"]), + count=count, + ) # type: ignore @prettify_pydantic @pydantic.validate_call @@ -549,8 +553,8 @@ def get_artifact_as_file( try: _id, _artifact = next(_artifacts) - except StopIteration: - raise ValueError(f"No artifact '{name}' found for run '{run_id}'") + except StopIteration as e: + raise ValueError(f"No artifact '{name}' found for run '{run_id}'") from e _download_artifact_to_file(_artifact, output_dir) diff --git a/tests/conftest.py b/tests/conftest.py index b9e7845f..f16a14e4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -165,7 +165,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur window=2 ) TEST_DATA['created_alerts'] += [ - "test_alert/value_above_1", + "test_alert/value_below_1", "test_alert/value_within_1" ] diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index e09428ce..6b8a0def 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -39,7 +39,9 @@ def test_get_alerts(create_test_run: tuple[sv_run.Run, dict], from_run: bool) -> assert alert["alert"]["status"]["current"] == "critical" else: 
assert (triggered_alerts_full := client.get_alerts(names_only=True, critical_only=False)) - assert all(a in triggered_alerts_full for a in run_data['created_alerts']) + + for alert in run_data["created_alerts"]: + assert alert in triggered_alerts_full, f"Alert '{alert}' was not triggered" @pytest.mark.dependency @@ -112,6 +114,7 @@ def test_get_artifact_as_file( create_test_run: tuple[sv_run.Run, dict], file_id: int ) -> None: with tempfile.TemporaryDirectory() as tempd: + tempd = os.getcwd() client = svc.Client() _file_name = create_test_run[1][f"file_{file_id}"] client.get_artifact_as_file( From e48d6ef4db4776ba1735a90111374ef621f32d10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 15 Jan 2025 17:22:54 +0000 Subject: [PATCH 065/163] Further client class and test fixes --- simvue/api/objects/alert/metrics.py | 5 +- simvue/api/objects/run.py | 2 +- simvue/client.py | 85 +++++++++++------------------ simvue/converters.py | 1 + simvue/run.py | 2 +- tests/conftest.py | 4 +- tests/functional/test_client.py | 16 ++++-- tests/functional/test_run_class.py | 5 +- 8 files changed, 56 insertions(+), 64 deletions(-) diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index b41ee1d7..f8c4ea91 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -114,7 +114,10 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: super().__init__(identifier, **kwargs) def compare(self, other: "MetricsRangeAlert") -> bool: - return all([self.alert.compare(other.alert), super().compare(other)]) + if not super().compare(other): + return False + + return self.alert.compare(other) @classmethod @pydantic.validate_call diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index c59779e9..ac3f9a49 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -111,7 +111,7 @@ def ttl(self) -> int: @ttl.setter @write_only @pydantic.validate_call - def 
ttl(self, time_seconds: pydantic.NonNegativeInt) -> None: + def ttl(self, time_seconds: pydantic.NonNegativeInt | None) -> None: """Update the retention period for this run""" self._staging["ttl"] = time_seconds diff --git a/simvue/client.py b/simvue/client.py index 6a1a2be3..f3ac145d 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -187,7 +187,7 @@ def get_runs( count_limit: typing.Optional[pydantic.PositiveInt] = 100, start_index: typing.Optional[pydantic.PositiveInt] = 0, show_shared: bool = False, - ) -> typing.Union[DataFrame, list[Run], None]: + ) -> typing.Union[DataFrame, typing.Generator[tuple[str, Run]], None]: """Retrieve all runs matching filters. Parameters @@ -230,19 +230,20 @@ def get_runs( if not show_shared: filters = (filters or []) + ["user == self"] + _runs = Run.get( + count=count_limit, + offset=start_index, + filters=json.dumps(filters), + return_basic=True, + return_metrics=metrics, + return_alerts=alerts, + return_system=system, + return_metadata=metadata, + ) + if output_format == "objects": - return dict( - Run.get( - count=count_limit, - offset=start_index, - filters=json.dumps(filters), - return_basic=True, - return_metrics=metrics, - return_alerts=alerts, - return_system=system, - return_metadata=metadata, - ) - ) + return _runs + _params: dict[str, bool | str] = { "filters": json.dumps(filters), "return_basic": True, @@ -696,8 +697,8 @@ def get_metrics_names(self, run_id: str) -> typing.Generator[str, None, None]: """ _run = Run(identifier=run_id) - for metric in _run.metrics: - yield metric.name + for id, _ in _run.metrics: + yield id def _get_run_metrics_from_server( self, @@ -721,14 +722,12 @@ def _get_run_metrics_from_server( params=params, ) - json_response = get_json_from_response( + return get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of metrics '{metric_names}' in " f"runs '{run_ids}'", response=metrics_response, ) - return json_response - @prettify_pydantic @pydantic.validate_call 
def get_metric_values( @@ -792,51 +791,33 @@ def get_metric_values( "'xaxis=timestamp'" ) - if run_filters is not None: - if not (filtered_runs := self.get_runs(filters=run_filters)): - return None - - run_ids = [run["id"] for run in filtered_runs if run["id"]] - - if use_run_names: - run_labels = [run["name"] for run in filtered_runs] - elif run_ids is not None: - if use_run_names: - run_labels = [ - self.get_run_name_from_id(run_id) for run_id in run_ids if run_id - ] - else: - raise AssertionError( - "Expected either argument 'run_ids' or 'run_filters' for get_metric_values" - ) - - if not run_ids or not all(run_ids): - raise ValueError( - f"Expected list of run identifiers for 'run_ids' but got '{run_ids}'" - ) + _args = {"filters": json.dumps(run_filters)} if run_filters else {} - if not use_run_names: - run_labels = run_ids + _run_data = dict(Run.get(**_args)) - if run_metrics := self._get_run_metrics_from_server( + if _run_metrics := self._get_run_metrics_from_server( metric_names=metric_names, - run_ids=run_ids, + run_ids=run_ids or list(_run_data.keys()), xaxis=xaxis, aggregate=aggregate, max_points=max_points, ): - return ( - aggregated_metrics_to_dataframe( - run_metrics, xaxis=xaxis, parse_to=output_format + if aggregate: + return aggregated_metrics_to_dataframe( + _run_metrics, xaxis=xaxis, parse_to=output_format ) - if aggregate - else parse_run_set_metrics( - run_metrics, + else: + if use_run_names: + _run_metrics = { + _run_data[key].name: _run_metrics[key] + for key in _run_metrics.keys() + } + return parse_run_set_metrics( + _run_metrics, xaxis=xaxis, - run_labels=run_labels, + run_labels=list(_run_data.keys()), parse_to=output_format, ) - ) else: return None diff --git a/simvue/converters.py b/simvue/converters.py index 705392ac..a77d5ac8 100644 --- a/simvue/converters.py +++ b/simvue/converters.py @@ -122,6 +122,7 @@ def parse_run_set_metrics( """ if not request_response_data: return pandas.DataFrame({}) if parse_to == "dataframe" else {} + 
_all_steps: list[float] = sorted( { d[xaxis] diff --git a/simvue/run.py b/simvue/run.py index 4eaf651d..13c90bbc 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1919,7 +1919,7 @@ def log_alert( ) return False _alert.read_only(False) - _alert.set_state(run_id=self._id, state=state) + _alert.set_status(run_id=self._id, status=state) _alert.commit() return True diff --git a/tests/conftest.py b/tests/conftest.py index f16a14e4..d9bd11ac 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -200,8 +200,8 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur out_f.write( "print('Hello World!')" ) - run.save_file(test_script, category="code", name="test_empty_file") - TEST_DATA["file_3"] = "test_empty_file" + run.save_file(test_script, category="code", name="test_code_upload") + TEST_DATA["file_3"] = "test_code_upload" time.sleep(1.) return TEST_DATA diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 6b8a0def..696f97e9 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -56,9 +56,17 @@ def test_get_run_id_from_name(create_test_run: tuple[sv_run.Run, dict]) -> None: @pytest.mark.dependency @pytest.mark.client -@pytest.mark.parametrize("aggregate", (True, False), ids=("aggregate", "complete")) +@pytest.mark.parametrize( + "aggregate,use_name_labels", + [ + (True, False), + (False, False), + (False, True) + ], + ids=("aggregate", "complete_ids", "complete_labels") +) def test_get_metric_values( - create_test_run: tuple[sv_run.Run, dict], aggregate: bool + create_test_run: tuple[sv_run.Run, dict], aggregate: bool, use_name_labels: bool ) -> None: client = svc.Client() time.sleep(0.5) @@ -66,6 +74,7 @@ def test_get_metric_values( run_ids=[create_test_run[1]["run_id"]], metric_names=[create_test_run[1]["metrics"][0]], xaxis="step", + use_run_names=use_name_labels, aggregate=aggregate, output_format="dict", ) @@ -114,7 +123,6 @@ def test_get_artifact_as_file( create_test_run: 
tuple[sv_run.Run, dict], file_id: int ) -> None: with tempfile.TemporaryDirectory() as tempd: - tempd = os.getcwd() client = svc.Client() _file_name = create_test_run[1][f"file_{file_id}"] client.get_artifact_as_file( @@ -321,7 +329,7 @@ def test_multiple_metric_retrieval( @pytest.mark.client def test_alert_deletion() -> None: - _alert = sv_api_obj.UserAlert.new(name="test_alert", notification="none") + _alert = sv_api_obj.UserAlert.new(name="test_alert", notification="none", description=None) _alert.commit() _client = svc.Client() time.sleep(1) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 1451f335..810bcdab 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -31,7 +31,7 @@ @pytest.mark.run def test_created_run() -> None: with sv_run.Run() as run_created: - run_created.init(running=False) + run_created.init(running=False, retention_period="1 min") _run = RunObject(identifier=run_created.id) assert _run.status == "created" @@ -50,7 +50,7 @@ def test_check_run_initialised_decorator() -> None: @pytest.mark.run def test_run_with_emissions() -> None: with sv_run.Run() as run_created: - run_created.init() + run_created.init(retention_period="1 min") run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) time.sleep(5) _run = RunObject(identifier=run_created.id) @@ -209,7 +209,6 @@ def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None assert run_data["tags"][-1] in [tag["name"] for tag in tags] - @pytest.mark.run def test_update_metadata_running(create_test_run: tuple[sv_run.Run, dict]) -> None: METADATA = {"a": 10, "b": 1.2, "c": "word"} From 699771f9512c69bae64f7294bd8917d8c92d9a84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 15 Jan 2025 19:47:46 +0000 Subject: [PATCH 066/163] Fix generator typing --- simvue/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/client.py 
b/simvue/client.py index f3ac145d..b518bab5 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -187,7 +187,7 @@ def get_runs( count_limit: typing.Optional[pydantic.PositiveInt] = 100, start_index: typing.Optional[pydantic.PositiveInt] = 0, show_shared: bool = False, - ) -> typing.Union[DataFrame, typing.Generator[tuple[str, Run]], None]: + ) -> typing.Union[DataFrame, typing.Generator[tuple[str, Run], None, None], None]: """Retrieve all runs matching filters. Parameters From 5139f1871b435a3ed3423c62d19ea8967440783c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 16 Jan 2025 08:20:59 +0000 Subject: [PATCH 067/163] Added created member to Folder and Tag classes --- simvue/api/objects/folder.py | 11 ++++++++++- simvue/api/objects/tag.py | 19 +++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 2e16a471..357bdfdf 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -9,6 +9,7 @@ import pathlib import typing +import datetime from codecarbon.output_methods.emissions_data import json import pydantic @@ -16,7 +17,7 @@ from simvue.exception import ObjectNotFoundError from .base import SimvueObject, staging_check, write_only -from simvue.models import FOLDER_REGEX +from simvue.models import FOLDER_REGEX, DATETIME_FORMAT class Folder(SimvueObject): @@ -154,6 +155,14 @@ def delete( # should params to this be optional and default to False? 
recursive=recursive, runs=delete_runs, runs_only=runs_only ) + @property + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the run""" + _created: str | None = self._get_attribute("created") + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) + @pydantic.validate_call def get_folder_from_path( diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 4bb5b69b..8725d73b 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -1,6 +1,17 @@ +""" +Simvue Server Tag +================= + +Contains a class for remotely connecting to a Simvue Tag, or defining +a new tag given relevant arguments. + +""" + import pydantic.color import typing +import datetime from .base import SimvueObject, staging_check, write_only +from simvue.models import DATETIME_FORMAT __all__ = ["Tag"] @@ -53,6 +64,14 @@ def description(self) -> str: def description(self, description: str) -> None: self._staging["description"] = description + @property + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the run""" + _created: str | None = self._get_attribute("created") + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) + @classmethod def get( cls, *, count: int | None = None, offset: int | None = None, **kwargs From 156e4bb1440acff13f3d49086894b5dc3692efee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 17 Jan 2025 12:30:29 +0000 Subject: [PATCH 068/163] Fix datetime format and a few more bugs --- simvue/api/objects/artifact.py | 12 +++++------- simvue/converters.py | 6 ++++-- simvue/models.py | 2 +- tests/functional/test_run_class.py | 1 - 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 2ff7e7c0..4f01532c 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -156,8 +156,7 @@ def 
new_object( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - storage: str | None, - category: Category, + storage_id: str | None, obj: typing.Any, metadata: dict[str, typing.Any] | None, allow_pickling: bool = True, @@ -171,10 +170,8 @@ def new_object( ---------- name : str the name for this artifact - storage : str | None + storage_id : str | None the identifier for the storage location for this object - category : "code" | "input" | "output" - the category of this artifact obj : Any object to serialize and upload metadata : dict[str, Any] | None @@ -200,8 +197,9 @@ def new_object( _artifact = Artifact.new( name=name, - storage=storage, - size=sys.getsizeof(obj), + storage_id=storage_id, + original_path=None, + size=sys.getsizeof(_serialized), file_type=_data_type, checksum=_checksum, metadata=metadata, diff --git a/simvue/converters.py b/simvue/converters.py index a77d5ac8..59df29e3 100644 --- a/simvue/converters.py +++ b/simvue/converters.py @@ -212,9 +212,11 @@ def to_dataframe(data) -> pandas.DataFrame: } for run in data: run_info = flatdict.FlatDict(run, delimiter=".") - for column, value_ in columns.items(): - value_.append(run_info.get(column)) + try: + value_.append(run_info.get(column)) + except TypeError: + value_.append(None) return pandas.DataFrame(data=columns) diff --git a/simvue/models.py b/simvue/models.py index 31a81bb3..79aa2751 100644 --- a/simvue/models.py +++ b/simvue/models.py @@ -6,7 +6,7 @@ FOLDER_REGEX: str = r"^/.*" NAME_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:]+$" METRIC_KEY_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:=><]+$" -DATETIME_FORMAT: str = "%Y-%m-%d %H:%M:%S.%f" +DATETIME_FORMAT: str = "%Y-%m-%dT%H:%M:%S.%f" MetadataKeyString = typing.Annotated[ str, pydantic.StringConstraints(pattern=r"^[\w\-\s\.]+$") diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 810bcdab..38c53f48 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -457,7 
+457,6 @@ def test_set_folder_details(request: pytest.FixtureRequest) -> None: client = sv_cl.Client() _folder = client.get_folder(folder_path=folder_name) - print(_folder) assert _folder.tags assert sorted(_folder.tags) == sorted(tags) From 3cd12c53a62de16a7e3e1f46b7e1e589ba3fccc8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 17 Jan 2025 12:40:42 +0000 Subject: [PATCH 069/163] Add to_dict member to SimvueObject --- simvue/api/objects/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 83da3df5..d4f936d1 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -500,6 +500,9 @@ def _cache(self) -> None: with self._local_staging_file.open("w", encoding="utf-8") as out_f: json.dump(_local_data, out_f, indent=2) + def to_dict(self) -> dict[str, typing.Any]: + return {key: getattr(self, key) for key in self._properties} + @property def staged(self) -> dict[str, typing.Any] | None: """Return currently staged changes to this object""" From 2f32a7809e12033c093efa213607eacd51ad6252 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 17 Jan 2025 12:57:09 +0000 Subject: [PATCH 070/163] Allow None as description --- simvue/api/objects/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index ac3f9a49..75dd5cfe 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -147,7 +147,7 @@ def description(self) -> str: @description.setter @write_only @pydantic.validate_call - def description(self, description: str) -> None: + def description(self, description: str | None) -> None: self._staging["description"] = description @property From a934ee86282f9c51cf61a17d20f40cec9e87c764 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 17 Jan 2025 17:43:52 +0000 Subject: [PATCH 071/163] Added Metrics and Events classes --- 
simvue/api/objects/__init__.py | 2 + simvue/api/objects/base.py | 26 ++++++-- simvue/api/objects/events.py | 111 ++++++++++++++++++++++++++++++++ simvue/api/objects/metrics.py | 76 ++++++++++++++++++++++ simvue/api/objects/run.py | 13 ++-- simvue/client.py | 10 ++- simvue/run.py | 22 +++++-- simvue/utilities.py | 3 +- tests/functional/test_client.py | 20 +++++- 9 files changed, 262 insertions(+), 21 deletions(-) create mode 100644 simvue/api/objects/events.py create mode 100644 simvue/api/objects/metrics.py diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 74bf2d4f..08c7b22b 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -15,3 +15,5 @@ from .run import Run as Run from .tag import Tag as Tag from .folder import Folder as Folder, get_folder_from_path as get_folder_from_path +from .events import Events as Events +from .metrics import Metrics as Metrics diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index d4f936d1..c51b5e7e 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -14,6 +14,9 @@ import json import logging +import msgpack +import pydantic + from simvue.config.user import SimvueConfiguration from simvue.exception import ObjectNotFoundError from simvue.version import __version__ @@ -281,8 +284,12 @@ def ids( return [_entry["id"] for _entry in _data] @classmethod + @pydantic.validate_call def get( - cls, *, count: int | None = None, offset: int | None = None, **kwargs + cls, + count: pydantic.PositiveInt | None = None, + offset: pydantic.PositiveInt | None = None, + **kwargs, ) -> typing.Generator[tuple[str, typing.Optional["SimvueObject"]], None, None]: _class_instance = cls(_read_only=True, _local=True) if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: @@ -321,10 +328,15 @@ def _get_all_objects( headers=_class_instance._headers, params={"start": offset, "count": count} | kwargs, ) + + _label = 
_class_instance.__class__.__name__.lower() + if _label.endswith("s"): + _label = _label[:-1] + return get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + scenario=f"Retrieval of {_label}s", ) def read_only(self, is_read_only: bool) -> None: @@ -378,9 +390,15 @@ def _base_url(self) -> URL: def url(self) -> URL | None: return None if self._identifier is None else self._base_url / self._identifier - def _post(self, **kwargs) -> dict[str, typing.Any]: + def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: + if not is_json: + kwargs = msgpack.packb(kwargs, use_bin_type=True) + _response = sv_post( - url=f"{self._base_url}", headers=self._headers, data=kwargs, is_json=True + url=f"{self._base_url}", + headers=self._headers | {"Content-Type": "application/msgpack"}, + data=kwargs, + is_json=is_json, ) if _response.status_code == http.HTTPStatus.FORBIDDEN: diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py new file mode 100644 index 00000000..156521c2 --- /dev/null +++ b/simvue/api/objects/events.py @@ -0,0 +1,111 @@ +""" +Simvue Server Events +==================== + +Contains a class for remotely connecting to Simvue events, or defining +a new set of events given relevant arguments. 
+ +""" + +import http +import typing +import datetime + +import pydantic + +from simvue.api.url import URL + +from .base import SimvueObject +from simvue.models import DATETIME_FORMAT, EventSet +from simvue.api.request import get as sv_get, get_json_from_response + +__all__ = ["Events"] + + +class Events(SimvueObject): + def __init__( + self, + _read_only: bool = True, + _local: bool = False, + **kwargs, + ) -> None: + self._label = "event" + super().__init__( + identifier=None, _read_only=_read_only, _local=_local, **kwargs + ) + self._run_id = self._staging.get("run") + + @classmethod + @pydantic.validate_call + def get( + cls, + run_id: str, + *, + count: pydantic.PositiveInt | None = None, + offset: pydantic.PositiveInt | None = None, + **kwargs, + ) -> typing.Generator[EventSet, None, None]: + _class_instance = cls(_read_only=True, _local=True) + if ( + _data := cls._get_all_objects(count, offset, run=run_id, **kwargs).get( + "data" + ) + ) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _data: + yield EventSet(**_entry) + + @classmethod + @pydantic.validate_call + def new(cls, *, run_id: str, offline: bool = False, events: list[EventSet]): + """Create a new Events entry on the Simvue server""" + _events = Events( + run=run_id, + events=[event.model_dump() for event in events], + _read_only=False, + ) + _events.offline_mode(offline) + return _events + + def _post(self, **kwargs) -> dict[str, typing.Any]: + return super()._post(is_json=False, **kwargs) + + def _put(self, **kwargs) -> dict[str, typing.Any]: + raise NotImplementedError("Method 'put' is not available for type Events") + + @pydantic.validate_call + def histogram( + self, + timestamp_begin: datetime.datetime, + timestamp_end: datetime.datetime, + window: int, + filters: list[str] | None, + ) -> list[dict[str, str | int]]: + if timestamp_end - timestamp_begin <= datetime.timedelta(seconds=window): + raise 
ValueError( + "Invalid arguments for datetime range, " + "value difference must be greater than window" + ) + _url: URL = self._base_url / "histogram" + _time_begin: str = timestamp_begin.strftime(DATETIME_FORMAT) + _time_end: str = timestamp_end.strftime(DATETIME_FORMAT) + _response = sv_get( + url=_url, + headers=self._headers, + params={ + "run": self._run_id, + "window": window, + "timestamp_begin": timestamp_begin, + "timestamp_end": timestamp_end, + } + | ({"filters": filters} if filters else {}), + ) + _json_response = get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario="Retrieval of events histogram", + response=_response, + ) + return _json_response.get("data") diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py new file mode 100644 index 00000000..b4ae1823 --- /dev/null +++ b/simvue/api/objects/metrics.py @@ -0,0 +1,76 @@ +""" +Simvue Server Metrics +===================== + +Contains a class for remotely connecting to Simvue metrics, or defining +a new set of metrics given relevant arguments. 
+ +""" + +import typing +import json + +import pydantic + + +from .base import SimvueObject +from simvue.models import MetricSet + +__all__ = ["Metrics"] + + +class Metrics(SimvueObject): + def __init__( + self, + _read_only: bool = True, + _local: bool = False, + **kwargs, + ) -> None: + self._label = "metric" + super().__init__( + identifier=None, _read_only=_read_only, _local=_local, **kwargs + ) + self._run_id = self._staging.get("run") + + @classmethod + @pydantic.validate_call + def new(cls, *, run_id: str, offline: bool = False, metrics: list[MetricSet]): + """Create a new Events entry on the Simvue server""" + _events = Metrics( + run=run_id, + metrics=[metric.model_dump() for metric in metrics], + _read_only=False, + ) + _events.offline_mode(offline) + return _events + + @classmethod + @pydantic.validate_call + def get( + cls, + metrics: list[str], + xaxis: typing.Literal["timestamp", "step", "time"], + *, + count: pydantic.PositiveInt | None = None, + offset: pydantic.PositiveInt | None = None, + **kwargs, + ) -> typing.Generator[MetricSet, None, None]: + _class_instance = cls(_read_only=True, _local=True) + if ( + _data := cls._get_all_objects( + count, + offset, + metrics=json.dumps(metrics), + xaxis=xaxis, + **kwargs, + ).get("data") + ) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _data: + yield MetricSet(**_entry) + + def _post(self, **kwargs) -> dict[str, typing.Any]: + return super()._post(is_json=False, **kwargs) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 75dd5cfe..85b1373f 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -16,6 +16,7 @@ get_json_from_response, post as sv_post, ) +from simvue.api.objects.events import Events from simvue.api.url import URL from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT, EventSet, MetricSet @@ -272,6 +273,11 @@ def log_entries( 
self._stage_to_other(entry_type, self._identifier, _validated_entries) return + if entry_type == "events": + _events = Events.new(run_id=self._identifier, events=entries) + _events.commit() + return + _url = URL(self._user_config.server.url) / entry_type _data = {entry_type: _validated_entries, "run": self._identifier} _data_bin = msgpack.packb(_data, use_bin_type=True) @@ -297,18 +303,15 @@ def send_heartbeat(self) -> dict[str, typing.Any] | None: _url = self._base_url _url /= f"{self._identifier}/heartbeat" _response = sv_put(f"{_url}", headers=self._headers, data={}) - _json_response = get_json_from_response( + return get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], scenario="Retrieving heartbeat state", ) - return _json_response @property def _abort_url(self) -> URL | None: - if not self._identifier: - return None - return self.url / "abort" + return self.url / "abort" if self._identifier else None @property def _artifact_url(self) -> URL | None: diff --git a/simvue/client.py b/simvue/client.py index b518bab5..ad1e13ca 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -407,8 +407,7 @@ def delete_folder( return None else: raise RuntimeError( - f"Deletion of folder '{folder_path}' failed, " - "folder does not exist." + f"Deletion of folder '{folder_path}' failed, folder does not exist." 
) _response = Folder(identifier=folder_id).delete( delete_runs=remove_runs, recursive=recursive, runs_only=False @@ -633,7 +632,7 @@ def get_folder( if there was a failure when retrieving information from the server """ _folders: typing.Generator[tuple[str, Folder], None, None] = Folder.get( - path=folder_path + filters=json.dumps([f"path = {folder_path}"]) ) # type: ignore try: @@ -724,7 +723,7 @@ def _get_run_metrics_from_server( return get_json_from_response( expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of metrics '{metric_names}' in " f"runs '{run_ids}'", + scenario=f"Retrieval of metrics '{metric_names}' in runs '{run_ids}'", response=metrics_response, ) @@ -872,8 +871,7 @@ def plot_metrics( if data is None: raise RuntimeError( - f"Cannot plot metrics {metric_names}, " - f"no data found for runs {run_ids}." + f"Cannot plot metrics {metric_names}, no data found for runs {run_ids}." ) # Undo multi-indexing diff --git a/simvue/run.py b/simvue/run.py index 13c90bbc..65a477bc 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -57,6 +57,8 @@ MetricsRangeAlert, UserAlert, EventsAlert, + Events, + Metrics, ) try: @@ -92,8 +94,7 @@ def _wrapper(self: Self, *args: typing.Any, **kwargs: typing.Any) -> typing.Any: if not self._sv_obj: raise RuntimeError( - "Simvue Run must be initialised before calling " - f"'{function.__name__}'" + f"Simvue Run must be initialised before calling '{function.__name__}'" ) return _function(self, *args, **kwargs) @@ -429,10 +430,23 @@ def _create_dispatch_callback( def _dispatch_callback( buffer: list[typing.Any], - category: str, + category: typing.Literal["events", "metrics"], run_obj: RunObject = self._sv_obj, ) -> None: - run_obj.log_entries(entries=buffer, entry_type=category) + if category == "events": + _events = Events.new( + run_id=self.id, + offline=self._user_config.run.mode == "offline", + events=buffer, + ) + _events.commit() + else: + _metrics = Metrics.new( + run_id=self.id, + offline=self._user_config.run.mode 
== "offline", + metrics=buffer, + ) + _metrics.commit() return _dispatch_callback diff --git a/simvue/utilities.py b/simvue/utilities.py index 2ef12bb5..4bf3e013 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -16,6 +16,7 @@ import jwt from datetime import timezone +from simvue.models import DATETIME_FORMAT CHECKSUM_BLOCK_SIZE = 4096 @@ -381,7 +382,7 @@ def simvue_timestamp(date_time: typing.Optional[datetime.datetime] = None) -> st """ if not date_time: date_time = datetime.datetime.now(timezone.utc) - return date_time.strftime("%Y-%m-%d %H:%M:%S.%f") + return date_time.strftime(DATETIME_FORMAT) @functools.lru_cache diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 696f97e9..3cada4d8 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -248,7 +248,25 @@ def test_get_tags(create_plain_run: tuple[sv_run.Run, dict]) -> None: assert all(t in retrieved for t in tags) -@pytest.mark.dependency +@pytest.mark.dependencResults (276.21s (0:04:36)): + 28 passed + 15 failed + - tests/functional/test_client.py:18 test_get_events + - tests/functional/test_client.py:57 test_get_metric_values[aggregate] + - tests/functional/test_client.py:57 test_get_metric_values[complete_ids] + - tests/functional/test_client.py:57 test_get_metric_values[complete_labels] + - tests/functional/test_client.py:95 test_plot_metrics + - tests/functional/test_client.py:111 test_get_artifacts_entries + - tests/functional/test_client.py:119 test_get_artifact_as_file[file_1] + - tests/functional/test_client.py:119 test_get_artifact_as_file[file_2] + - tests/functional/test_client.py:119 test_get_artifact_as_file[file_3] + - tests/functional/test_client.py:188 test_get_metrics_names + - tests/functional/test_client.py:251 test_folder_deletion + - tests/functional/test_client.py:292 test_multiple_metric_retrieval[step-dict-aggregated] + - tests/functional/test_client.py:292 
test_multiple_metric_retrieval[step-dataframe-aggregated] + - tests/functional/test_client.py:292 test_multiple_metric_retrieval[time-dict-aggregated] + - tests/functional/test_client.py:292 test_multiple_metric_retrieval[time-dataframe-aggregated] +y @pytest.mark.client(depends=PRE_DELETION_TESTS + ["test_runs_deletion"]) def test_folder_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None: run, run_data = create_test_run From 5efb5b87e63241d0d827518e60275ad0f73cd453 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 08:51:19 +0000 Subject: [PATCH 072/163] More test fixes --- simvue/api/objects/run.py | 4 ++-- tests/functional/test_client.py | 20 +------------------- tests/functional/test_run_execute_process.py | 2 +- tests/functional/test_scenarios.py | 2 +- 4 files changed, 5 insertions(+), 23 deletions(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 85b1373f..25de7ab1 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -163,13 +163,13 @@ def system(self, system: dict[str, typing.Any]) -> None: @property @staging_check - def heartbeat_timeout(self) -> int: + def heartbeat_timeout(self) -> int | None: return self._get_attribute("heartbeat_timeout") @heartbeat_timeout.setter @write_only @pydantic.validate_call - def heartbeat_timeout(self, time_seconds: int) -> None: + def heartbeat_timeout(self, time_seconds: int | None) -> None: self._staging["heartbeat_timeout"] = time_seconds @property diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 3cada4d8..696f97e9 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -248,25 +248,7 @@ def test_get_tags(create_plain_run: tuple[sv_run.Run, dict]) -> None: assert all(t in retrieved for t in tags) -@pytest.mark.dependencResults (276.21s (0:04:36)): - 28 passed - 15 failed - - tests/functional/test_client.py:18 test_get_events - - tests/functional/test_client.py:57 
test_get_metric_values[aggregate] - - tests/functional/test_client.py:57 test_get_metric_values[complete_ids] - - tests/functional/test_client.py:57 test_get_metric_values[complete_labels] - - tests/functional/test_client.py:95 test_plot_metrics - - tests/functional/test_client.py:111 test_get_artifacts_entries - - tests/functional/test_client.py:119 test_get_artifact_as_file[file_1] - - tests/functional/test_client.py:119 test_get_artifact_as_file[file_2] - - tests/functional/test_client.py:119 test_get_artifact_as_file[file_3] - - tests/functional/test_client.py:188 test_get_metrics_names - - tests/functional/test_client.py:251 test_folder_deletion - - tests/functional/test_client.py:292 test_multiple_metric_retrieval[step-dict-aggregated] - - tests/functional/test_client.py:292 test_multiple_metric_retrieval[step-dataframe-aggregated] - - tests/functional/test_client.py:292 test_multiple_metric_retrieval[time-dict-aggregated] - - tests/functional/test_client.py:292 test_multiple_metric_retrieval[time-dataframe-aggregated] -y +@pytest.mark.dependency @pytest.mark.client(depends=PRE_DELETION_TESTS + ["test_runs_deletion"]) def test_folder_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None: run, run_data = create_test_run diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index 7bd35093..29fc42ec 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -75,7 +75,7 @@ def test_processes_cwd(create_plain_run: dict[Run, dict]) -> None: client.get_artifact_as_file(run_id, os.path.basename(temp_file.name), output_dir=os.path.join(temp_dir, "downloaded")) assert filecmp.cmp(os.path.join(temp_dir, "downloaded", os.path.basename(temp_file.name)), temp_file.name) - client.get_artifact_as_file(run_id, "new_file.txt", path=os.path.join(temp_dir, "downloaded")) + client.get_artifact_as_file(run_id, "new_file.txt", output_dir=os.path.join(temp_dir, "downloaded")) 
with open(os.path.join(temp_dir, "downloaded", "new_file.txt"), "r") as new_file: assert new_file.read() == "Test Line" diff --git a/tests/functional/test_scenarios.py b/tests/functional/test_scenarios.py index 84471376..9b9983c5 100644 --- a/tests/functional/test_scenarios.py +++ b/tests/functional/test_scenarios.py @@ -59,7 +59,7 @@ def upload(name: str, values_per_run: int, shared_dict) -> None: def test_uploaded_data_immediately_accessible( values_per_run: int, processing: str, run_deleter ) -> None: - name = f"Test-{str(random.randint(0, 1000000000))}" + name = f"Test-{random.randint(0, 1000000000)}" manager = Manager() shared_dict = manager.dict() From a245815ad089a452ca49ad03b960e52b218299fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 09:53:39 +0000 Subject: [PATCH 073/163] Added User test --- poetry.lock | 37 +++++++- pyproject.toml | 1 + simvue/api/objects/administrator/__init__.py | 1 + simvue/api/objects/alert/base.py | 17 +++- tests/unit/test_event_alert.py | 9 +- tests/unit/test_metric_range_alert.py | 3 +- tests/unit/test_metric_threshold_alert.py | 3 +- tests/unit/test_tenant.py | 2 +- tests/unit/test_user.py | 89 ++++++++++++++++++++ tests/unit/test_user_alert.py | 10 ++- 10 files changed, 160 insertions(+), 12 deletions(-) create mode 100644 tests/unit/test_user.py diff --git a/poetry.lock b/poetry.lock index a234da2b..99dd0f93 100644 --- a/poetry.lock +++ b/poetry.lock @@ -541,6 +541,41 @@ files = [ graph = ["objgraph (>=1.7.2)"] profile = ["gprof2dot (>=2022.7.29)"] +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", 
"coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -2404,4 +2439,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.0" python-versions = "^3.10,<3.14" -content-hash = "3ebb1509b1e43ae7fe80b7c9c5bf49f95507de0a7c6907761c795b391620977b" +content-hash = "dfcad61fc8293aca3e816605d366322aeaafa217eb642caac763a58a20fc763c" diff --git a/pyproject.toml b/pyproject.toml index c9510c44..ec89ee12 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ codecarbon = "^2.7.1" numpy = "^2.1.2" flatdict = "^4.0.1" semver = "^3.0.2" +email-validator = "^2.2.0" [tool.poetry.extras] plot = ["matplotlib", "plotly"] diff --git a/simvue/api/objects/administrator/__init__.py b/simvue/api/objects/administrator/__init__.py index a23f023e..13867771 100644 --- a/simvue/api/objects/administrator/__init__.py +++ b/simvue/api/objects/administrator/__init__.py @@ -1 +1,2 @@ from .tenant import Tenant as Tenant +from .user import User as User diff --git a/simvue/api/objects/alert/base.py 
b/simvue/api/objects/alert/base.py index 94c0ec29..786c8ec0 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -66,9 +66,22 @@ def description(self, description: str | None) -> None: self._staging["description"] = description @property - def auto_tags(self) -> list[str]: + def run_tags(self) -> list[str]: """Retrieve automatically assigned tags from runs""" - return self._get_attribute("auto_tags") + return self._get_attribute("run_tags") + + @property + @staging_check + def auto(self) -> bool: + """Retrieve if alert has run tag auto-assign""" + return self._get_attribute("auto") + + @auto.setter + @write_only + @pydantic.validate_call + def auto(self, auto: bool) -> None: + """Set alert to use run tag auto-assign""" + self._staging["auto"] = auto @property @staging_check diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index 775e9dab..5853170b 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -13,7 +13,8 @@ def test_event_alert_creation_online() -> None: name=f"events_alert_{_uuid}", frequency=1, pattern="completed", - notification="none" + notification="none", + description=None ) _alert.commit() assert _alert.source == "events" @@ -58,7 +59,8 @@ def test_event_alert_modification_online() -> None: name=f"events_alert_{_uuid}", frequency=1, pattern="completed", - notification="none" + notification="none", + description=None ) _alert.commit() time.sleep(1) @@ -105,7 +107,8 @@ def test_event_alert_properties() -> None: name=f"events_alert_{_uuid}", frequency=1, pattern="completed", - notification="none" + notification="none", + description="event_alert prop alert" ) _alert.commit() diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index 6869c4c4..721b78d3 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -19,7 +19,8 @@ def test_metric_range_alert_creation_online() -> None: 
range_high=15, window=1, aggregation="average", - rule="is inside range" + rule="is inside range", + description="a metric range alert" ) _alert.commit() assert _alert.source == "metrics" diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index d59d5a11..d0c5ff60 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -19,7 +19,8 @@ def test_metric_threshold_alert_creation_online() -> None: threshold=10, window=1, rule="is above", - aggregation="average" + aggregation="average", + description="a metric threshold alert" ) _alert.commit() assert _alert.source == "metrics" diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py index 1cb6b2f2..2c164ca0 100644 --- a/tests/unit/test_tenant.py +++ b/tests/unit/test_tenant.py @@ -39,7 +39,7 @@ def test_create_tenant_offline() -> None: @pytest.mark.api @pytest.mark.online -def test_tag_get_properties() -> None: +def test_tenant_get_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tenant = Tenant.new(name=_uuid) try: diff --git a/tests/unit/test_user.py b/tests/unit/test_user.py new file mode 100644 index 00000000..e08ab3f1 --- /dev/null +++ b/tests/unit/test_user.py @@ -0,0 +1,89 @@ +import pytest +import time +import contextlib +import json +import uuid + +from simvue.api.objects.administrator import User, Tenant + + +@pytest.mark.api +@pytest.mark.online +def test_create_user_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + _user = User.new( + username="jbloggs", + fullname="Joe Bloggs", + email="jbloggs@simvue.io", + manager=False, + admin=False, + readonly=True, + welcome=False, + tenant=_uuid + ) + try: + _tenant.commit() + _user.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + time.sleep(1) + _new_user = User(_user.id) + assert _new_user.username == "jbloggs" + assert _new_user.enabled + 
_new_user.delete() + _tenant.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_create_user_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _user = User.new( + username="jbloggs", + fullname="Joe Bloggs", + email="jbloggs@simvue.io", + manager=False, + admin=False, + readonly=True, + welcome=False, + tenant=_uuid, + offline=True + ) + _user.commit() + +@pytest.mark.api +@pytest.mark.online +def test_user_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + _user = User.new( + username="jbloggs", + fullname="Joe Bloggs", + email="jbloggs@simvue.io", + manager=False, + admin=False, + readonly=True, + welcome=False, + tenant=_uuid + ) + try: + _tenant.commit() + _user.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + _failed = [] + + for member in _user._properties: + try: + getattr(_user, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _user.delete() + _tenant.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index 14d9e1cd..f45a5b4e 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -13,7 +13,8 @@ def test_user_alert_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( name=f"users_alert_{_uuid}", - notification="none" + notification="none", + description=None ) _alert.commit() assert _alert.source == "user" @@ -51,6 +52,7 @@ def test_user_alert_modification_online() -> None: _alert = UserAlert.new( name=f"users_alert_{_uuid}", notification="none", + description=None ) _alert.commit() time.sleep(1) @@ -92,7 +94,8 @@ def test_user_alert_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( name=f"users_alert_{_uuid}", - notification="none" + 
notification="none", + description=None ) _alert.commit() @@ -116,7 +119,8 @@ def test_user_alert_status() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = UserAlert.new( name=f"users_alert_{_uuid}", - notification="none" + notification="none", + description=None ) _alert.commit() _folder = Folder.new(path=f"/simvue_unit_tests/{_uuid}") From 6061607aab55664a6b0d30658e3e24d645c720ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 10:49:58 +0000 Subject: [PATCH 074/163] Started updating typing and fixed dependency for typing --- poetry.lock | 1538 ++++++++++++++---------------- pyproject.toml | 2 +- simvue/api/objects/alert/base.py | 2 +- simvue/executor.py | 52 +- simvue/metrics.py | 34 +- simvue/serialization.py | 16 +- 6 files changed, 792 insertions(+), 852 deletions(-) diff --git a/poetry.lock b/poetry.lock index 99dd0f93..9a07cc5f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,13 +13,13 @@ files = [ [[package]] name = "anyio" -version = "4.7.0" +version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, - {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] [package.dependencies] @@ -30,7 +30,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", 
"hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -54,13 +54,13 @@ test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -144,127 +144,114 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, 
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = 
"charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + 
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", 
hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash 
= "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -272,13 +259,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "codecarbon" -version = "2.8.1" +version = "2.8.3" description = "" optional = false python-versions = 
">=3.7" files = [ - {file = "codecarbon-2.8.1-py3-none-any.whl", hash = "sha256:18abef94263724639fc1c2af808d5ad77f877c2812d691b30064f76743e2f660"}, - {file = "codecarbon-2.8.1.tar.gz", hash = "sha256:a5652293c82092abe6ee8fac8e4b7b90b7efa3e5eded56382e3ef472471a7395"}, + {file = "codecarbon-2.8.3-py3-none-any.whl", hash = "sha256:d3204852ad0c83d94d0f16b7d922e7f540c1e5f488d911f3e75408fe29f4ef4c"}, + {file = "codecarbon-2.8.3.tar.gz", hash = "sha256:037dd5afa1c5f60154f893ecd1631e0c849786edcfc9ff34a7ef467707891269"}, ] [package.dependencies] @@ -385,73 +372,73 @@ test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist" [[package]] name = "coverage" -version = "7.6.9" +version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, - {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, - {file = 
"coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, - {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, - {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, - {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, - {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, - 
{file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, - {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, - {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, - {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, - {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, - {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, - {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, - {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, - {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, - {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, - {file = 
"coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, - {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, - {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, - {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, - {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = 
"coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file 
= "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = 
"coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, ] [package.dependencies] @@ -650,61 +637,61 @@ files = [ [[package]] name = "fonttools" -version = "4.55.2" +version = "4.55.3" description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" files = [ - {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bef0f8603834643b1a6419d57902f18e7d950ec1a998fb70410635c598dc1a1e"}, - {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:944228b86d472612d3b48bcc83b31c25c2271e63fdc74539adfcfa7a96d487fb"}, - {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f0e55f5da594b85f269cfbecd2f6bd3e07d0abba68870bc3f34854de4fa4678"}, - {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b1a6e576db0c83c1b91925bf1363478c4bb968dbe8433147332fb5782ce6190"}, - {file = "fonttools-4.55.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:616368b15716781bc84df5c2191dc0540137aaef56c2771eb4b89b90933f347a"}, - {file = "fonttools-4.55.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bbae4f3915225c2c37670da68e2bf18a21206060ad31dfb95fec91ef641caa7"}, - {file = "fonttools-4.55.2-cp310-cp310-win32.whl", hash = "sha256:8b02b10648d69d67a7eb055f4d3eedf4a85deb22fb7a19fbd9acbae7c7538199"}, - {file = 
"fonttools-4.55.2-cp310-cp310-win_amd64.whl", hash = "sha256:bbea0ab841113ac8e8edde067e099b7288ffc6ac2dded538b131c2c0595d5f77"}, - {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d34525e8141286fa976e14806639d32294bfb38d28bbdb5f6be9f46a1cd695a6"}, - {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ecd1c2b1c2ec46bb73685bc5473c72e16ed0930ef79bc2919ccadc43a99fb16"}, - {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9008438ad59e5a8e403a62fbefef2b2ff377eb3857d90a3f2a5f4d674ff441b2"}, - {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:131591ac8d7a47043aaf29581aba755ae151d46e49d2bf49608601efd71e8b4d"}, - {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4c83381c3e3e3d9caa25527c4300543578341f21aae89e4fbbb4debdda8d82a2"}, - {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42aca564b575252fd9954ed0d91d97a24de24289a16ce8ff74ed0bdf5ecebf11"}, - {file = "fonttools-4.55.2-cp311-cp311-win32.whl", hash = "sha256:c6457f650ebe15baa17fc06e256227f0a47f46f80f27ec5a0b00160de8dc2c13"}, - {file = "fonttools-4.55.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cfa67414d7414442a5635ff634384101c54f53bb7b0e04aa6a61b013fcce194"}, - {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:18f082445b8fe5e91c53e6184f4c1c73f3f965c8bcc614c6cd6effd573ce6c1a"}, - {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c0f91adbbd706e8acd1db73e3e510118e62d0ffb651864567dccc5b2339f90"}, - {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d8ccce035320d63dba0c35f52499322f5531dbe85bba1514c7cea26297e4c54"}, - {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:96e126df9615df214ec7f04bebcf60076297fbc10b75c777ce58b702d7708ffb"}, - {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:508ebb42956a7a931c4092dfa2d9b4ffd4f94cea09b8211199090d2bd082506b"}, - {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1b9de46ef7b683d50400abf9f1578eaceee271ff51c36bf4b7366f2be29f498"}, - {file = "fonttools-4.55.2-cp312-cp312-win32.whl", hash = "sha256:2df61d9fc15199cc86dad29f64dd686874a3a52dda0c2d8597d21f509f95c332"}, - {file = "fonttools-4.55.2-cp312-cp312-win_amd64.whl", hash = "sha256:d337ec087da8216a828574aa0525d869df0a2ac217a2efc1890974ddd1fbc5b9"}, - {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:10aff204e2edee1d312fa595c06f201adf8d528a3b659cfb34cd47eceaaa6a26"}, - {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09fe922a3eff181fd07dd724cdb441fb6b9fc355fd1c0f1aa79aca60faf1fbdd"}, - {file = "fonttools-4.55.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487e1e8b524143a799bda0169c48b44a23a6027c1bb1957d5a172a7d3a1dd704"}, - {file = "fonttools-4.55.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b1726872e09268bbedb14dc02e58b7ea31ecdd1204c6073eda4911746b44797"}, - {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fc88cfb58b0cd7b48718c3e61dd0d0a3ee8e2c86b973342967ce09fbf1db6d4"}, - {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e857fe1859901ad8c5cab32e0eebc920adb09f413d2d73b74b677cf47b28590c"}, - {file = "fonttools-4.55.2-cp313-cp313-win32.whl", hash = "sha256:81ccd2b3a420b8050c7d9db3be0555d71662973b3ef2a1d921a2880b58957db8"}, - {file = "fonttools-4.55.2-cp313-cp313-win_amd64.whl", hash = "sha256:d559eb1744c7dcfa90ae60cb1a4b3595e898e48f4198738c321468c01180cd83"}, - {file = "fonttools-4.55.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:6b5917ef79cac8300b88fd6113003fd01bbbbea2ea060a27b95d8f77cb4c65c2"}, - {file = "fonttools-4.55.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:663eba5615d6abaaf616432354eb7ce951d518e43404371bcc2b0694ef21e8d6"}, - {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:803d5cef5fc47f44f5084d154aa3d6f069bb1b60e32390c225f897fa19b0f939"}, - {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bc5f100de0173cc39102c0399bd6c3bd544bbdf224957933f10ee442d43cddd"}, - {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3d9bbc1e380fdaf04ad9eabd8e3e6a4301eaf3487940893e9fd98537ea2e283b"}, - {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:42a9afedff07b6f75aa0f39b5e49922ac764580ef3efce035ca30284b2ee65c8"}, - {file = "fonttools-4.55.2-cp38-cp38-win32.whl", hash = "sha256:f1c76f423f1a241df08f87614364dff6e0b7ce23c962c1b74bd995ec7c0dad13"}, - {file = "fonttools-4.55.2-cp38-cp38-win_amd64.whl", hash = "sha256:25062b6ca03464dd5179fc2040fb19e03391b7cc49b9cc4f879312e638605c5c"}, - {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d1100d8e665fe386a79cab59446992de881ea74d0d6c191bb988642692aa2421"}, - {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbdc251c5e472e5ae6bc816f9b82718b8e93ff7992e7331d6cf3562b96aa268e"}, - {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0bf24d2b02dbc9376d795a63062632ff73e3e9e60c0229373f500aed7e86dd7"}, - {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ff250ed4ff05015dfd9cf2adf7570c7a383ca80f4d9732ac484a5ed0d8453c"}, - {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44cf2a98aa661dbdeb8c03f5e405b074e2935196780bb729888639f5276067d9"}, - {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:22ef222740eb89d189bf0612eb98fbae592c61d7efeac51bfbc2a1592d469557"}, - {file = "fonttools-4.55.2-cp39-cp39-win32.whl", hash = "sha256:93f439ca27e55f585e7aaa04a74990acd983b5f2245e41d6b79f0a8b44e684d8"}, - {file = "fonttools-4.55.2-cp39-cp39-win_amd64.whl", hash = "sha256:627cf10d6f5af5bec6324c18a2670f134c29e1b7dce3fb62e8ef88baa6cba7a9"}, - {file = "fonttools-4.55.2-py3-none-any.whl", hash = "sha256:8e2d89fbe9b08d96e22c7a81ec04a4e8d8439c31223e2dc6f2f9fc8ff14bdf9f"}, - {file = "fonttools-4.55.2.tar.gz", hash = "sha256:45947e7b3f9673f91df125d375eb57b9a23f2a603f438a1aebf3171bffa7a205"}, + {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1dcc07934a2165ccdc3a5a608db56fb3c24b609658a5b340aee4ecf3ba679dc0"}, + {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7d66c15ba875432a2d2fb419523f5d3d347f91f48f57b8b08a2dfc3c39b8a3f"}, + {file = "fonttools-4.55.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e4ae3592e62eba83cd2c4ccd9462dcfa603ff78e09110680a5444c6925d841"}, + {file = "fonttools-4.55.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d65a3022c35e404d19ca14f291c89cc5890032ff04f6c17af0bd1927299674"}, + {file = "fonttools-4.55.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d342e88764fb201286d185093781bf6628bbe380a913c24adf772d901baa8276"}, + {file = "fonttools-4.55.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dd68c87a2bfe37c5b33bcda0fba39b65a353876d3b9006fde3adae31f97b3ef5"}, + {file = "fonttools-4.55.3-cp310-cp310-win32.whl", hash = "sha256:1bc7ad24ff98846282eef1cbeac05d013c2154f977a79886bb943015d2b1b261"}, + {file = "fonttools-4.55.3-cp310-cp310-win_amd64.whl", hash = "sha256:b54baf65c52952db65df39fcd4820668d0ef4766c0ccdf32879b77f7c804d5c5"}, + {file = "fonttools-4.55.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c4491699bad88efe95772543cd49870cf756b019ad56294f6498982408ab03e"}, + {file = 
"fonttools-4.55.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5323a22eabddf4b24f66d26894f1229261021dacd9d29e89f7872dd8c63f0b8b"}, + {file = "fonttools-4.55.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5480673f599ad410695ca2ddef2dfefe9df779a9a5cda89503881e503c9c7d90"}, + {file = "fonttools-4.55.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da9da6d65cd7aa6b0f806556f4985bcbf603bf0c5c590e61b43aa3e5a0f822d0"}, + {file = "fonttools-4.55.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e894b5bd60d9f473bed7a8f506515549cc194de08064d829464088d23097331b"}, + {file = "fonttools-4.55.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aee3b57643827e237ff6ec6d28d9ff9766bd8b21e08cd13bff479e13d4b14765"}, + {file = "fonttools-4.55.3-cp311-cp311-win32.whl", hash = "sha256:eb6ca911c4c17eb51853143624d8dc87cdcdf12a711fc38bf5bd21521e79715f"}, + {file = "fonttools-4.55.3-cp311-cp311-win_amd64.whl", hash = "sha256:6314bf82c54c53c71805318fcf6786d986461622dd926d92a465199ff54b1b72"}, + {file = "fonttools-4.55.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f9e736f60f4911061235603a6119e72053073a12c6d7904011df2d8fad2c0e35"}, + {file = "fonttools-4.55.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a8aa2c5e5b8b3bcb2e4538d929f6589a5c6bdb84fd16e2ed92649fb5454f11c"}, + {file = "fonttools-4.55.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07f8288aacf0a38d174445fc78377a97fb0b83cfe352a90c9d9c1400571963c7"}, + {file = "fonttools-4.55.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8d5e8916c0970fbc0f6f1bece0063363bb5857a7f170121a4493e31c3db3314"}, + {file = "fonttools-4.55.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ae3b6600565b2d80b7c05acb8e24d2b26ac407b27a3f2e078229721ba5698427"}, + {file = "fonttools-4.55.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:54153c49913f45065c8d9e6d0c101396725c5621c8aee744719300f79771d75a"}, + {file = "fonttools-4.55.3-cp312-cp312-win32.whl", hash = "sha256:827e95fdbbd3e51f8b459af5ea10ecb4e30af50221ca103bea68218e9615de07"}, + {file = "fonttools-4.55.3-cp312-cp312-win_amd64.whl", hash = "sha256:e6e8766eeeb2de759e862004aa11a9ea3d6f6d5ec710551a88b476192b64fd54"}, + {file = "fonttools-4.55.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a430178ad3e650e695167cb53242dae3477b35c95bef6525b074d87493c4bf29"}, + {file = "fonttools-4.55.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:529cef2ce91dc44f8e407cc567fae6e49a1786f2fefefa73a294704c415322a4"}, + {file = "fonttools-4.55.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e75f12c82127486fac2d8bfbf5bf058202f54bf4f158d367e41647b972342ca"}, + {file = "fonttools-4.55.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:859c358ebf41db18fb72342d3080bce67c02b39e86b9fbcf1610cca14984841b"}, + {file = "fonttools-4.55.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:546565028e244a701f73df6d8dd6be489d01617863ec0c6a42fa25bf45d43048"}, + {file = "fonttools-4.55.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:aca318b77f23523309eec4475d1fbbb00a6b133eb766a8bdc401faba91261abe"}, + {file = "fonttools-4.55.3-cp313-cp313-win32.whl", hash = "sha256:8c5ec45428edaa7022f1c949a632a6f298edc7b481312fc7dc258921e9399628"}, + {file = "fonttools-4.55.3-cp313-cp313-win_amd64.whl", hash = "sha256:11e5de1ee0d95af4ae23c1a138b184b7f06e0b6abacabf1d0db41c90b03d834b"}, + {file = "fonttools-4.55.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:caf8230f3e10f8f5d7593eb6d252a37caf58c480b19a17e250a63dad63834cf3"}, + {file = "fonttools-4.55.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b586ab5b15b6097f2fb71cafa3c98edfd0dba1ad8027229e7b1e204a58b0e09d"}, + {file = "fonttools-4.55.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a8c2794ded89399cc2169c4d0bf7941247b8d5932b2659e09834adfbb01589aa"}, + {file = "fonttools-4.55.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf4fe7c124aa3f4e4c1940880156e13f2f4d98170d35c749e6b4f119a872551e"}, + {file = "fonttools-4.55.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:86721fbc389ef5cc1e2f477019e5069e8e4421e8d9576e9c26f840dbb04678de"}, + {file = "fonttools-4.55.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:89bdc5d88bdeec1b15af790810e267e8332d92561dce4f0748c2b95c9bdf3926"}, + {file = "fonttools-4.55.3-cp38-cp38-win32.whl", hash = "sha256:bc5dbb4685e51235ef487e4bd501ddfc49be5aede5e40f4cefcccabc6e60fb4b"}, + {file = "fonttools-4.55.3-cp38-cp38-win_amd64.whl", hash = "sha256:cd70de1a52a8ee2d1877b6293af8a2484ac82514f10b1c67c1c5762d38073e56"}, + {file = "fonttools-4.55.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bdcc9f04b36c6c20978d3f060e5323a43f6222accc4e7fcbef3f428e216d96af"}, + {file = "fonttools-4.55.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c3ca99e0d460eff46e033cd3992a969658c3169ffcd533e0a39c63a38beb6831"}, + {file = "fonttools-4.55.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22f38464daa6cdb7b6aebd14ab06609328fe1e9705bb0fcc7d1e69de7109ee02"}, + {file = "fonttools-4.55.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed63959d00b61959b035c7d47f9313c2c1ece090ff63afea702fe86de00dbed4"}, + {file = "fonttools-4.55.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5e8d657cd7326eeaba27de2740e847c6b39dde2f8d7cd7cc56f6aad404ddf0bd"}, + {file = "fonttools-4.55.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fb594b5a99943042c702c550d5494bdd7577f6ef19b0bc73877c948a63184a32"}, + {file = "fonttools-4.55.3-cp39-cp39-win32.whl", hash = "sha256:dc5294a3d5c84226e3dbba1b6f61d7ad813a8c0238fceea4e09aa04848c3d851"}, + {file = "fonttools-4.55.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:aedbeb1db64496d098e6be92b2e63b5fac4e53b1b92032dfc6988e1ea9134a4d"}, + {file = "fonttools-4.55.3-py3-none-any.whl", hash = "sha256:f412604ccbeee81b091b420272841e5ec5ef68967a9790e80bffd0e30b8e2977"}, + {file = "fonttools-4.55.3.tar.gz", hash = "sha256:3983313c2a04d6cc1fe9251f8fc647754cf49a61dac6cb1e7249ae67afaafc45"}, ] [package.extras] @@ -723,13 +710,13 @@ woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "gitdb" -version = "4.0.11" +version = "4.0.12" description = "Git Object Database" optional = false python-versions = ">=3.7" files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, + {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, + {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, ] [package.dependencies] @@ -737,20 +724,20 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.43" +version = "3.1.44" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, - {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, + {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, + {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", 
"sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] [[package]] @@ -851,13 +838,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -883,125 +870,91 @@ typing-extensions = ">=4.5.0" [[package]] name = "kiwisolver" -version = "1.4.7" +version = "1.4.8" description = "A fast implementation of the Cassowary constraint solver" optional = true -python-versions = ">=3.8" +python-versions = ">=3.10" files = [ - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"}, - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"}, - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"}, - {file = 
"kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = 
"sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"}, - {file = 
"kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"}, - {file = 
"kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"}, - {file = 
"kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"}, - {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"}, - {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"}, - {file = 
"kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"}, - {file 
= "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"}, - {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash 
= "sha256:ce2cf1e5688edcb727fdf7cd1bbd0b6416758996826a8be1d958f91880d0809d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8bf637892dc6e6aad2bc6d4d69d08764166e5e3f69d469e55427b6ac001b19d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:034d2c891f76bd3edbdb3ea11140d8510dca675443da7304205a2eaa45d8334c"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47b28d1dfe0793d5e96bce90835e17edf9a499b53969b03c6c47ea5985844c3"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb158fe28ca0c29f2260cca8c43005329ad58452c36f0edf298204de32a9a3ed"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5536185fce131780ebd809f8e623bf4030ce1b161353166c49a3c74c287897f"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:369b75d40abedc1da2c1f4de13f3482cb99e3237b38726710f4a793432b1c5ff"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:641f2ddf9358c80faa22e22eb4c9f54bd3f0e442e038728f500e3b978d00aa7d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d561d2d8883e0819445cfe58d7ddd673e4015c3c57261d7bdcd3710d0d14005c"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1732e065704b47c9afca7ffa272f845300a4eb959276bf6970dc07265e73b605"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcb1ebc3547619c3b58a39e2448af089ea2ef44b37988caf432447374941574e"}, + {file = "kiwisolver-1.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:89c107041f7b27844179ea9c85d6da275aa55ecf28413e87624d033cf1f6b751"}, + {file = "kiwisolver-1.4.8-cp310-cp310-win_arm64.whl", hash = "sha256:b5773efa2be9eb9fcf5415ea3ab70fc785d598729fd6057bea38d539ead28271"}, + {file = 
"kiwisolver-1.4.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a4d3601908c560bdf880f07d94f31d734afd1bb71e96585cace0e38ef44c6d84"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856b269c4d28a5c0d5e6c1955ec36ebfd1651ac00e1ce0afa3e28da95293b561"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2b9a96e0f326205af81a15718a9073328df1173a2619a68553decb7097fd5d7"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5020c83e8553f770cb3b5fc13faac40f17e0b205bd237aebd21d53d733adb03"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dace81d28c787956bfbfbbfd72fdcef014f37d9b48830829e488fdb32b49d954"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11e1022b524bd48ae56c9b4f9296bce77e15a2e42a502cceba602f804b32bb79"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b9b4d2892fefc886f30301cdd80debd8bb01ecdf165a449eb6e78f79f0fabd6"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a96c0e790ee875d65e340ab383700e2b4891677b7fcd30a699146f9384a2bb0"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23454ff084b07ac54ca8be535f4174170c1094a4cff78fbae4f73a4bcc0d4dab"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:87b287251ad6488e95b4f0b4a79a6d04d3ea35fde6340eb38fbd1ca9cd35bbbc"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b21dbe165081142b1232a240fc6383fd32cdd877ca6cc89eab93e5f5883e1c25"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:768cade2c2df13db52475bd28d3a3fac8c9eff04b0e9e2fda0f3760f20b3f7fc"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", 
hash = "sha256:d47cfb2650f0e103d4bf68b0b5804c68da97272c84bb12850d877a95c056bd67"}, + {file = "kiwisolver-1.4.8-cp311-cp311-win_amd64.whl", hash = "sha256:ed33ca2002a779a2e20eeb06aea7721b6e47f2d4b8a8ece979d8ba9e2a167e34"}, + {file = "kiwisolver-1.4.8-cp311-cp311-win_arm64.whl", hash = "sha256:16523b40aab60426ffdebe33ac374457cf62863e330a90a0383639ce14bf44b2"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8"}, + {file = "kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50"}, + {file = "kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1c8ceb754339793c24aee1c9fb2485b5b1f5bb1c2c214ff13368431e51fc9a09"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a62808ac74b5e55a04a408cda6156f986cefbcf0ada13572696b507cc92fa1"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68269e60ee4929893aad82666821aaacbd455284124817af45c11e50a4b42e3c"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d142fba9c464bc3bbfeff15c96eab0e7310343d6aefb62a79d51421fcc5f1b"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc373e0eef45b59197de815b1b28ef89ae3955e7722cc9710fb91cd77b7f47"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77e6f57a20b9bd4e1e2cedda4d0b986ebd0216236f0106e55c28aea3d3d69b16"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08e77738ed7538f036cd1170cbed942ef749137b1311fa2bbe2a7fda2f6bf3cc"}, + {file = 
"kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5ce1e481a74b44dd5e92ff03ea0cb371ae7a0268318e202be06c8f04f4f1246"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fc2ace710ba7c1dfd1a3b42530b62b9ceed115f19a1656adefce7b1782a37794"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3452046c37c7692bd52b0e752b87954ef86ee2224e624ef7ce6cb21e8c41cc1b"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7e9a60b50fe8b2ec6f448fe8d81b07e40141bfced7f896309df271a0b92f80f3"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:918139571133f366e8362fa4a297aeba86c7816b7ecf0bc79168080e2bd79957"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e063ef9f89885a1d68dd8b2e18f5ead48653176d10a0e324e3b0030e3a69adeb"}, + {file = "kiwisolver-1.4.8-cp313-cp313-win_amd64.whl", hash = "sha256:a17b7c4f5b2c51bb68ed379defd608a03954a1845dfed7cc0117f1cc8a9b7fd2"}, + {file = "kiwisolver-1.4.8-cp313-cp313-win_arm64.whl", hash = "sha256:3cd3bc628b25f74aedc6d374d5babf0166a92ff1317f46267f12d2ed54bc1d30"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:370fd2df41660ed4e26b8c9d6bbcad668fbe2560462cba151a721d49e5b6628c"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:84a2f830d42707de1d191b9490ac186bf7997a9495d4e9072210a1296345f7dc"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7a3ad337add5148cf51ce0b55642dc551c0b9d6248458a757f98796ca7348712"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7506488470f41169b86d8c9aeff587293f530a23a23a49d6bc64dab66bedc71e"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2f0121b07b356a22fb0414cec4666bbe36fd6d0d759db3d37228f496ed67c880"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6d6bd87df62c27d4185de7c511c6248040afae67028a8a22012b010bc7ad062"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:291331973c64bb9cce50bbe871fb2e675c4331dab4f31abe89f175ad7679a4d7"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:893f5525bb92d3d735878ec00f781b2de998333659507d29ea4466208df37bed"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b47a465040146981dc9db8647981b8cb96366fbc8d452b031e4f8fdffec3f26d"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:99cea8b9dd34ff80c521aef46a1dddb0dcc0283cf18bde6d756f1e6f31772165"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:151dffc4865e5fe6dafce5480fab84f950d14566c480c08a53c663a0020504b6"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:577facaa411c10421314598b50413aa1ebcf5126f704f1e5d72d7e4e9f020d90"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:be4816dc51c8a471749d664161b434912eee82f2ea66bd7628bd14583a833e85"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e7a019419b7b510f0f7c9dceff8c5eae2392037eae483a7f9162625233802b0a"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:286b18e86682fd2217a48fc6be6b0f20c1d0ed10958d8dc53453ad58d7be0bf8"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4191ee8dfd0be1c3666ccbac178c5a05d5f8d689bbe3fc92f3c4abec817f8fe0"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7cd2785b9391f2873ad46088ed7599a6a71e762e1ea33e87514b1a441ed1da1c"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b29089b7ba090b6f1a669f1411f27221c3662b3a1b7010e67b59bb5a6f10b"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b"}, + {file = "kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e"}, ] [[package]] @@ -1100,52 +1053,45 @@ files = [ [[package]] name = "matplotlib" -version = "3.9.3" +version = "3.10.0" description = "Python plotting package" optional = true -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "matplotlib-3.9.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:41b016e3be4e740b66c79a031a0a6e145728dbc248142e751e8dab4f3188ca1d"}, - {file = "matplotlib-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e0143975fc2a6d7136c97e19c637321288371e8f09cff2564ecd73e865ea0b9"}, - {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f459c8ee2c086455744723628264e43c884be0c7d7b45d84b8cd981310b4815"}, - {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:687df7ceff57b8f070d02b4db66f75566370e7ae182a0782b6d3d21b0d6917dc"}, - {file = "matplotlib-3.9.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:edd14cf733fdc4f6e6fe3f705af97676a7e52859bf0044aa2c84e55be739241c"}, - {file = "matplotlib-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:1c40c244221a1adbb1256692b1133c6fb89418df27bf759a31a333e7912a4010"}, - {file = "matplotlib-3.9.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cf2a60daf6cecff6828bc608df00dbc794380e7234d2411c0ec612811f01969d"}, - {file = "matplotlib-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:213d6dc25ce686516208d8a3e91120c6a4fdae4a3e06b8505ced5b716b50cc04"}, - {file = 
"matplotlib-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c52f48eb75fcc119a4fdb68ba83eb5f71656999420375df7c94cc68e0e14686e"}, - {file = "matplotlib-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3c93796b44fa111049b88a24105e947f03c01966b5c0cc782e2ee3887b790a3"}, - {file = "matplotlib-3.9.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cd1077b9a09b16d8c3c7075a8add5ffbfe6a69156a57e290c800ed4d435bef1d"}, - {file = "matplotlib-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:c96eeeb8c68b662c7747f91a385688d4b449687d29b691eff7068a4602fe6dc4"}, - {file = "matplotlib-3.9.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a361bd5583bf0bcc08841df3c10269617ee2a36b99ac39d455a767da908bbbc"}, - {file = "matplotlib-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e14485bb1b83eeb3d55b6878f9560240981e7bbc7a8d4e1e8c38b9bd6ec8d2de"}, - {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d279f78844aad213c4935c18f8292a9432d51af2d88bca99072c903948045"}, - {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6c12514329ac0d03128cf1dcceb335f4fbf7c11da98bca68dca8dcb983153a9"}, - {file = "matplotlib-3.9.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6e9de2b390d253a508dd497e9b5579f3a851f208763ed67fdca5dc0c3ea6849c"}, - {file = "matplotlib-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d796272408f8567ff7eaa00eb2856b3a00524490e47ad505b0b4ca6bb8a7411f"}, - {file = "matplotlib-3.9.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:203d18df84f5288973b2d56de63d4678cc748250026ca9e1ad8f8a0fd8a75d83"}, - {file = "matplotlib-3.9.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b651b0d3642991259109dc0351fc33ad44c624801367bb8307be9bfc35e427ad"}, - {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:66d7b171fecf96940ce069923a08ba3df33ef542de82c2ff4fe8caa8346fa95a"}, - {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be0ba61f6ff2e6b68e4270fb63b6813c9e7dec3d15fc3a93f47480444fd72f0"}, - {file = "matplotlib-3.9.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d6b2e8856dec3a6db1ae51aec85c82223e834b228c1d3228aede87eee2b34f9"}, - {file = "matplotlib-3.9.3-cp313-cp313-win_amd64.whl", hash = "sha256:90a85a004fefed9e583597478420bf904bb1a065b0b0ee5b9d8d31b04b0f3f70"}, - {file = "matplotlib-3.9.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3119b2f16de7f7b9212ba76d8fe6a0e9f90b27a1e04683cd89833a991682f639"}, - {file = "matplotlib-3.9.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:87ad73763d93add1b6c1f9fcd33af662fd62ed70e620c52fcb79f3ac427cf3a6"}, - {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:026bdf3137ab6022c866efa4813b6bbeddc2ed4c9e7e02f0e323a7bca380dfa0"}, - {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760a5e89ebbb172989e8273024a1024b0f084510b9105261b3b00c15e9c9f006"}, - {file = "matplotlib-3.9.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a42b9dc42de2cfe357efa27d9c50c7833fc5ab9b2eb7252ccd5d5f836a84e1e4"}, - {file = "matplotlib-3.9.3-cp313-cp313t-win_amd64.whl", hash = "sha256:e0fcb7da73fbf67b5f4bdaa57d85bb585a4e913d4a10f3e15b32baea56a67f0a"}, - {file = "matplotlib-3.9.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:031b7f5b8e595cc07def77ec5b58464e9bb67dc5760be5d6f26d9da24892481d"}, - {file = "matplotlib-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fa6e193c14d6944e0685cdb527cb6b38b0e4a518043e7212f214113af7391da"}, - {file = "matplotlib-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6eefae6effa0c35bbbc18c25ee6e0b1da44d2359c3cd526eb0c9e703cf055d"}, - {file = 
"matplotlib-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d3e5c7a99bd28afb957e1ae661323b0800d75b419f24d041ed1cc5d844a764"}, - {file = "matplotlib-3.9.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:816a966d5d376bf24c92af8f379e78e67278833e4c7cbc9fa41872eec629a060"}, - {file = "matplotlib-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fb0b37c896172899a4a93d9442ffdc6f870165f59e05ce2e07c6fded1c15749"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f2a4ea08e6876206d511365b0bc234edc813d90b930be72c3011bbd7898796f"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b081dac96ab19c54fd8558fac17c9d2c9cb5cc4656e7ed3261ddc927ba3e2c5"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a0a63cb8404d1d1f94968ef35738900038137dab8af836b6c21bb6f03d75465"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:896774766fd6be4571a43bc2fcbcb1dcca0807e53cab4a5bf88c4aa861a08e12"}, - {file = "matplotlib-3.9.3.tar.gz", hash = "sha256:cd5dbbc8e25cad5f706845c4d100e2c8b34691b412b93717ce38d8ae803bcfa5"}, + {file = "matplotlib-3.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2c5829a5a1dd5a71f0e31e6e8bb449bc0ee9dbfb05ad28fc0c6b55101b3a4be6"}, + {file = "matplotlib-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2a43cbefe22d653ab34bb55d42384ed30f611bcbdea1f8d7f431011a2e1c62e"}, + {file = "matplotlib-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:607b16c8a73943df110f99ee2e940b8a1cbf9714b65307c040d422558397dac5"}, + {file = "matplotlib-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01d2b19f13aeec2e759414d3bfe19ddfb16b13a1250add08d46d5ff6f9be83c6"}, + {file = "matplotlib-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e6c6461e1fc63df30bf6f80f0b93f5b6784299f721bc28530477acd51bfc3d1"}, + {file = 
"matplotlib-3.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:994c07b9d9fe8d25951e3202a68c17900679274dadfc1248738dcfa1bd40d7f3"}, + {file = "matplotlib-3.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:fd44fc75522f58612ec4a33958a7e5552562b7705b42ef1b4f8c0818e304a363"}, + {file = "matplotlib-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c58a9622d5dbeb668f407f35f4e6bfac34bb9ecdcc81680c04d0258169747997"}, + {file = "matplotlib-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:845d96568ec873be63f25fa80e9e7fae4be854a66a7e2f0c8ccc99e94a8bd4ef"}, + {file = "matplotlib-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5439f4c5a3e2e8eab18e2f8c3ef929772fd5641876db71f08127eed95ab64683"}, + {file = "matplotlib-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4673ff67a36152c48ddeaf1135e74ce0d4bce1bbf836ae40ed39c29edf7e2765"}, + {file = "matplotlib-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e8632baebb058555ac0cde75db885c61f1212e47723d63921879806b40bec6a"}, + {file = "matplotlib-3.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4659665bc7c9b58f8c00317c3c2a299f7f258eeae5a5d56b4c64226fca2f7c59"}, + {file = "matplotlib-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d44cb942af1693cced2604c33a9abcef6205601c445f6d0dc531d813af8a2f5a"}, + {file = "matplotlib-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a994f29e968ca002b50982b27168addfd65f0105610b6be7fa515ca4b5307c95"}, + {file = "matplotlib-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b0558bae37f154fffda54d779a592bc97ca8b4701f1c710055b609a3bac44c8"}, + {file = "matplotlib-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:503feb23bd8c8acc75541548a1d709c059b7184cde26314896e10a9f14df5f12"}, + {file = "matplotlib-3.10.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:c40ba2eb08b3f5de88152c2333c58cee7edcead0a2a0d60fcafa116b17117adc"}, + {file = "matplotlib-3.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96f2886f5c1e466f21cc41b70c5a0cd47bfa0015eb2d5793c88ebce658600e25"}, + {file = "matplotlib-3.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:12eaf48463b472c3c0f8dbacdbf906e573013df81a0ab82f0616ea4b11281908"}, + {file = "matplotlib-3.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fbbabc82fde51391c4da5006f965e36d86d95f6ee83fb594b279564a4c5d0d2"}, + {file = "matplotlib-3.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad2e15300530c1a94c63cfa546e3b7864bd18ea2901317bae8bbf06a5ade6dcf"}, + {file = "matplotlib-3.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3547d153d70233a8496859097ef0312212e2689cdf8d7ed764441c77604095ae"}, + {file = "matplotlib-3.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:c55b20591ced744aa04e8c3e4b7543ea4d650b6c3c4b208c08a05b4010e8b442"}, + {file = "matplotlib-3.10.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ade1003376731a971e398cc4ef38bb83ee8caf0aee46ac6daa4b0506db1fd06"}, + {file = "matplotlib-3.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95b710fea129c76d30be72c3b38f330269363fbc6e570a5dd43580487380b5ff"}, + {file = "matplotlib-3.10.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdbaf909887373c3e094b0318d7ff230b2ad9dcb64da7ade654182872ab2593"}, + {file = "matplotlib-3.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d907fddb39f923d011875452ff1eca29a9e7f21722b873e90db32e5d8ddff12e"}, + {file = "matplotlib-3.10.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3b427392354d10975c1d0f4ee18aa5844640b512d5311ef32efd4dd7db106ede"}, + {file = "matplotlib-3.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5fd41b0ec7ee45cd960a8e71aea7c946a28a0b8a4dcee47d2856b2af051f334c"}, + {file = 
"matplotlib-3.10.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:81713dd0d103b379de4516b861d964b1d789a144103277769238c732229d7f03"}, + {file = "matplotlib-3.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:359f87baedb1f836ce307f0e850d12bb5f1936f70d035561f90d41d305fdacea"}, + {file = "matplotlib-3.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae80dc3a4add4665cf2faa90138384a7ffe2a4e37c58d83e115b54287c4f06ef"}, + {file = "matplotlib-3.10.0.tar.gz", hash = "sha256:b886d02a581b96704c9d1ffe55709e49b4d2d52709ccebc4be42db856e511278"}, ] [package.dependencies] @@ -1160,7 +1106,7 @@ pyparsing = ">=2.3.1" python-dateutil = ">=2.7" [package.extras] -dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] +dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] [[package]] name = "mdurl" @@ -1248,66 +1194,66 @@ files = [ [[package]] name = "numpy" -version = "2.2.0" +version = "2.2.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, - {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, - {file = 
"numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, - {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, - {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, - {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, - {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, - {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = 
"sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, - {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, - {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, - {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, - {file = 
"numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, - {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, - {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, - {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, - {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, - {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, - {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, - {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, + {file = "numpy-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7079129b64cb78bdc8d611d1fd7e8002c0a2565da6a47c4df8062349fee90e3e"}, + {file = "numpy-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec6c689c61df613b783aeb21f945c4cbe6c51c28cb70aae8430577ab39f163e"}, + {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:40c7ff5da22cd391944a28c6a9c638a5eef77fcf71d6e3a79e1d9d9e82752715"}, + {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:995f9e8181723852ca458e22de5d9b7d3ba4da3f11cc1cb113f093b271d7965a"}, + {file = 
"numpy-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b78ea78450fd96a498f50ee096f69c75379af5138f7881a51355ab0e11286c97"}, + {file = "numpy-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fbe72d347fbc59f94124125e73fc4976a06927ebc503ec5afbfb35f193cd957"}, + {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8e6da5cffbbe571f93588f562ed130ea63ee206d12851b60819512dd3e1ba50d"}, + {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:09d6a2032faf25e8d0cadde7fd6145118ac55d2740132c1d845f98721b5ebcfd"}, + {file = "numpy-2.2.2-cp310-cp310-win32.whl", hash = "sha256:159ff6ee4c4a36a23fe01b7c3d07bd8c14cc433d9720f977fcd52c13c0098160"}, + {file = "numpy-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:64bd6e1762cd7f0986a740fee4dff927b9ec2c5e4d9a28d056eb17d332158014"}, + {file = "numpy-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:642199e98af1bd2b6aeb8ecf726972d238c9877b0f6e8221ee5ab945ec8a2189"}, + {file = "numpy-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d9fc9d812c81e6168b6d405bf00b8d6739a7f72ef22a9214c4241e0dc70b323"}, + {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c7d1fd447e33ee20c1f33f2c8e6634211124a9aabde3c617687d8b739aa69eac"}, + {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:451e854cfae0febe723077bd0cf0a4302a5d84ff25f0bfece8f29206c7bed02e"}, + {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd249bc894af67cbd8bad2c22e7cbcd46cf87ddfca1f1289d1e7e54868cc785c"}, + {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02935e2c3c0c6cbe9c7955a8efa8908dd4221d7755644c59d1bba28b94fd334f"}, + {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a972cec723e0563aa0823ee2ab1df0cb196ed0778f173b381c871a03719d4826"}, + {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash 
= "sha256:d6d6a0910c3b4368d89dde073e630882cdb266755565155bc33520283b2d9df8"}, + {file = "numpy-2.2.2-cp311-cp311-win32.whl", hash = "sha256:860fd59990c37c3ef913c3ae390b3929d005243acca1a86facb0773e2d8d9e50"}, + {file = "numpy-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:da1eeb460ecce8d5b8608826595c777728cdf28ce7b5a5a8c8ac8d949beadcf2"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ac9bea18d6d58a995fac1b2cb4488e17eceeac413af014b1dd26170b766d8467"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23ae9f0c2d889b7b2d88a3791f6c09e2ef827c2446f1c4a3e3e76328ee4afd9a"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:3074634ea4d6df66be04f6728ee1d173cfded75d002c75fac79503a880bf3825"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ec0636d3f7d68520afc6ac2dc4b8341ddb725039de042faf0e311599f54eb37"}, + {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ffbb1acd69fdf8e89dd60ef6182ca90a743620957afb7066385a7bbe88dc748"}, + {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0349b025e15ea9d05c3d63f9657707a4e1d471128a3b1d876c095f328f8ff7f0"}, + {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:463247edcee4a5537841d5350bc87fe8e92d7dd0e8c71c995d2c6eecb8208278"}, + {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9dd47ff0cb2a656ad69c38da850df3454da88ee9a6fde0ba79acceee0e79daba"}, + {file = "numpy-2.2.2-cp312-cp312-win32.whl", hash = "sha256:4525b88c11906d5ab1b0ec1f290996c0020dd318af8b49acaa46f198b1ffc283"}, + {file = "numpy-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:5acea83b801e98541619af398cc0109ff48016955cc0818f478ee9ef1c5c3dcb"}, + {file = "numpy-2.2.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b208cfd4f5fe34e1535c08983a1a6803fdbc7a1e86cf13dd0c61de0b51a0aadc"}, + {file = 
"numpy-2.2.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d0bbe7dd86dca64854f4b6ce2ea5c60b51e36dfd597300057cf473d3615f2369"}, + {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:22ea3bb552ade325530e72a0c557cdf2dea8914d3a5e1fecf58fa5dbcc6f43cd"}, + {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:128c41c085cab8a85dc29e66ed88c05613dccf6bc28b3866cd16050a2f5448be"}, + {file = "numpy-2.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:250c16b277e3b809ac20d1f590716597481061b514223c7badb7a0f9993c7f84"}, + {file = "numpy-2.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c8854b09bc4de7b041148d8550d3bd712b5c21ff6a8ed308085f190235d7ff"}, + {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b6fb9c32a91ec32a689ec6410def76443e3c750e7cfc3fb2206b985ffb2b85f0"}, + {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:57b4012e04cc12b78590a334907e01b3a85efb2107df2b8733ff1ed05fce71de"}, + {file = "numpy-2.2.2-cp313-cp313-win32.whl", hash = "sha256:4dbd80e453bd34bd003b16bd802fac70ad76bd463f81f0c518d1245b1c55e3d9"}, + {file = "numpy-2.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:5a8c863ceacae696aff37d1fd636121f1a512117652e5dfb86031c8d84836369"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b3482cb7b3325faa5f6bc179649406058253d91ceda359c104dac0ad320e1391"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9491100aba630910489c1d0158034e1c9a6546f0b1340f716d522dc103788e39"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:41184c416143defa34cc8eb9d070b0a5ba4f13a0fa96a709e20584638254b317"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:7dca87ca328f5ea7dafc907c5ec100d187911f94825f8700caac0b3f4c384b49"}, + {file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0bc61b307655d1a7f9f4b043628b9f2b721e80839914ede634e3d485913e1fb2"}, + {file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fad446ad0bc886855ddf5909cbf8cb5d0faa637aaa6277fb4b19ade134ab3c7"}, + {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:149d1113ac15005652e8d0d3f6fd599360e1a708a4f98e43c9c77834a28238cb"}, + {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:106397dbbb1896f99e044efc90360d098b3335060375c26aa89c0d8a97c5f648"}, + {file = "numpy-2.2.2-cp313-cp313t-win32.whl", hash = "sha256:0eec19f8af947a61e968d5429f0bd92fec46d92b0008d0a6685b40d6adf8a4f4"}, + {file = "numpy-2.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:97b974d3ba0fb4612b77ed35d7627490e8e3dff56ab41454d9e8b23448940576"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b0531f0b0e07643eb089df4c509d30d72c9ef40defa53e41363eca8a8cc61495"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e9e82dcb3f2ebbc8cb5ce1102d5f1c5ed236bf8a11730fb45ba82e2841ec21df"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d4142eb40ca6f94539e4db929410f2a46052a0fe7a2c1c59f6179c39938d2a"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:356ca982c188acbfa6af0d694284d8cf20e95b1c3d0aefa8929376fea9146f60"}, + {file = "numpy-2.2.2.tar.gz", hash = "sha256:ed6906f61834d687738d25988ae117683705636936cc605be0bb208b23df4d8f"}, ] [[package]] @@ -1420,93 +1366,89 @@ xml = ["lxml (>=4.9.2)"] [[package]] name = "pillow" -version = "11.0.0" +version = "11.1.0" description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" files = [ - {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, - {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, - {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, - {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, - {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, - {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, - {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, - {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, - {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, - {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, - {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, - {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, - {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, - {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, - {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, - {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, 
- {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, - {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, - {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, - {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, - {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, - {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, - {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, - {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, - {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, - {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = 
"sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, - {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, - {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, - {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, - {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, - {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, - {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, - {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, - {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, - {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, - {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, - {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, - {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, - {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, - {file = 
"pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, - {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, - {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, - {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, - {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, - {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, - {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, - {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, - {file = 
"pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, - {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, + {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, + {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07dba04c5e22824816b2615ad7a7484432d7f540e6fa86af60d2de57b0fcee2"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e267b0ed063341f3e60acd25c05200df4193e15a4a5807075cd71225a2386e26"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bd165131fd51697e22421d0e467997ad31621b74bfc0b75956608cb2906dda07"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:abc56501c3fd148d60659aae0af6ddc149660469082859fa7b066a298bde9482"}, + {file = 
"pillow-11.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:54ce1c9a16a9561b6d6d8cb30089ab1e5eb66918cb47d457bd996ef34182922e"}, + {file = "pillow-11.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:73ddde795ee9b06257dac5ad42fcb07f3b9b813f8c1f7f870f402f4dc54b5269"}, + {file = "pillow-11.1.0-cp310-cp310-win32.whl", hash = "sha256:3a5fe20a7b66e8135d7fd617b13272626a28278d0e578c98720d9ba4b2439d49"}, + {file = "pillow-11.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6123aa4a59d75f06e9dd3dac5bf8bc9aa383121bb3dd9a7a612e05eabc9961a"}, + {file = "pillow-11.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:a76da0a31da6fcae4210aa94fd779c65c75786bc9af06289cd1c184451ef7a65"}, + {file = "pillow-11.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457"}, + {file = "pillow-11.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1"}, + {file = "pillow-11.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2"}, + {file = "pillow-11.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96"}, + {file = "pillow-11.1.0-cp311-cp311-win32.whl", hash = 
"sha256:c12fc111ef090845de2bb15009372175d76ac99969bdf31e2ce9b42e4b8cd88f"}, + {file = "pillow-11.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd43429d0d7ed6533b25fc993861b8fd512c42d04514a0dd6337fb3ccf22761"}, + {file = "pillow-11.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f7955ecf5609dee9442cbface754f2c6e541d9e6eda87fad7f7a989b0bdb9d71"}, + {file = "pillow-11.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a"}, + {file = "pillow-11.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f"}, + {file = "pillow-11.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91"}, + {file = "pillow-11.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c"}, + {file = "pillow-11.1.0-cp312-cp312-win32.whl", hash = "sha256:cfd5cd998c2e36a862d0e27b2df63237e67273f2fc78f47445b14e73a810e7e6"}, + {file = "pillow-11.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a697cd8ba0383bba3d2d3ada02b34ed268cb548b369943cd349007730c92bddf"}, + {file = "pillow-11.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:4dd43a78897793f60766563969442020e90eb7847463eca901e41ba186a7d4a5"}, + {file = 
"pillow-11.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae98e14432d458fc3de11a77ccb3ae65ddce70f730e7c76140653048c71bfcbc"}, + {file = "pillow-11.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cc1331b6d5a6e144aeb5e626f4375f5b7ae9934ba620c0ac6b3e43d5e683a0f0"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:758e9d4ef15d3560214cddbc97b8ef3ef86ce04d62ddac17ad39ba87e89bd3b1"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b523466b1a31d0dcef7c5be1f20b942919b62fd6e9a9be199d035509cbefc0ec"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:9044b5e4f7083f209c4e35aa5dd54b1dd5b112b108648f5c902ad586d4f945c5"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:3764d53e09cdedd91bee65c2527815d315c6b90d7b8b79759cc48d7bf5d4f114"}, + {file = "pillow-11.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31eba6bbdd27dde97b0174ddf0297d7a9c3a507a8a1480e1e60ef914fe23d352"}, + {file = "pillow-11.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b5d658fbd9f0d6eea113aea286b21d3cd4d3fd978157cbf2447a6035916506d3"}, + {file = "pillow-11.1.0-cp313-cp313-win32.whl", hash = "sha256:f86d3a7a9af5d826744fabf4afd15b9dfef44fe69a98541f666f66fbb8d3fef9"}, + {file = "pillow-11.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:593c5fd6be85da83656b93ffcccc2312d2d149d251e98588b14fbc288fd8909c"}, + {file = "pillow-11.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:11633d58b6ee5733bde153a8dafd25e505ea3d32e261accd388827ee987baf65"}, + {file = "pillow-11.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:70ca5ef3b3b1c4a0812b5c63c57c23b63e53bc38e758b37a951e5bc466449861"}, + {file = "pillow-11.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8000376f139d4d38d6851eb149b321a52bb8893a88dae8ee7d95840431977081"}, + {file = "pillow-11.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:9ee85f0696a17dd28fbcfceb59f9510aa71934b483d1f5601d1030c3c8304f3c"}, + {file = "pillow-11.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:dd0e081319328928531df7a0e63621caf67652c8464303fd102141b785ef9547"}, + {file = "pillow-11.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e63e4e5081de46517099dc30abe418122f54531a6ae2ebc8680bcd7096860eab"}, + {file = "pillow-11.1.0-cp313-cp313t-win32.whl", hash = "sha256:dda60aa465b861324e65a78c9f5cf0f4bc713e4309f83bc387be158b077963d9"}, + {file = "pillow-11.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ad5db5781c774ab9a9b2c4302bbf0c1014960a0a7be63278d13ae6fdf88126fe"}, + {file = "pillow-11.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:67cd427c68926108778a9005f2a04adbd5e67c442ed21d95389fe1d595458756"}, + {file = "pillow-11.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:bf902d7413c82a1bfa08b06a070876132a5ae6b2388e2712aab3a7cbc02205c6"}, + {file = "pillow-11.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c1eec9d950b6fe688edee07138993e54ee4ae634c51443cfb7c1e7613322718e"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e275ee4cb11c262bd108ab2081f750db2a1c0b8c12c1897f27b160c8bd57bbc"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db853948ce4e718f2fc775b75c37ba2efb6aaea41a1a5fc57f0af59eee774b2"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ab8a209b8485d3db694fa97a896d96dd6533d63c22829043fd9de627060beade"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:54251ef02a2309b5eec99d151ebf5c9904b77976c8abdcbce7891ed22df53884"}, + {file = "pillow-11.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5bb94705aea800051a743aa4874bb1397d4695fb0583ba5e425ee0328757f196"}, + {file = "pillow-11.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89dbdb3e6e9594d512780a5a1c42801879628b38e3efc7038094430844e271d8"}, + {file = 
"pillow-11.1.0-cp39-cp39-win32.whl", hash = "sha256:e5449ca63da169a2e6068dd0e2fcc8d91f9558aba89ff6d02121ca8ab11e79e5"}, + {file = "pillow-11.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:3362c6ca227e65c54bf71a5f88b3d4565ff1bcbc63ae72c34b07bbb1cc59a43f"}, + {file = "pillow-11.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:b20be51b37a75cc54c2c55def3fa2c65bb94ba859dde241cd0a4fd302de5ae0a"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8c730dc3a83e5ac137fbc92dfcfe1511ce3b2b5d7578315b63dbbb76f7f51d90"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d33d2fae0e8b170b6a6c57400e077412240f6f5bb2a342cf1ee512a787942bb"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8d65b38173085f24bc07f8b6c505cbb7418009fa1a1fcb111b1f4961814a442"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:015c6e863faa4779251436db398ae75051469f7c903b043a48f078e437656f83"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d44ff19eea13ae4acdaaab0179fa68c0c6f2f45d66a4d8ec1eda7d6cecbcc15f"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3d8da4a631471dfaf94c10c85f5277b1f8e42ac42bade1ac67da4b4a7359b73"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0"}, + {file = "pillow-11.1.0.tar.gz", hash = "sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", 
"markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] typing = ["typing-extensions"] xmp = ["defusedxml"] @@ -1556,13 +1498,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.36" +version = "3.0.49" description = "Library for building powerful interactive command lines in Python" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.8.0" files = [ - {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, - {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, + {file = "prompt_toolkit-3.0.49-py3-none-any.whl", hash = "sha256:03b25a442496d1aeabcb4b8b8e94563ac6a8eff56467837367206a733c61c2de"}, + {file = "prompt_toolkit-3.0.49.tar.gz", hash = "sha256:372cfcbff483cd5fac6f7870d21face30f1e2f217c6906721616fb1be778774d"}, ] [package.dependencies] @@ -1570,32 +1512,32 @@ wcwidth = "*" [[package]] name = "psutil" -version = "6.1.0" +version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, - {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, - {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, - {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, - {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, - {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, - {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, - {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, - {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, - {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = 
"sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, - {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, - {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, - {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, - {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, + {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"}, + {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"}, + {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"}, + {file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"}, + {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"}, + {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"}, + {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"}, + {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"}, ] [package.extras] -dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] [[package]] @@ -1622,18 +1564,18 @@ files = [ [[package]] name = "pydantic" -version = "2.10.3" +version = "2.10.5" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, - {file = "pydantic-2.10.3.tar.gz", 
hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, + {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, + {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.1" +pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" [package.extras] @@ -1642,111 +1584,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = 
"pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = 
"pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, 
- {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = 
"pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", 
hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = 
"sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -1754,13 +1696,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -1802,13 +1744,13 @@ test = ["pytest (>=3.6)", "pytest-cov", "pytest-runner"] [[package]] name = "pyparsing" -version = "3.2.0" +version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.9" files = [ - {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, - {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, + {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, + {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, ] [package.extras] @@ -1965,17 +1907,17 @@ files = [ [[package]] name = "questionary" -version = "2.0.1" +version = "2.1.0" description = "Python library to build pretty command line user prompts ⭐️" optional = false python-versions = ">=3.8" files = [ - {file = "questionary-2.0.1-py3-none-any.whl", hash = "sha256:8ab9a01d0b91b68444dff7f6652c1e754105533f083cbe27597c8110ecc230a2"}, - {file = "questionary-2.0.1.tar.gz", hash = "sha256:bcce898bf3dbb446ff62830c86c5c6fb9a22a54146f0f5597d3da43b10d8fc8b"}, + {file = "questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec"}, + 
{file = "questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587"}, ] [package.dependencies] -prompt_toolkit = ">=2.0,<=3.0.36" +prompt_toolkit = ">=2.0,<4.0" [[package]] name = "randomname" @@ -1992,99 +1934,99 @@ fire = "*" [[package]] name = "rapidfuzz" -version = "3.10.1" +version = "3.11.0" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.9" files = [ - {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f17d9f21bf2f2f785d74f7b0d407805468b4c173fa3e52c86ec94436b338e74a"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b31f358a70efc143909fb3d75ac6cd3c139cd41339aa8f2a3a0ead8315731f2b"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4f43f2204b56a61448ec2dd061e26fd344c404da99fb19f3458200c5874ba2"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d81bf186a453a2757472133b24915768abc7c3964194406ed93e170e16c21cb"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3611c8f45379a12063d70075c75134f2a8bd2e4e9b8a7995112ddae95ca1c982"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c3b537b97ac30da4b73930fa8a4fe2f79c6d1c10ad535c5c09726612cd6bed9"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231ef1ec9cf7b59809ce3301006500b9d564ddb324635f4ea8f16b3e2a1780da"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed4f3adc1294834955b7e74edd3c6bd1aad5831c007f2d91ea839e76461a5879"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b6015da2e707bf632a71772a2dbf0703cff6525732c005ad24987fe86e8ec32"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:1b35a118d61d6f008e8e3fb3a77674d10806a8972c7b8be433d6598df4d60b01"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bc308d79a7e877226f36bdf4e149e3ed398d8277c140be5c1fd892ec41739e6d"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f017dbfecc172e2d0c37cf9e3d519179d71a7f16094b57430dffc496a098aa17"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-win32.whl", hash = "sha256:36c0e1483e21f918d0f2f26799fe5ac91c7b0c34220b73007301c4f831a9c4c7"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:10746c1d4c8cd8881c28a87fd7ba0c9c102346dfe7ff1b0d021cdf093e9adbff"}, - {file = "rapidfuzz-3.10.1-cp310-cp310-win_arm64.whl", hash = "sha256:dfa64b89dcb906835e275187569e51aa9d546a444489e97aaf2cc84011565fbe"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:92958ae075c87fef393f835ed02d4fe8d5ee2059a0934c6c447ea3417dfbf0e8"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba7521e072c53e33c384e78615d0718e645cab3c366ecd3cc8cb732befd94967"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d02cbd75d283c287471b5b3738b3e05c9096150f93f2d2dfa10b3d700f2db9"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efa1582a397da038e2f2576c9cd49b842f56fde37d84a6b0200ffebc08d82350"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f12912acee1f506f974f58de9fdc2e62eea5667377a7e9156de53241c05fdba8"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666d5d8b17becc3f53447bcb2b6b33ce6c2df78792495d1fa82b2924cd48701a"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26f71582c0d62445067ee338ddad99b655a8f4e4ed517a90dcbfbb7d19310474"}, - {file = 
"rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8a2ef08b27167bcff230ffbfeedd4c4fa6353563d6aaa015d725dd3632fc3de7"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:365e4fc1a2b95082c890f5e98489b894e6bf8c338c6ac89bb6523c2ca6e9f086"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1996feb7a61609fa842e6b5e0c549983222ffdedaf29644cc67e479902846dfe"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:cf654702f144beaa093103841a2ea6910d617d0bb3fccb1d1fd63c54dde2cd49"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec108bf25de674781d0a9a935030ba090c78d49def3d60f8724f3fc1e8e75024"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-win32.whl", hash = "sha256:031f8b367e5d92f7a1e27f7322012f3c321c3110137b43cc3bf678505583ef48"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:f98f36c6a1bb9a6c8bbec99ad87c8c0e364f34761739b5ea9adf7b48129ae8cf"}, - {file = "rapidfuzz-3.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:f1da2028cb4e41be55ee797a82d6c1cf589442504244249dfeb32efc608edee7"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1340b56340896bede246f612b6ecf685f661a56aabef3d2512481bfe23ac5835"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2316515169b7b5a453f0ce3adbc46c42aa332cae9f2edb668e24d1fc92b2f2bb"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e06fe6a12241ec1b72c0566c6b28cda714d61965d86569595ad24793d1ab259"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d99c1cd9443b19164ec185a7d752f4b4db19c066c136f028991a480720472e23"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1d9aa156ed52d3446388ba4c2f335e312191d1ca9d1f5762ee983cf23e4ecf6"}, - {file = 
"rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54bcf4efaaee8e015822be0c2c28214815f4f6b4f70d8362cfecbd58a71188ac"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c955e32afdbfdf6e9ee663d24afb25210152d98c26d22d399712d29a9b976b"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:191633722203f5b7717efcb73a14f76f3b124877d0608c070b827c5226d0b972"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:195baad28057ec9609e40385991004e470af9ef87401e24ebe72c064431524ab"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0fff4a6b87c07366662b62ae994ffbeadc472e72f725923f94b72a3db49f4671"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4ffed25f9fdc0b287f30a98467493d1e1ce5b583f6317f70ec0263b3c97dbba6"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d02cf8e5af89a9ac8f53c438ddff6d773f62c25c6619b29db96f4aae248177c0"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-win32.whl", hash = "sha256:f3bb81d4fe6a5d20650f8c0afcc8f6e1941f6fecdb434f11b874c42467baded0"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:aaf83e9170cb1338922ae42d320699dccbbdca8ffed07faeb0b9257822c26e24"}, - {file = "rapidfuzz-3.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:c5da802a0d085ad81b0f62828fb55557996c497b2d0b551bbdfeafd6d447892f"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc22d69a1c9cccd560a5c434c0371b2df0f47c309c635a01a913e03bbf183710"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38b0dac2c8e057562b8f0d8ae5b663d2d6a28c5ab624de5b73cef9abb6129a24"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fde3bbb14e92ce8fcb5c2edfff72e474d0080cadda1c97785bf4822f037a309"}, - {file = 
"rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9141fb0592e55f98fe9ac0f3ce883199b9c13e262e0bf40c5b18cdf926109d16"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:237bec5dd1bfc9b40bbd786cd27949ef0c0eb5fab5eb491904c6b5df59d39d3c"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18123168cba156ab5794ea6de66db50f21bb3c66ae748d03316e71b27d907b95"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b75fe506c8e02769cc47f5ab21ce3e09b6211d3edaa8f8f27331cb6988779be"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da82aa4b46973aaf9e03bb4c3d6977004648c8638febfc0f9d237e865761270"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c34c022d5ad564f1a5a57a4a89793bd70d7bad428150fb8ff2760b223407cdcf"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e96c84d6c2a0ca94e15acb5399118fff669f4306beb98a6d8ec6f5dccab4412"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e8e154b84a311263e1aca86818c962e1fa9eefdd643d1d5d197fcd2738f88cb9"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:335fee93188f8cd585552bb8057228ce0111bd227fa81bfd40b7df6b75def8ab"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-win32.whl", hash = "sha256:6729b856166a9e95c278410f73683957ea6100c8a9d0a8dbe434c49663689255"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:0e06d99ad1ad97cb2ef7f51ec6b1fedd74a3a700e4949353871cf331d07b382a"}, - {file = "rapidfuzz-3.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:8d1b7082104d596a3eb012e0549b2634ed15015b569f48879701e9d8db959dbb"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:779027d3307e1a2b1dc0c03c34df87a470a368a1a0840a9d2908baf2d4067956"}, - {file = 
"rapidfuzz-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:440b5608ab12650d0390128d6858bc839ae77ffe5edf0b33a1551f2fa9860651"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cac41a411e07a6f3dc80dfbd33f6be70ea0abd72e99c59310819d09f07d945"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:958473c9f0bca250590200fd520b75be0dbdbc4a7327dc87a55b6d7dc8d68552"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef60dfa73749ef91cb6073be1a3e135f4846ec809cc115f3cbfc6fe283a5584"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7fbac18f2c19fc983838a60611e67e3262e36859994c26f2ee85bb268de2355"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0d519ff39db887cd73f4e297922786d548f5c05d6b51f4e6754f452a7f4296"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bebb7bc6aeb91cc57e4881b222484c26759ca865794187217c9dcea6c33adae6"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe07f8b9c3bb5c5ad1d2c66884253e03800f4189a60eb6acd6119ebaf3eb9894"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfa48a4a2d45a41457f0840c48e579db157a927f4e97acf6e20df8fc521c79de"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2cf44d01bfe8ee605b7eaeecbc2b9ca64fc55765f17b304b40ed8995f69d7716"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e6bbca9246d9eedaa1c84e04a7f555493ba324d52ae4d9f3d9ddd1b740dcd87"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-win32.whl", hash = "sha256:567f88180f2c1423b4fe3f3ad6e6310fc97b85bdba574801548597287fc07028"}, - {file = "rapidfuzz-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6b2cd7c29d6ecdf0b780deb587198f13213ac01c430ada6913452fd0c40190fc"}, - {file 
= "rapidfuzz-3.10.1-cp39-cp39-win_arm64.whl", hash = "sha256:9f912d459e46607ce276128f52bea21ebc3e9a5ccf4cccfef30dd5bddcf47be8"}, - {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac4452f182243cfab30ba4668ef2de101effaedc30f9faabb06a095a8c90fd16"}, - {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:565c2bd4f7d23c32834652b27b51dd711814ab614b4e12add8476be4e20d1cf5"}, - {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d9747149321607be4ccd6f9f366730078bed806178ec3eeb31d05545e9e8f"}, - {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:616290fb9a8fa87e48cb0326d26f98d4e29f17c3b762c2d586f2b35c1fd2034b"}, - {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073a5b107e17ebd264198b78614c0206fa438cce749692af5bc5f8f484883f50"}, - {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39c4983e2e2ccb9732f3ac7d81617088822f4a12291d416b09b8a1eadebb3e29"}, - {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac7adee6bcf0c6fee495d877edad1540a7e0f5fc208da03ccb64734b43522d7a"}, - {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:425f4ac80b22153d391ee3f94bc854668a0c6c129f05cf2eaf5ee74474ddb69e"}, - {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65a2fa13e8a219f9b5dcb9e74abe3ced5838a7327e629f426d333dfc8c5a6e66"}, - {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75561f3df9a906aaa23787e9992b228b1ab69007932dc42070f747103e177ba8"}, - {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd062490537e97ca125bc6c7f2b7331c2b73d21dc304615afe61ad1691e15d5"}, - {file = 
"rapidfuzz-3.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfcc8feccf63245a22dfdd16e222f1a39771a44b870beb748117a0e09cbb4a62"}, - {file = "rapidfuzz-3.10.1.tar.gz", hash = "sha256:5a15546d847a915b3f42dc79ef9b0c78b998b4e2c53b252e7166284066585979"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb8a54543d16ab1b69e2c5ed96cabbff16db044a50eddfc028000138ca9ddf33"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:231c8b2efbd7f8d2ecd1ae900363ba168b8870644bb8f2b5aa96e4a7573bde19"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54e7f442fb9cca81e9df32333fb075ef729052bcabe05b0afc0441f462299114"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:906f1f2a1b91c06599b3dd1be207449c5d4fc7bd1e1fa2f6aef161ea6223f165"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed59044aea9eb6c663112170f2399b040d5d7b162828b141f2673e822093fa8"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cb1965a28b0fa64abdee130c788a0bc0bb3cf9ef7e3a70bf055c086c14a3d7e"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b488b244931d0291412917e6e46ee9f6a14376625e150056fe7c4426ef28225"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f0ba13557fec9d5ffc0a22826754a7457cc77f1b25145be10b7bb1d143ce84c6"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3871fa7dfcef00bad3c7e8ae8d8fd58089bad6fb21f608d2bf42832267ca9663"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:b2669eafee38c5884a6e7cc9769d25c19428549dcdf57de8541cf9e82822e7db"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:ffa1bb0e26297b0f22881b219ffc82a33a3c84ce6174a9d69406239b14575bd5"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:45b15b8a118856ac9caac6877f70f38b8a0d310475d50bc814698659eabc1cdb"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-win32.whl", hash = "sha256:22033677982b9c4c49676f215b794b0404073f8974f98739cb7234e4a9ade9ad"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:be15496e7244361ff0efcd86e52559bacda9cd975eccf19426a0025f9547c792"}, + {file = "rapidfuzz-3.11.0-cp310-cp310-win_arm64.whl", hash = "sha256:714a7ba31ba46b64d30fccfe95f8013ea41a2e6237ba11a805a27cdd3bce2573"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8724a978f8af7059c5323d523870bf272a097478e1471295511cf58b2642ff83"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b63cb1f2eb371ef20fb155e95efd96e060147bdd4ab9fc400c97325dfee9fe1"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82497f244aac10b20710448645f347d862364cc4f7d8b9ba14bd66b5ce4dec18"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:339607394941801e6e3f6c1ecd413a36e18454e7136ed1161388de674f47f9d9"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84819390a36d6166cec706b9d8f0941f115f700b7faecab5a7e22fc367408bc3"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eea8d9e20632d68f653455265b18c35f90965e26f30d4d92f831899d6682149b"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b659e1e2ea2784a9a397075a7fc395bfa4fe66424042161c4bcaf6e4f637b38"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1315cd2a351144572e31fe3df68340d4b83ddec0af8b2e207cd32930c6acd037"}, + {file = 
"rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a7743cca45b4684c54407e8638f6d07b910d8d811347b9d42ff21262c7c23245"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5bb636b0150daa6d3331b738f7c0f8b25eadc47f04a40e5c23c4bfb4c4e20ae3"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:42f4dd264ada7a9aa0805ea0da776dc063533917773cf2df5217f14eb4429eae"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51f24cb39e64256221e6952f22545b8ce21cacd59c0d3e367225da8fc4b868d8"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-win32.whl", hash = "sha256:aaf391fb6715866bc14681c76dc0308f46877f7c06f61d62cc993b79fc3c4a2a"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ebadd5b8624d8ad503e505a99b8eb26fe3ea9f8e9c2234e805a27b269e585842"}, + {file = "rapidfuzz-3.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:d895998fec712544c13cfe833890e0226585cf0391dd3948412441d5d68a2b8c"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f382fec4a7891d66fb7163c90754454030bb9200a13f82ee7860b6359f3f2fa8"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dfaefe08af2a928e72344c800dcbaf6508e86a4ed481e28355e8d4b6a6a5230e"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92ebb7c12f682b5906ed98429f48a3dd80dd0f9721de30c97a01473d1a346576"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a1b3ebc62d4bcdfdeba110944a25ab40916d5383c5e57e7c4a8dc0b6c17211a"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c6d7fea39cb33e71de86397d38bf7ff1a6273e40367f31d05761662ffda49e4"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99aebef8268f2bc0b445b5640fd3312e080bd17efd3fbae4486b20ac00466308"}, + {file = 
"rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4469307f464ae3089acf3210b8fc279110d26d10f79e576f385a98f4429f7d97"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:eb97c53112b593f89a90b4f6218635a9d1eea1d7f9521a3b7d24864228bbc0aa"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ef8937dae823b889c0273dfa0f0f6c46a3658ac0d851349c464d1b00e7ff4252"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d95f9e9f3777b96241d8a00d6377cc9c716981d828b5091082d0fe3a2924b43e"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:b1d67d67f89e4e013a5295e7523bc34a7a96f2dba5dd812c7c8cb65d113cbf28"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d994cf27e2f874069884d9bddf0864f9b90ad201fcc9cb2f5b82bacc17c8d5f2"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-win32.whl", hash = "sha256:ba26d87fe7fcb56c4a53b549a9e0e9143f6b0df56d35fe6ad800c902447acd5b"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:b1f7efdd7b7adb32102c2fa481ad6f11923e2deb191f651274be559d56fc913b"}, + {file = "rapidfuzz-3.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:ed78c8e94f57b44292c1a0350f580e18d3a3c5c0800e253f1583580c1b417ad2"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e60814edd0c9b511b5f377d48b9782b88cfe8be07a98f99973669299c8bb318a"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f28952da055dbfe75828891cd3c9abf0984edc8640573c18b48c14c68ca5e06"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e8f93bc736020351a6f8e71666e1f486bb8bd5ce8112c443a30c77bfde0eb68"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76a4a11ba8f678c9e5876a7d465ab86def047a4fcc043617578368755d63a1bc"}, + {file = 
"rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc0e0d41ad8a056a9886bac91ff9d9978e54a244deb61c2972cc76b66752de9c"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8ea35f2419c7d56b3e75fbde2698766daedb374f20eea28ac9b1f668ef4f74"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd340bbd025302276b5aa221dccfe43040c7babfc32f107c36ad783f2ffd8775"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:494eef2c68305ab75139034ea25328a04a548d297712d9cf887bf27c158c388b"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5a167344c1d6db06915fb0225592afdc24d8bafaaf02de07d4788ddd37f4bc2f"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8c7af25bda96ac799378ac8aba54a8ece732835c7b74cfc201b688a87ed11152"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d2a0f7e17f33e7890257367a1662b05fecaf56625f7dbb6446227aaa2b86448b"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4d0d26c7172bdb64f86ee0765c5b26ea1dc45c52389175888ec073b9b28f4305"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-win32.whl", hash = "sha256:6ad02bab756751c90fa27f3069d7b12146613061341459abf55f8190d899649f"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:b1472986fd9c5d318399a01a0881f4a0bf4950264131bb8e2deba9df6d8c362b"}, + {file = "rapidfuzz-3.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c408f09649cbff8da76f8d3ad878b64ba7f7abdad1471efb293d2c075e80c822"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1bac4873f6186f5233b0084b266bfb459e997f4c21fc9f029918f44a9eccd304"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f9f12c2d0aa52b86206d2059916153876a9b1cf9dfb3cf2f344913167f1c3d4"}, + {file = 
"rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd501de6f7a8f83557d20613b58734d1cb5f0be78d794cde64fe43cfc63f5f2"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4416ca69af933d4a8ad30910149d3db6d084781d5c5fdedb713205389f535385"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f0821b9bdf18c5b7d51722b906b233a39b17f602501a966cfbd9b285f8ab83cd"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0edecc3f90c2653298d380f6ea73b536944b767520c2179ec5d40b9145e47aa"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4513dd01cee11e354c31b75f652d4d466c9440b6859f84e600bdebfccb17735a"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d9727b85511b912571a76ce53c7640ba2c44c364e71cef6d7359b5412739c570"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ab9eab33ee3213f7751dc07a1a61b8d9a3d748ca4458fffddd9defa6f0493c16"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6b01c1ddbb054283797967ddc5433d5c108d680e8fa2684cf368be05407b07e4"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3857e335f97058c4b46fa39ca831290b70de554a5c5af0323d2f163b19c5f2a6"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d98a46cf07c0c875d27e8a7ed50f304d83063e49b9ab63f21c19c154b4c0d08d"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-win32.whl", hash = "sha256:c36539ed2c0173b053dafb221458812e178cfa3224ade0960599bec194637048"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:ec8d7d8567e14af34a7911c98f5ac74a3d4a743cd848643341fc92b12b3784ff"}, + {file = "rapidfuzz-3.11.0-cp39-cp39-win_arm64.whl", hash = "sha256:62171b270ecc4071be1c1f99960317db261d4c8c83c169e7f8ad119211fe7397"}, + {file = 
"rapidfuzz-3.11.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f06e3c4c0a8badfc4910b9fd15beb1ad8f3b8fafa8ea82c023e5e607b66a78e4"}, + {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fe7aaf5a54821d340d21412f7f6e6272a9b17a0cbafc1d68f77f2fc11009dcd5"}, + {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25398d9ac7294e99876a3027ffc52c6bebeb2d702b1895af6ae9c541ee676702"}, + {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a52eea839e4bdc72c5e60a444d26004da00bb5bc6301e99b3dde18212e41465"}, + {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c87319b0ab9d269ab84f6453601fd49b35d9e4a601bbaef43743f26fabf496c"}, + {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3048c6ed29d693fba7d2a7caf165f5e0bb2b9743a0989012a98a47b975355cca"}, + {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b04f29735bad9f06bb731c214f27253bd8bedb248ef9b8a1b4c5bde65b838454"}, + {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7864e80a0d4e23eb6194254a81ee1216abdc53f9dc85b7f4d56668eced022eb8"}, + {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3794df87313dfb56fafd679b962e0613c88a293fd9bd5dd5c2793d66bf06a101"}, + {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d71da0012face6f45432a11bc59af19e62fac5a41f8ce489e80c0add8153c3d1"}, + {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff38378346b7018f42cbc1f6d1d3778e36e16d8595f79a312b31e7c25c50bd08"}, + {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6668321f90aa02a5a789d4e16058f2e4f2692c5230252425c3532a8a62bc3424"}, + {file = "rapidfuzz-3.11.0.tar.gz", 
hash = "sha256:a53ca4d3f52f00b393fab9b5913c5bafb9afc27d030c8a1db1283da6917a860f"}, ] [package.extras] @@ -2132,29 +2074,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.8.2" +version = "0.8.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d"}, - {file = "ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5"}, - {file = "ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = 
"sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22"}, - {file = "ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1"}, - {file = "ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea"}, - {file = "ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8"}, - {file = "ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5"}, + {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"}, + {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"}, + {file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"}, + {file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"}, + {file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"}, + {file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"}, + {file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"}, ] [[package]] @@ -2170,23 +2112,23 @@ files = [ [[package]] name = "setuptools" -version = "75.6.0" +version = "75.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = 
"sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, + {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, + {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test 
(>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "shellingham" @@ -2212,13 +2154,13 @@ files = [ [[package]] name = "smmap" -version = "5.0.1" +version = "5.0.2" description = "A pure Python implementation of a sliding window memory map manager" optional = false python-versions = ">=3.7" files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, + {file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"}, + {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, ] [[package]] @@ -2393,13 +2335,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.3" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] @@ -2439,4 +2381,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.0" python-versions = "^3.10,<3.14" -content-hash = "dfcad61fc8293aca3e816605d366322aeaafa217eb642caac763a58a20fc763c" +content-hash = "0d32014272186ae5af34ad2bff2e4af15e5ba487b813996a4a07d72ce329c8b2" diff --git a/pyproject.toml b/pyproject.toml index ec89ee12..3fff231e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ pydantic = "^2.5.3" pandas = "^2.2.0" plotly = {version = "^5.18.0", optional = true} matplotlib = {version = "^3.8.2", optional = true} -typing_extensions = { version = "^4.11.0", python = "<3.10" } +typing_extensions = { version = "^4.11.0", python = "<3.11" } toml = "^0.10.2" click = "^8.1.7" gitpython = "^3.1.43" diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 786c8ec0..ac8e09cc 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -22,7 +22,7 @@ class AlertBase(SimvueObject): def new(cls, **kwargs): pass - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__(self, identifier: str | None = None, **kwargs) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" super().__init__(identifier, **kwargs) diff --git a/simvue/executor.py b/simvue/executor.py index 090b4d79..5a363504 100644 --- a/simvue/executor.py +++ 
b/simvue/executor.py @@ -35,13 +35,13 @@ def __call__(self, *, status_code: int, std_out: str, std_err: str) -> None: ... def _execute_process( proc_id: str, - command: typing.List[str], + command: list[str], runner_name: str, - completion_callback: typing.Optional[CompletionCallback] = None, - completion_trigger: typing.Optional[multiprocessing.synchronize.Event] = None, - environment: typing.Optional[typing.Dict[str, str]] = None, - cwd: typing.Optional[pathlib.Path] = None, -) -> tuple[subprocess.Popen, typing.Optional[threading.Thread]]: + completion_callback: CompletionCallback | None = None, + completion_trigger: multiprocessing.synchronize.Event | None = None, + environment: dict[str, str] | None = None, + cwd: pathlib.Path | None = None, +) -> tuple[subprocess.Popen, threading.Thread | None]: thread_out = None with open(f"{runner_name}_{proc_id}.err", "w") as err: @@ -58,8 +58,8 @@ def _execute_process( if completion_callback or completion_trigger: def trigger_check( - completion_callback: typing.Optional[CompletionCallback], - trigger_to_set: typing.Optional[multiprocessing.synchronize.Event], + completion_callback: CompletionCallback | None, + trigger_to_set: multiprocessing.synchronize.Event | None, process: subprocess.Popen, ) -> None: while process.poll() is None: @@ -67,12 +67,8 @@ def trigger_check( if trigger_to_set: trigger_to_set.set() if completion_callback: - with open(f"{runner_name}_{proc_id}.err") as err: - std_err = err.read() - - with open(f"{runner_name}_{proc_id}.out") as out: - std_out = out.read() - + std_err = pathlib.Path(f"{runner_name}_{proc_id}.err").read_text() + std_out = pathlib.Path(f"{runner_name}_{proc_id}.out").read_text() completion_callback( status_code=process.returncode, std_out=std_out, @@ -233,15 +229,15 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: arg = arg.replace("_", "-") if len(arg) == 1: - if isinstance(value, bool) and value: - command += [f"-{arg}"] - else: - command += 
[f"-{arg}", f"{value}"] + command += ( + [f"-{arg}"] + if isinstance(value, bool) and value + else [f"-{arg}", f"{value}"] + ) + elif isinstance(value, bool) and value: + command += [f"--{arg}"] else: - if isinstance(value, bool) and value: - command += [f"--{arg}"] - else: - command += [f"--{arg}", f"{value}"] + command += [f"--{arg}", f"{value}"] command += pos_args @@ -296,11 +292,9 @@ def success(self) -> int: @property def exit_status(self) -> int: """Returns the first non-zero exit status if applicable""" - _non_zero = [ + if _non_zero := [ i.returncode for i in self._processes.values() if i.returncode != 0 - ] - - if _non_zero: + ]: return _non_zero[0] return 0 @@ -344,6 +338,9 @@ def _get_error_status(self, process_id: str) -> typing.Optional[str]: def _update_alerts(self) -> None: """Send log events for the result of each process""" + # Wait for the dispatcher to send the latest information before + # allowing the executor to finish (and as such the run instance to exit) + _wait_limit: float = 1 for proc_id, process in self._processes.items(): if process.returncode != 0: # If the process fails then purge the dispatcher event queue @@ -355,9 +352,6 @@ def _update_alerts(self) -> None: else: self._runner.log_alert(self._alert_ids[proc_id], "ok") - # Wait for the dispatcher to send the latest information before - # allowing the executor to finish (and as such the run instance to exit) - _wait_limit: float = 1 _current_time: float = 0 while ( self._runner._dispatcher diff --git a/simvue/metrics.py b/simvue/metrics.py index 4417e39d..fc345f56 100644 --- a/simvue/metrics.py +++ b/simvue/metrics.py @@ -1,5 +1,14 @@ +""" +CPU/GPU Metrics +=============== + +Get information relating to the usage of the CPU and GPU (where applicable) + +""" + import contextlib import logging +import psutil from .pynvml import ( nvmlDeviceGetComputeRunningProcesses, @@ -15,11 +24,11 @@ logger = logging.getLogger(__name__) -def get_process_memory(processes): +def 
get_process_memory(processes: list[psutil.Process]) -> int: """ Get the resident set size """ - rss = 0 + rss: int = 0 for process in processes: with contextlib.suppress(Exception): rss += process.memory_info().rss / 1024 / 1024 @@ -27,11 +36,11 @@ def get_process_memory(processes): return rss -def get_process_cpu(processes): +def get_process_cpu(processes: list[psutil.Process]) -> int: """ Get the CPU usage """ - cpu_percent = 0 + cpu_percent: int = 0 for process in processes: with contextlib.suppress(Exception): cpu_percent += process.cpu_percent() @@ -39,27 +48,24 @@ def get_process_cpu(processes): return cpu_percent -def is_gpu_used(handle, processes): +def is_gpu_used(handle, processes: list[psutil.Process]) -> bool: """ Check if the GPU is being used by the list of processes """ pids = [process.pid for process in processes] - gpu_pids = [] - for process in nvmlDeviceGetComputeRunningProcesses(handle): - gpu_pids.append(process.pid) - - for process in nvmlDeviceGetGraphicsRunningProcesses(handle): - gpu_pids.append(process.pid) - + gpu_pids = [process.pid for process in nvmlDeviceGetComputeRunningProcesses(handle)] + gpu_pids.extend( + process.pid for process in nvmlDeviceGetGraphicsRunningProcesses(handle) + ) return len(list(set(gpu_pids) & set(pids))) > 0 -def get_gpu_metrics(processes): +def get_gpu_metrics(processes: list[psutil.Process]) -> dict[str, float]: """ Get GPU metrics """ - gpu_metrics = {} + gpu_metrics: dict[str, float] = {} with contextlib.suppress(Exception): nvmlInit() diff --git a/simvue/serialization.py b/simvue/serialization.py index 5433cba8..52538791 100644 --- a/simvue/serialization.py +++ b/simvue/serialization.py @@ -117,7 +117,7 @@ def _serialize_matplotlib(data: typing.Any) -> typing.Optional[tuple[str, str]]: @check_extra("plot") -def _serialize_matplotlib_figure(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_matplotlib_figure(data: typing.Any) -> tuple[str, str] | None: try: import plotly except 
ImportError: @@ -131,7 +131,7 @@ def _serialize_matplotlib_figure(data: typing.Any) -> typing.Optional[tuple[str, return data, mimetype -def _serialize_numpy_array(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_numpy_array(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/vnd.simvue.numpy.v1" mfile = BytesIO() numpy.save(mfile, data, allow_pickle=False) @@ -140,7 +140,7 @@ def _serialize_numpy_array(data: typing.Any) -> typing.Optional[tuple[str, str]] return data, mimetype -def _serialize_dataframe(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_dataframe(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/vnd.simvue.df.v1" mfile = BytesIO() data.to_csv(mfile) @@ -150,7 +150,7 @@ def _serialize_dataframe(data: typing.Any) -> typing.Optional[tuple[str, str]]: @check_extra("torch") -def _serialize_torch_tensor(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_torch_tensor(data: typing.Any) -> tuple[str, str] | None: try: import torch except ImportError: @@ -165,7 +165,7 @@ def _serialize_torch_tensor(data: typing.Any) -> typing.Optional[tuple[str, str] return data, mimetype -def _serialize_json(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_json(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/json" try: mfile = BytesIO() @@ -177,7 +177,7 @@ def _serialize_json(data: typing.Any) -> typing.Optional[tuple[str, str]]: return data, mimetype -def _serialize_pickle(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_pickle(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/octet-stream" data = pickle.dumps(data) return data, mimetype @@ -191,8 +191,6 @@ def deserialize_data( """ if mimetype == "application/vnd.plotly.v1+json": return _deserialize_plotly_figure(data) - elif mimetype == "application/vnd.plotly.v1+json": - return _deserialize_matplotlib_figure(data) elif mimetype == 
"application/vnd.simvue.numpy.v1": return _deserialize_numpy_array(data) elif mimetype == "application/vnd.simvue.df.v1": @@ -226,7 +224,7 @@ def _deserialize_matplotlib_figure(data: "Buffer") -> typing.Optional["Figure"]: return data -def _deserialize_numpy_array(data: "Buffer") -> typing.Optional[typing.Any]: +def _deserialize_numpy_array(data: "Buffer") -> typing.Any | None: mfile = BytesIO(data) mfile.seek(0) data = numpy.load(mfile, allow_pickle=False) From e515fb474bdd2ce335d1ad41bb1f7d76af33a15e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 11:01:13 +0000 Subject: [PATCH 075/163] Remove constraints on tenacity and psutil Fix Self in base API class --- poetry.lock | 2 +- pyproject.toml | 4 ++-- simvue/api/objects/base.py | 7 ++++++- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9a07cc5f..ba7675c2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2381,4 +2381,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.0" python-versions = "^3.10,<3.14" -content-hash = "0d32014272186ae5af34ad2bff2e4af15e5ba487b813996a4a07d72ce329c8b2" +content-hash = "95d0c217aeeaf488ee7d148435b002a29c43542130388ecdaa8a08e8d4607d1c" diff --git a/pyproject.toml b/pyproject.toml index 3fff231e..612d0a4d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,9 +39,7 @@ python = "^3.10,<3.14" dill = "^0.3.7" requests = "^2.31.0" msgpack = "^1.0.7" -tenacity = ">=8.2.3,<10.0.0" PyJWT = "^2.8.0" -psutil = ">=5.9.8,<7.0.0" pydantic = "^2.5.3" pandas = "^2.2.0" plotly = {version = "^5.18.0", optional = true} @@ -58,6 +56,8 @@ numpy = "^2.1.2" flatdict = "^4.0.1" semver = "^3.0.2" email-validator = "^2.2.0" +psutil = "^6.1.1" +tenacity = "^9.0.0" [tool.poetry.extras] plot = ["matplotlib", "plotly"] diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index c51b5e7e..e6776c0d 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -31,6 +31,11 @@ 
logging.basicConfig(level=logging.INFO) +try: + from typing import Self +except ImportError: + from typing_extensions import Self + def staging_check(member_func: typing.Callable) -> typing.Callable: """Decorator for checking if requested attribute has uncommitted changes""" @@ -290,7 +295,7 @@ def get( count: pydantic.PositiveInt | None = None, offset: pydantic.PositiveInt | None = None, **kwargs, - ) -> typing.Generator[tuple[str, typing.Optional["SimvueObject"]], None, None]: + ) -> typing.Generator[tuple[str, Self | None], None, None]: _class_instance = cls(_read_only=True, _local=True) if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: raise RuntimeError( From 4ef1a19e3057cb4486f4cc9039813e327cc80115 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 12:57:39 +0000 Subject: [PATCH 076/163] More typing adjustments Added test for metrics object and spans property --- simvue/api/objects/base.py | 2 +- simvue/api/objects/events.py | 5 ++ simvue/api/objects/metrics.py | 20 ++++++ simvue/client.py | 4 +- simvue/executor.py | 16 ++--- simvue/run.py | 125 ++++++++++++++++------------------ simvue/serialization.py | 4 +- 7 files changed, 98 insertions(+), 78 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index e6776c0d..ffddd1b3 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -293,7 +293,7 @@ def ids( def get( cls, count: pydantic.PositiveInt | None = None, - offset: pydantic.PositiveInt | None = None, + offset: pydantic.NonNegativeInt | None = None, **kwargs, ) -> typing.Generator[tuple[str, Self | None], None, None]: _class_instance = cls(_read_only=True, _local=True) diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py index 156521c2..d1d6fab7 100644 --- a/simvue/api/objects/events.py +++ b/simvue/api/objects/events.py @@ -109,3 +109,8 @@ def histogram( response=_response, ) return _json_response.get("data") + + def 
delete( + self, _linked_objects: list[str] | None = None, **kwargs + ) -> dict[str, typing.Any]: + raise NotImplementedError("Cannot delete event set") diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index b4ae1823..24ba286c 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -7,6 +7,7 @@ """ +import http import typing import json @@ -15,6 +16,7 @@ from .base import SimvueObject from simvue.models import MetricSet +from simvue.api.request import get as sv_get, get_json_from_response __all__ = ["Metrics"] @@ -72,5 +74,23 @@ def get( for _entry in _data: yield MetricSet(**_entry) + @pydantic.validate_call + def span(self, run_ids: list[str]) -> dict[str, int | float]: + """Returns the metrics span for the given runs""" + _url = self._base_url / "span" + _response = sv_get( + url=f"{_url}", headers=self._headers, data={"runs": json.dumps(run_ids)} + ) + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving metric spans", + ) + def _post(self, **kwargs) -> dict[str, typing.Any]: return super()._post(is_json=False, **kwargs) + + def delete( + self, _linked_objects: list[str] | None = None, **kwargs + ) -> dict[str, typing.Any]: + raise NotImplementedError("Cannot delete metric set") diff --git a/simvue/client.py b/simvue/client.py index ad1e13ca..12241728 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -184,8 +184,8 @@ def get_runs( alerts: bool = False, metadata: bool = False, output_format: typing.Literal["dict", "objects", "dataframe"] = "objects", - count_limit: typing.Optional[pydantic.PositiveInt] = 100, - start_index: typing.Optional[pydantic.PositiveInt] = 0, + count_limit: pydantic.PositiveInt | None = 100, + start_index: pydantic.NonNegativeInt = 0, show_shared: bool = False, ) -> typing.Union[DataFrame, typing.Generator[tuple[str, Run], None, None], None]: """Retrieve all runs matching filters. 
diff --git a/simvue/executor.py b/simvue/executor.py index 5a363504..6d411e4f 100644 --- a/simvue/executor.py +++ b/simvue/executor.py @@ -132,13 +132,13 @@ def add_process( self, identifier: str, *args, - executable: typing.Optional[str] = None, - script: typing.Optional[pathlib.Path] = None, - input_file: typing.Optional[pathlib.Path] = None, - env: typing.Optional[typing.Dict[str, str]] = None, - cwd: typing.Optional[pathlib.Path] = None, - completion_callback: typing.Optional[CompletionCallback] = None, - completion_trigger: typing.Optional[multiprocessing.synchronize.Event] = None, + executable: str | None = None, + script: pathlib.Path | None = None, + input_file: pathlib.Path | None = None, + env: dict[str, str] | None = None, + cwd: pathlib.Path | None = None, + completion_callback: CompletionCallback | None = None, + completion_trigger: multiprocessing.synchronize.Event | None = None, **kwargs, ) -> None: """Add a process to be executed to the executor. @@ -183,7 +183,7 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: input_file : str | None, optional the input file to run, note this only work if the input file is not an option, if this is the case you should provide it as such and perform the upload manually, by default None - env : typing.Dict[str, str], optional + env : dict[str, str], optional environment variables for process cwd: typing.Optional[pathlib.Path], optional working directory to execute the process within diff --git a/simvue/run.py b/simvue/run.py index 65a477bc..f257648a 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -113,9 +113,9 @@ class Run: def __init__( self, mode: typing.Literal["online", "offline", "disabled"] = "online", - abort_callback: typing.Optional[typing.Callable[[Self], None]] = None, - server_token: typing.Optional[pydantic.SecretStr] = None, - server_url: typing.Optional[str] = None, + abort_callback: typing.Callable[[Self], None] | None = None, + server_token: pydantic.SecretStr | None 
= None, + server_url: str | None = None, debug: bool = False, ) -> None: """Initialise a new Simvue run @@ -139,32 +139,31 @@ def __init__( run in debug mode, default is False """ self._uuid: str = f"{uuid.uuid4()}" - self._name: typing.Optional[str] = None + self._name: str | None = None # monitor duration with respect to retention period self._timer: float = 0 - self._retention: typing.Optional[float] = None + self._retention: float | None = None self._testing: bool = False self._abort_on_alert: typing.Literal["run", "terminate", "ignore"] = "terminate" - self._abort_callback: typing.Optional[typing.Callable[[Self], None]] = ( - abort_callback - ) + self._abort_callback: typing.Callable[[Self], None] | None = abort_callback self._dispatch_mode: typing.Literal["direct", "queued"] = "queued" self._executor = Executor(self) - self._dispatcher: typing.Optional[DispatcherBaseClass] = None + self._dispatcher: DispatcherBaseClass | None = None - self._id: typing.Optional[str] = None + self._id: str | None = None self._folder: Folder | None = None self._term_color: bool = True self._suppress_errors: bool = False self._queue_blocking: bool = False - self._status: typing.Optional[ + self._status: ( typing.Literal[ "created", "running", "completed", "failed", "terminated", "lost" ] - ] = None + | None + ) = None self._data: dict[str, typing.Any] = {} self._step: int = 0 self._active: bool = False @@ -180,7 +179,7 @@ def __init__( ) self._aborted: bool = False - self._resources_metrics_interval: typing.Optional[int] = ( + self._resources_metrics_interval: int | None = ( HEARTBEAT_INTERVAL if self._user_config.metrics.resources_metrics_interval < 1 else self._user_config.metrics.resources_metrics_interval @@ -188,13 +187,13 @@ def __init__( self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}" } - self._sv_obj: typing.Optional[RunObject] = None - self._pid: typing.Optional[int] = 0 - self._shutdown_event: 
typing.Optional[threading.Event] = None + self._sv_obj: RunObject | None = None + self._pid: int | None = 0 + self._shutdown_event: threading.Event | None = None self._configuration_lock = threading.Lock() - self._heartbeat_termination_trigger: typing.Optional[threading.Event] = None - self._storage_id: typing.Optional[str] = None - self._heartbeat_thread: typing.Optional[threading.Thread] = None + self._heartbeat_termination_trigger: threading.Event | None = None + self._storage_id: str | None = None + self._heartbeat_thread: threading.Thread | None = None self._heartbeat_interval: int = HEARTBEAT_INTERVAL self._emission_metrics_interval: int | None = ( @@ -205,7 +204,7 @@ def __init__( ) else self._user_config.metrics.emission_metrics_interval ) - self._emissions_tracker: typing.Optional[SimvueEmissionsTracker] = ( + self._emissions_tracker: SimvueEmissionsTracker | None = ( SimvueEmissionsTracker("simvue", self, self._emission_metrics_interval) if self._user_config.metrics.enable_emission_metrics else None @@ -216,15 +215,13 @@ def __enter__(self) -> Self: def _handle_exception_throw( self, - exc_type: typing.Optional[typing.Type[BaseException]], + exc_type: typing.Type[BaseException] | None, value: BaseException, traceback: typing.Optional[ typing.Union[typing.Type[BaseException], BaseException] ], ) -> None: - _exception_thrown: typing.Optional[str] = ( - exc_type.__name__ if exc_type else None - ) + _exception_thrown: str | None = exc_type.__name__ if exc_type else None _is_running: bool = self._status == "running" _is_running_online: bool = self._id is not None and _is_running _is_running_offline: bool = ( @@ -269,7 +266,7 @@ def _handle_exception_throw( def __exit__( self, - exc_type: typing.Optional[typing.Type[BaseException]], + exc_type: typing.Type[BaseException] | None, value: BaseException, traceback: typing.Optional[ typing.Union[typing.Type[BaseException], BaseException] @@ -566,14 +563,14 @@ def init( name: typing.Annotated[str | None, 
pydantic.Field(pattern=NAME_REGEX)] = None, *, metadata: dict[str, typing.Any] = None, - tags: typing.Optional[list[str]] = None, - description: typing.Optional[str] = None, + tags: list[str] | None = None, + description: str | None = None, folder: typing.Annotated[ str, pydantic.Field(None, pattern=FOLDER_REGEX) ] = None, running: bool = True, - retention_period: typing.Optional[str] = None, - timeout: typing.Optional[int] = 180, + retention_period: str | None = None, + timeout: int | None = 180, visibility: typing.Union[ typing.Literal["public", "tenant"], list[str], None ] = None, @@ -663,7 +660,7 @@ def init( # Parse the time to live/retention time if specified try: if retention_period: - self._retention: typing.Optional[int] = int( + self._retention: int | None = int( humanfriendly.parse_timespan(retention_period) ) else: @@ -739,15 +736,15 @@ def add_process( self, identifier: str, *cmd_args, - executable: typing.Optional[typing.Union[str, pathlib.Path]] = None, - script: typing.Optional[pydantic.FilePath] = None, - input_file: typing.Optional[pydantic.FilePath] = None, + executable: typing.Union[str, pathlib.Path] | None = None, + script: pydantic.FilePath | None = None, + input_file: pydantic.FilePath | None = None, completion_callback: typing.Optional[ typing.Callable[[int, str, str], None] ] = None, - completion_trigger: typing.Optional[multiprocessing.synchronize.Event] = None, - env: typing.Optional[typing.Dict[str, str]] = None, - cwd: typing.Optional[pathlib.Path] = None, + completion_trigger: multiprocessing.synchronize.Event | None = None, + env: typing.Dict[str, str] | None = None, + cwd: pathlib.Path | None = None, **cmd_kwargs, ) -> None: """Add a process to be executed to the executor. 
@@ -803,7 +800,7 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: this trigger event is set when the processes completes env : typing.Dict[str, str], optional environment variables for process - cwd: typing.Optional[pathlib.Path], optional + cwd: pathlib.Path | None, optional working directory to execute the process within. Note that executable, input and script file paths should be absolute or relative to the directory where this method is called, not relative to the new working directory. **kwargs : Any, ..., optional @@ -820,7 +817,7 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: cmd_list: typing.List[str] = [] pos_args = list(cmd_args) - executable_str: typing.Optional[str] = None + executable_str: str | None = None # Assemble the command for saving to metadata as string if executable: @@ -894,7 +891,7 @@ def executor(self) -> Executor: return self._executor @property - def name(self) -> typing.Optional[str]: + def name(self) -> str | None: """Return the name of the run""" return self._name @@ -904,7 +901,7 @@ def uid(self) -> str: return self._uuid @property - def id(self) -> typing.Optional[str]: + def id(self) -> str | None: """Return the unique id of the run""" return self._id @@ -948,13 +945,13 @@ def set_pid(self, pid: int) -> None: def config( self, *, - suppress_errors: typing.Optional[bool] = None, - queue_blocking: typing.Optional[bool] = None, - resources_metrics_interval: typing.Optional[pydantic.PositiveInt] = None, - emission_metrics_interval: typing.Optional[pydantic.PositiveInt] = None, - enable_emission_metrics: typing.Optional[bool] = None, - disable_resources_metrics: typing.Optional[bool] = None, - storage_id: typing.Optional[str] = None, + suppress_errors: bool | None = None, + queue_blocking: bool | None = None, + resources_metrics_interval: pydantic.PositiveInt | None = None, + emission_metrics_interval: pydantic.PositiveInt | None = None, + enable_emission_metrics: bool | 
None = None, + disable_resources_metrics: bool | None = None, + storage_id: str | None = None, abort_on_alert: typing.Optional[ typing.Union[typing.Literal["run", "all", "ignore"], bool] ] = None, @@ -1176,9 +1173,9 @@ def log_event(self, message: str, timestamp: str | None = None) -> bool: def _add_metrics_to_dispatch( self, metrics: dict[str, typing.Union[int, float]], - step: typing.Optional[int] = None, - time: typing.Optional[float] = None, - timestamp: typing.Optional[str] = None, + step: int | None = None, + time: float | None = None, + timestamp: str | None = None, join_on_fail: bool = True, ) -> bool: if self._user_config.run.mode == "disabled": @@ -1222,9 +1219,9 @@ def _add_metrics_to_dispatch( def log_metrics( self, metrics: dict[MetricKeyString, typing.Union[int, float]], - step: typing.Optional[int] = None, - time: typing.Optional[float] = None, - timestamp: typing.Optional[str] = None, + step: int | None = None, + time: float | None = None, + timestamp: str | None = None, ) -> bool: """Log metrics to Simvue server @@ -1311,7 +1308,7 @@ def save_file( self, file_path: pydantic.FilePath, category: typing.Literal["input", "output", "code"], - filetype: typing.Optional[str] = None, + filetype: str | None = None, preserve_path: bool = False, name: typing.Optional[ typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] @@ -1379,7 +1376,7 @@ def save_directory( self, directory: pydantic.DirectoryPath, category: typing.Literal["output", "input", "code"], - filetype: typing.Optional[str] = None, + filetype: str | None = None, preserve_path: bool = False, ) -> bool: """Upload files from a whole directory @@ -1425,7 +1422,7 @@ def save_all( self, items: list[typing.Union[pydantic.FilePath, pydantic.DirectoryPath]], category: typing.Literal["input", "output", "code"], - filetype: typing.Optional[str] = None, + filetype: str | None = None, preserve_path: bool = False, ) -> bool: """Save a set of files and directories @@ -1516,9 +1513,7 @@ def _tidy_run(self) 
-> None: self._dispatcher.join() if _non_zero := self.executor.exit_status: - _error_msgs: dict[str, typing.Optional[str]] = ( - self.executor.get_error_summary() - ) + _error_msgs: dict[str, str] | None = self.executor.get_error_summary() _error_msg = "\n".join( f"{identifier}:\n{msg}" for identifier, msg in _error_msgs.items() ) @@ -1560,9 +1555,9 @@ def close(self) -> bool: @pydantic.validate_call def set_folder_details( self, - metadata: typing.Optional[dict[str, typing.Union[int, str, float]]] = None, - tags: typing.Optional[list[str]] = None, - description: typing.Optional[str] = None, + metadata: dict[str, typing.Union[int, str, float]] | None = None, + tags: list[str] | None = None, + description: str | None = None, ) -> bool: """Add metadata to the specified folder @@ -1609,8 +1604,8 @@ def set_folder_details( @pydantic.validate_call def add_alerts( self, - ids: typing.Optional[list[str]] = None, - names: typing.Optional[list[str]] = None, + ids: list[str] | None = None, + names: list[str] | None = None, ) -> bool: """Add a set of existing alerts to this run by name or id @@ -1657,7 +1652,7 @@ def add_alerts( def _attach_alert_to_run(self, alert: AlertBase) -> str | None: # Check if the alert already exists - _alert_id: typing.Optional[str] = None + _alert_id: str | None = None for _, _existing_alert in Alert.get(): if _existing_alert.compare(alert): diff --git a/simvue/serialization.py b/simvue/serialization.py index 52538791..3079d0ae 100644 --- a/simvue/serialization.py +++ b/simvue/serialization.py @@ -250,11 +250,11 @@ def _deserialize_torch_tensor(data: "Buffer") -> typing.Optional["Tensor"]: return torch.load(mfile) -def _deserialize_pickle(data) -> typing.Optional[typing.Any]: +def _deserialize_pickle(data) -> typing.Any | None: data = pickle.loads(data) return data -def _deserialize_json(data) -> typing.Optional[typing.Any]: +def _deserialize_json(data) -> typing.Any | None: data = json.loads(data) return data From 
6c73d70c2118e8bd665595978dd5f03d4a857f2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 12:58:19 +0000 Subject: [PATCH 077/163] More typing adjustments Added test for metrics object and spans property --- tests/unit/test_metrics.py | 42 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 tests/unit/test_metrics.py diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py new file mode 100644 index 00000000..c335bfbb --- /dev/null +++ b/tests/unit/test_metrics.py @@ -0,0 +1,42 @@ +import contextlib +import json +import pytest +import time +import datetime +import uuid + +from simvue.api.objects import Metrics, Folder, Run + +@pytest.mark.api +@pytest.mark.online +def test_metrics_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _values = { + "x": 1, + "y": 2.0, + "z": True + } + _time: int = 1 + _step: int = 1 + _metrics = Metrics.new( + run_id=_run.id, + metrics=[ + { + "timestamp": datetime.datetime.now(datetime.UTC).strftime( + "%Y-%m-%dT%H:%M:%S.%f" + ), + "time": _time, + "step": _step, + "values": _values, + } + ], + ) + _folder.commit() + _run.commit() + _metrics.commit() + assert _metrics.get(metrics=["x", "y", "z"], xaxis="step") + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) From bdc55e5f8c46a229f3f84b2e349dc05c8d1f6b0a Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 20 Jan 2025 13:38:01 +0000 Subject: [PATCH 078/163] Added descriptions to range alert tests to pass validation --- tests/unit/test_metric_range_alert.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index 721b78d3..be9ce9e7 100644 --- a/tests/unit/test_metric_range_alert.py +++ 
b/tests/unit/test_metric_range_alert.py @@ -12,6 +12,7 @@ def test_metric_range_alert_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", frequency=1, notification="none", metric="x", @@ -20,7 +21,6 @@ def test_metric_range_alert_creation_online() -> None: window=1, aggregation="average", rule="is inside range", - description="a metric range alert" ) _alert.commit() assert _alert.source == "metrics" @@ -36,6 +36,7 @@ def test_metric_range_alert_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", frequency=1, notification="none", metric="x", @@ -65,6 +66,7 @@ def test_metric_range_alert_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", frequency=1, notification="none", metric="x", @@ -93,6 +95,7 @@ def test_metric_range_alert_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", frequency=1, notification="none", metric="x", @@ -119,6 +122,7 @@ def test_metric_range_alert_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsRangeAlert.new( name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", frequency=1, notification="none", metric="x", From ec5068ea8868160cd00e14da9c0d065773b8c34c Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 20 Jan 2025 13:44:51 +0000 Subject: [PATCH 079/163] Added description to threshold alert tests --- tests/unit/test_metric_threshold_alert.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/unit/test_metric_threshold_alert.py 
b/tests/unit/test_metric_threshold_alert.py index d0c5ff60..e3009165 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -64,6 +64,7 @@ def test_metric_threshold_alert_modification_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsThresholdAlert.new( name=f"metrics_threshold_alert_{_uuid}", + description="a metric threshold alert", frequency=1, notification="none", threshold=10, @@ -115,6 +116,7 @@ def test_metric_range_alert_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _alert = MetricsThresholdAlert.new( name=f"metrics_threshold_alert_{_uuid}", + description="a metric threshold alert", frequency=1, notification="none", metric="x", From 2d32b1df9f5d9c36adfd2a9da6ed153315802c32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 16:32:05 +0000 Subject: [PATCH 080/163] Added metric names endpoint --- simvue/api/objects/metrics.py | 13 +++++++++++++ tests/functional/test_dispatch.py | 2 +- tests/unit/test_metrics.py | 3 +++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index 24ba286c..bdb5d2ab 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -87,6 +87,19 @@ def span(self, run_ids: list[str]) -> dict[str, int | float]: scenario="Retrieving metric spans", ) + @pydantic.validate_call + def names(self, run_ids: list[str]) -> dict[str, int | float]: + """Returns the metric names for the given runs""" + _url = self._base_url / "names" + _response = sv_get( + url=f"{_url}", headers=self._headers, params={"runs": json.dumps(run_ids)} + ) + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving metric names", + ) + def _post(self, **kwargs) -> dict[str, typing.Any]: return super()._post(is_json=False, **kwargs) diff --git a/tests/functional/test_dispatch.py 
b/tests/functional/test_dispatch.py index 02e0d99f..de77ccb2 100644 --- a/tests/functional/test_dispatch.py +++ b/tests/functional/test_dispatch.py @@ -111,7 +111,7 @@ def _main(res_queue, index, dispatch_callback=create_callback, term_event=event, dispatcher.add_item({string.ascii_uppercase[i % 26]: i}, var, False) except(RuntimeError): res_queue.put("AARGHGHGHGHAHSHGHSDHFSEDHSE") - + time.sleep(0.1) while not dispatcher.empty: diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index c335bfbb..2048b330 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -38,5 +38,8 @@ def test_metrics_creation_online() -> None: _run.commit() _metrics.commit() assert _metrics.get(metrics=["x", "y", "z"], xaxis="step") + assert _metrics.span(runs=[_run.id]) + assert _metrics.names(runs=[_run.id]) _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) + From 9f923e4559505beb20c658cbc849a3087e435bbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 16:33:42 +0000 Subject: [PATCH 081/163] Fix wrong parameter for metrics object functions --- tests/unit/test_metrics.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index 2048b330..0b49ef8e 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -38,8 +38,8 @@ def test_metrics_creation_online() -> None: _run.commit() _metrics.commit() assert _metrics.get(metrics=["x", "y", "z"], xaxis="step") - assert _metrics.span(runs=[_run.id]) - assert _metrics.names(runs=[_run.id]) + assert _metrics.span(run_ids=[_run.id]) + assert _metrics.names(run_ids=[_run.id]) _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) From 219d2772631c849b21f7e36eebca3c8af3ca2728 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 16:45:47 +0000 Subject: [PATCH 082/163] Added stat file header --- 
simvue/api/objects/stats.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/simvue/api/objects/stats.py b/simvue/api/objects/stats.py index dcb10f7d..d016cfeb 100644 --- a/simvue/api/objects/stats.py +++ b/simvue/api/objects/stats.py @@ -1,3 +1,11 @@ +""" +Simvue Stats +============ + +Statistics accessible to the current user. + +""" + import typing from .base import SimvueObject From 58fc7215f4b272acd4e3200678dcb2cdac69b5a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 20 Jan 2025 17:04:32 +0000 Subject: [PATCH 083/163] Fix CI using env key --- .../workflows/test_client_macos_nightlies.yml | 15 +++++++++------ .github/workflows/test_client_ubuntu.yml | 18 +++++++++++------- .../workflows/test_client_ubuntu_nightlies.yml | 18 +++++++++++------- .../test_client_windows_nightlies.yml | 16 +++++++++------- .github/workflows/test_multiple_python.yml | 17 +++++++++++------ CHANGELOG.md | 4 ++++ simvue/utilities.py | 7 +------ 7 files changed, 56 insertions(+), 39 deletions(-) diff --git a/.github/workflows/test_client_macos_nightlies.yml b/.github/workflows/test_client_macos_nightlies.yml index 0c5e6b09..6d387598 100644 --- a/.github/workflows/test_client_macos_nightlies.yml +++ b/.github/workflows/test_client_macos_nightlies.yml @@ -26,10 +26,13 @@ jobs: run: | rm poetry.lock python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch + python -m pip install -r requirements.txt + python -m pip install . 
- name: Test with pytest - run: | - export SIMVUE_URL=${{ secrets.SIMVUE_NIGHTLY_URL }} - export SIMVUE_TOKEN=${{ secrets.SIMVUE_NIGHTLY_TOKEN }} - poetry install --all-extras - # poetry run python -m pip install torch #FIXME: Torch tests skipped - poetry run pytest tests/ -m 'not scenario' + env: + SIMVUE_URL: ${{ secrets.SIMVUE_NIGHTLY_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_NIGHTLY_TOKEN }} + run: python -m pytest tests/ -m 'not scenario' diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml index d9db0131..8af0ce8d 100644 --- a/.github/workflows/test_client_ubuntu.yml +++ b/.github/workflows/test_client_ubuntu.yml @@ -29,14 +29,18 @@ jobs: with: python-version: "3.13" - name: Install dependencies - run: python -m pip install poetry - - name: Test with pytest run: | - export SIMVUE_URL=${{ secrets.SIMVUE_URL }} - export SIMVUE_TOKEN=${{ secrets.SIMVUE_TOKEN }} - poetry install --all-extras - # poetry run python -m pip install torch --index-url https://download.pytorch.org/whl/cpu FIXME: PyTorch current broken for Python3.13 - poetry run pytest tests/ -x --cov --cov-report=xml -m 'not scenario' -c /dev/null -p no:warnings -n 0 -v + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . 
+ - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/ -x --cov --cov-report=xml -m 'not scenario' -c /dev/null -p no:warnings -n 0 -v - name: Upload coverage reports to Codecov run: | curl -Os https://uploader.codecov.io/latest/linux/codecov diff --git a/.github/workflows/test_client_ubuntu_nightlies.yml b/.github/workflows/test_client_ubuntu_nightlies.yml index 746c0907..2c022814 100644 --- a/.github/workflows/test_client_ubuntu_nightlies.yml +++ b/.github/workflows/test_client_ubuntu_nightlies.yml @@ -27,11 +27,15 @@ jobs: with: python-version: "3.13" - name: Install dependencies - run: python -m pip install poetry - - name: Test with pytest run: | - export SIMVUE_URL=${{ secrets.SIMVUE_NIGHTLY_URL }} - export SIMVUE_TOKEN=${{ secrets.SIMVUE_NIGHTLY_TOKEN }} - poetry install --all-extras - # poetry run python -m pip install torch --index-url https://download.pytorch.org/whl/cpu FIXME: PyTorch current broken for Python3.13 - poetry run pytest tests/ -m 'not scenario' -c /dev/null -p no:warnings -n 0 -v + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . 
+ - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_NIGHTLY_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_NIGHTLY_TOKEN }} + run: python -m pytest tests/ -m 'not scenario' diff --git a/.github/workflows/test_client_windows_nightlies.yml b/.github/workflows/test_client_windows_nightlies.yml index 8742229d..dd9520ce 100644 --- a/.github/workflows/test_client_windows_nightlies.yml +++ b/.github/workflows/test_client_windows_nightlies.yml @@ -29,11 +29,13 @@ jobs: run: | rm poetry.lock python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch + python -m pip install -r requirements.txt + python -m pip install . - name: Test with pytest - shell: bash - run: | - export SIMVUE_URL=${{ secrets.SIMVUE_NIGHTLY_URL }} - export SIMVUE_TOKEN=${{ secrets.SIMVUE_NIGHTLY_TOKEN }} - poetry install --all-extras - # poetry run python -m pip install torch #FIXME: Torch tests skipped - poetry run pytest tests/ -m 'not scenario' -m 'not unix' + env: + SIMVUE_URL: ${{ secrets.SIMVUE_NIGHTLY_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_NIGHTLY_TOKEN }} + run: python -m pytest tests/ -m 'not scenario' -m 'not unix' diff --git a/.github/workflows/test_multiple_python.yml b/.github/workflows/test_multiple_python.yml index ec7b9568..0a5aa8d9 100644 --- a/.github/workflows/test_multiple_python.yml +++ b/.github/workflows/test_multiple_python.yml @@ -29,10 +29,15 @@ jobs: with: python-version: "${{ matrix.python-version }}" - name: Install dependencies - run: python -m pip install poetry - - name: Test with pytest run: | - export SIMVUE_URL=${{ secrets.SIMVUE_NIGHTLY_URL }} - export SIMVUE_TOKEN=${{ secrets.SIMVUE_NIGHTLY_TOKEN }} - poetry install --all-extras - poetry run pytest tests/ -m 'not scenario' + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip 
install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_NIGHTLY_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_NIGHTLY_TOKEN }} + run: python -m pytest tests/ -m 'not scenario' diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d35aba1..0bb7eb57 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ * Add support for defining Simvue run defaults using `tool.simvue` in a project `pyproject.toml` file. * Drop support for INI based configuration files. * Retrieve all metric values if `max_points` is unspecified or set to `None`. +* Add support for PyTorch in Python 3.13 +* Create lower level API for directly interacting with the Simvue RestAPI endpoints. +* **Removes support for Python <3.10 due to dependency constraints.** +* Separates `create_alert` into specific methods `create_event_alert` etc. ## [v1.1.3](https://github.com/simvue-io/client/releases/tag/v1.1.3) - 2024-12-09 diff --git a/simvue/utilities.py b/simvue/utilities.py index 4bf3e013..8428cb7f 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -2,7 +2,6 @@ import hashlib import logging import json -import sys import mimetypes import tabulate import pydantic @@ -143,14 +142,10 @@ def wrapper(self, *args, **kwargs) -> typing.Any: f"Plotting features require the '{extra_name}' extension to Simvue" ) elif extra_name == "torch": - if importlib.util.find_spec("torch"): + if not importlib.util.find_spec("torch"): raise RuntimeError( "PyTorch features require the 'torch' module to be installed" ) - if sys.version_info.minor > 12: - raise RuntimeError( - "PyTorch features are not yet supported for python>3.12" - ) elif extra_name not in EXTRAS: raise RuntimeError(f"Unrecognised extra '{extra_name}'") return class_func(self, *args, **kwargs) if class_func else None From aa958297bd85cbd83eee11db6b94b47ebdecbb0a Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 21 Jan 2025 08:11:37 +0000 Subject: [PATCH 084/163] Fix bad filter string when updating folder details --- simvue/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/client.py b/simvue/client.py index 12241728..3f4fb2b8 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -632,7 +632,7 @@ def get_folder( if there was a failure when retrieving information from the server """ _folders: typing.Generator[tuple[str, Folder], None, None] = Folder.get( - filters=json.dumps([f"path = {folder_path}"]) + filters=json.dumps([f"path == {folder_path}"]) ) # type: ignore try: From e5fe6c2370444298aa1d0d8b339cfca82f696d9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 21 Jan 2025 09:49:22 +0000 Subject: [PATCH 085/163] Fix user test --- simvue/api/objects/administrator/user.py | 13 +++++++++++ simvue/api/objects/alert/base.py | 14 ++++++++++++ simvue/api/objects/alert/user.py | 2 +- tests/unit/test_user.py | 28 ++++++++++++------------ 4 files changed, 42 insertions(+), 15 deletions(-) diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index 3702ba56..277016a1 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -116,3 +116,16 @@ def readonly(self) -> bool: @pydantic.validate_call def readonly(self, is_readonly: bool) -> None: self._staging["is_readonly"] = is_readonly + + @property + @staging_check + def enabled(self) -> bool: + if self.id and self.id.startswith("offline_"): + return self._get_attribute("user")["is_enabled"] + return self._get_attribute("is_enabled") + + @enabled.setter + @write_only + @pydantic.validate_call + def enabled(self, is_enabled: bool) -> None: + self._staging["is_enabled"] = is_enabled diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index ac8e09cc..0dfcf670 100644 --- a/simvue/api/objects/alert/base.py +++ 
b/simvue/api/objects/alert/base.py @@ -6,9 +6,12 @@ """ +import http import pydantic import typing from simvue.api.objects.base import SimvueObject, staging_check, write_only +from simvue.api.request import get as sv_get, get_json_from_response +from simvue.api.url import URL from simvue.models import NAME_REGEX @@ -129,6 +132,17 @@ def abort(self, abort: bool) -> None: @pydantic.validate_call def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: + """Set the status of this alert for a given run""" raise AttributeError( f"Cannot update state for alert of type '{self.__class__.__name__}'" ) + + def get_status(self, run_id: str) -> typing.Literal["ok", "critical"]: + """Retrieve the status of this alert for a given run""" + _url: URL = URL(self._user_config.server.url) / f"runs/{run_id}/{self.id}" + _response = sv_get(url=f"{_url}") + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieving status for alert '{self.id}' in run '{run_id}'", + ) diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 5e94a814..ed35c32a 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -71,7 +71,7 @@ def get( @pydantic.validate_call def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: - """Set alert name""" + """Set the status of this alert for a given run""" _response = sv_put( url=self.url / "status" / run_id, data={"status": status}, diff --git a/tests/unit/test_user.py b/tests/unit/test_user.py index e08ab3f1..f6bb3398 100644 --- a/tests/unit/test_user.py +++ b/tests/unit/test_user.py @@ -12,6 +12,11 @@ def test_create_user_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + _user = User.new( username="jbloggs", fullname="Joe 
Bloggs", @@ -20,14 +25,9 @@ def test_create_user_online() -> None: admin=False, readonly=True, welcome=False, - tenant=_uuid + tenant=_tenant.id ) - try: - _tenant.commit() - _user.commit() - except RuntimeError as e: - assert "You do not have permission" in str(e) - return + _user.commit() time.sleep(1) _new_user = User(_user.id) assert _new_user.username == "jbloggs" @@ -58,6 +58,11 @@ def test_create_user_offline() -> None: def test_user_get_properties() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return _user = User.new( username="jbloggs", fullname="Joe Bloggs", @@ -66,14 +71,9 @@ def test_user_get_properties() -> None: admin=False, readonly=True, welcome=False, - tenant=_uuid + tenant=_tenant.id ) - try: - _tenant.commit() - _user.commit() - except RuntimeError as e: - assert "You do not have permission" in str(e) - return + _user.commit() _failed = [] for member in _user._properties: From b014219f4378339ae0b6e552d0da00190e8149e9 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 21 Jan 2025 10:40:02 +0000 Subject: [PATCH 086/163] Fixed UTC time --- tests/unit/test_metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index c335bfbb..8ef0215d 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -25,7 +25,7 @@ def test_metrics_creation_online() -> None: run_id=_run.id, metrics=[ { - "timestamp": datetime.datetime.now(datetime.UTC).strftime( + "timestamp": datetime.datetime.now(datetime.timezone.utc).strftime( "%Y-%m-%dT%H:%M:%S.%f" ), "time": _time, From d5a9843fea41b18aab7bd9ccd29db119b8f9b462 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 21 Jan 2025 10:56:27 +0000 Subject: [PATCH 087/163] Changed caching to different files and folders for types and runs --- simvue/api/objects/base.py | 6 ++++-- 1 file 
changed, 4 insertions(+), 2 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index ffddd1b3..5b2a356f 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -154,7 +154,9 @@ def __init__( self._user_config = SimvueConfiguration.fetch(**_config_args) self._local_staging_file: pathlib.Path = ( - self._user_config.offline.cache.joinpath("staging.json") + self._user_config.offline.cache.joinpath( + self._endpoint, f"{self._identifier}.json" + ) ) self._headers: dict[str, str] = { @@ -507,7 +509,7 @@ def _get( def _cache(self) -> None: if not (_dir := self._local_staging_file.parent).exists(): - _dir.mkdir() + _dir.mkdir(parents=True) _local_data: dict[str, typing.Any] = {} From 3cdbb5dec67b1b42b4c3200415a8559a0113fe37 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 21 Jan 2025 13:45:51 +0000 Subject: [PATCH 088/163] Fixed test_log_metrics --- tests/functional/test_run_class.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 38c53f48..cf00656b 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -47,14 +47,15 @@ def test_check_run_initialised_decorator() -> None: assert "Simvue Run must be initialised" in str(e.value) -@pytest.mark.run -def test_run_with_emissions() -> None: - with sv_run.Run() as run_created: - run_created.init(retention_period="1 min") - run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) - time.sleep(5) - _run = RunObject(identifier=run_created.id) - assert list(_run.metrics) +# @pytest.mark.run +# def test_run_with_emissions() -> None: +# with sv_run.Run() as run_created: +# run_created.init(retention_period="1 min") +# run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) +# time.sleep(5) +# _run = RunObject(identifier=run_created.id) +# import pdb; pdb.set_trace() +# assert 
list(_run.metrics) @pytest.mark.run @@ -124,8 +125,8 @@ def test_log_metrics( aggregate=False, ) - with contextlib.suppress(RuntimeError): - client.delete_run(run._id) + #with contextlib.suppress(RuntimeError): + # client.delete_run(run._id) assert _data @@ -134,12 +135,13 @@ def test_log_metrics( for entry in _data.values(): _steps += [i[0] for i in entry.keys()] _steps = set(_steps) + assert ( - run._dispatcher._max_buffer_size * 3 if overload_buffer else len(_steps) == 1 + len(_steps) == (run._dispatcher._max_buffer_size * 3 if overload_buffer else 1) ) - - # Check metrics have been set - assert 3 if overload_buffer else setup_logging.counts[0] == 1 + # There are two debug log messages per metric dispatch - 'Executing callback on buffer' and 'Posting staged data' + # Should have done one dispatch if not overloaded, and 3 dispatches if overloaded + assert setup_logging.counts[0] == (6 if overload_buffer else 2) # Check heartbeat has been called at least once (so sysinfo sent) assert setup_logging.counts[1] > 0 From 1c638639b00d701e490825629528f49872c4a6f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 21 Jan 2025 16:46:49 +0000 Subject: [PATCH 089/163] Add to_dict checks --- simvue/api/objects/base.py | 5 ++++- simvue/api/objects/metrics.py | 4 +--- simvue/api/objects/storage/base.py | 4 ++-- simvue/api/request.py | 5 ++++- tests/unit/test_artifact.py | 10 ++++++++++ tests/unit/test_event_alert.py | 1 + tests/unit/test_file_storage.py | 1 + tests/unit/test_folder.py | 1 + tests/unit/test_metrics.py | 5 +++-- tests/unit/test_run.py | 1 + 10 files changed, 28 insertions(+), 9 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index ffddd1b3..dbac323b 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -524,7 +524,10 @@ def _cache(self) -> None: json.dump(_local_data, out_f, indent=2) def to_dict(self) -> dict[str, typing.Any]: - return {key: getattr(self, key) for key in 
self._properties} + return { + key: value.__str__() if (value := getattr(self, key)) is not None else None + for key in self._properties + } @property def staged(self) -> dict[str, typing.Any] | None: diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index bdb5d2ab..63ec665b 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -78,9 +78,7 @@ def get( def span(self, run_ids: list[str]) -> dict[str, int | float]: """Returns the metrics span for the given runs""" _url = self._base_url / "span" - _response = sv_get( - url=f"{_url}", headers=self._headers, data={"runs": json.dumps(run_ids)} - ) + _response = sv_get(url=f"{_url}", headers=self._headers, data=run_ids) return get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 71bd566c..437297f5 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -60,14 +60,14 @@ def default(self, is_default: bool) -> None: @staging_check def tenant_usable(self) -> bool: """Retrieve if this is usable by the current user tenant""" - return self._get_attribute("is_tenant_usable") + return self._get_attribute("is_tenant_useable") @tenant_usable.setter @write_only @pydantic.validate_call def tenant_usable(self, is_tenant_usable: bool) -> None: """Set this storage to be usable by the current user tenant""" - self._staging["tenant_usable"] = is_tenant_usable + self._staging["is_tenant_useable"] = is_tenant_usable @property @staging_check diff --git a/simvue/api/request.py b/simvue/api/request.py index 1c63a149..768cde41 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -167,6 +167,7 @@ def get( headers: dict[str, str], params: dict[str, str | int | float | None] | None = None, timeout: int = DEFAULT_API_TIMEOUT, + data: dict[str, typing.Any] | None = None, ) -> requests.Response: """HTTP GET @@ -178,6 +179,8 @@ def 
get( headers for the post request timeout : int, optional timeout of request, by default DEFAULT_API_TIMEOUT + data : dict[str, Any] | None, optional + any data to send in request Returns ------- @@ -185,7 +188,7 @@ def get( response from executing GET """ logging.debug(f"GET: {url}\n\tparams={params}") - return requests.get(url, headers=headers, timeout=timeout, params=params) + return requests.get(url, headers=headers, timeout=timeout, params=params, data=data) @retry( diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 693478ce..3e86089d 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -4,6 +4,7 @@ import time import pathlib import tempfile +import numpy from simvue.api.objects import Artifact, Run from simvue.api.objects.folder import Folder @@ -41,6 +42,15 @@ def test_artifact_creation_online() -> None: assert _artifact.name == f"test_artifact_{_uuid}" _content = b"".join(_artifact.download_content()).decode("UTF-8") assert _content == f"Hello World! 
{_uuid}" + assert _artifact.to_dict() + _test_array = numpy.array(range(10)) + _artifact = Artifact.new_object( + name=f"test_artifact_obj_{_uuid}", + storage_id=None, + obj=_test_array, + metadata=None + ) + _artifact.attach_to_run(_run.id, "output") _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) if _failed: diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index 5853170b..c02b5fe8 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -22,6 +22,7 @@ def test_event_alert_creation_online() -> None: assert _alert.alert.pattern == "completed" assert _alert.name == f"events_alert_{_uuid}" assert _alert.notification == "none" + assert _alert.to_dict() _alert.delete() diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index a2732235..8c0a718b 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -14,6 +14,7 @@ def test_create_file_storage_online() -> None: _storage.commit() assert _storage.enabled assert _storage.name == _uuid + assert _storage.to_dict() _storage.delete() diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 359d0df9..8a39389e 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -20,6 +20,7 @@ def test_folder_creation_online() -> None: assert _folders assert _folders[_folder.id] assert _folders[_folder.id]._read_only + assert _folder.to_dict() with pytest.raises(AssertionError): _folders[_folder.id].name = "hello" _folder.delete(recursive=True, delete_runs=True, runs_only=False) diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index c6a2d564..a9623410 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -21,6 +21,8 @@ def test_metrics_creation_online() -> None: } _time: int = 1 _step: int = 1 + _folder.commit() + _run.commit() _metrics = Metrics.new( run_id=_run.id, metrics=[ @@ -34,12 +36,11 @@ def 
test_metrics_creation_online() -> None: } ], ) - _folder.commit() - _run.commit() _metrics.commit() assert _metrics.get(metrics=["x", "y", "z"], xaxis="step") assert _metrics.span(run_ids=[_run.id]) assert _metrics.names(run_ids=[_run.id]) + assert _metrics.to_dict() _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index ef9f3109..be099fb4 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -116,6 +116,7 @@ def test_run_get_properties() -> None: _folder.commit() _run.commit() _failed = [] + assert _run.to_dict() for member in _run._properties: try: From 42477726deb64c10468f0bdb6a92bc16f474a985 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 21 Jan 2025 17:13:42 +0000 Subject: [PATCH 090/163] Add to_dict checks and added correct expect type for metrics names --- simvue/api/objects/metrics.py | 5 +++-- simvue/api/objects/stats.py | 3 +++ simvue/api/request.py | 8 ++++---- simvue/config/user.py | 2 +- simvue/utilities.py | 17 ++++++++++++----- tests/unit/test_s3_storage.py | 1 + tests/unit/test_stats.py | 1 + 7 files changed, 25 insertions(+), 12 deletions(-) diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index 63ec665b..b31bf667 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -78,7 +78,7 @@ def get( def span(self, run_ids: list[str]) -> dict[str, int | float]: """Returns the metrics span for the given runs""" _url = self._base_url / "span" - _response = sv_get(url=f"{_url}", headers=self._headers, data=run_ids) + _response = sv_get(url=f"{_url}", headers=self._headers, json=run_ids) return get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], @@ -86,7 +86,7 @@ def span(self, run_ids: list[str]) -> dict[str, int | float]: ) @pydantic.validate_call - def names(self, run_ids: list[str]) -> dict[str, int | float]: + def names(self, run_ids: 
list[str]) -> list[str]: """Returns the metric names for the given runs""" _url = self._base_url / "names" _response = sv_get( @@ -96,6 +96,7 @@ def names(self, run_ids: list[str]) -> dict[str, int | float]: response=_response, expected_status=[http.HTTPStatus.OK], scenario="Retrieving metric names", + expected_type=list, ) def _post(self, **kwargs) -> dict[str, typing.Any]: diff --git a/simvue/api/objects/stats.py b/simvue/api/objects/stats.py index d016cfeb..094c5376 100644 --- a/simvue/api/objects/stats.py +++ b/simvue/api/objects/stats.py @@ -40,6 +40,9 @@ def _get_local_staged(self) -> dict[str, typing.Any]: def _get_visibility(self) -> dict[str, bool | list[str]]: return {} + def to_dict(self) -> dict[str, typing.Any]: + return {"runs": self._get_run_stats()} + class RunStatistics: def __init__(self, sv_obj: Stats) -> None: diff --git a/simvue/api/request.py b/simvue/api/request.py index 768cde41..999fe0db 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -167,7 +167,7 @@ def get( headers: dict[str, str], params: dict[str, str | int | float | None] | None = None, timeout: int = DEFAULT_API_TIMEOUT, - data: dict[str, typing.Any] | None = None, + json: dict[str, typing.Any] | None = None, ) -> requests.Response: """HTTP GET @@ -179,8 +179,8 @@ def get( headers for the post request timeout : int, optional timeout of request, by default DEFAULT_API_TIMEOUT - data : dict[str, Any] | None, optional - any data to send in request + json : dict[str, Any] | None, optional + any json to send in request Returns ------- @@ -188,7 +188,7 @@ def get( response from executing GET """ logging.debug(f"GET: {url}\n\tparams={params}") - return requests.get(url, headers=headers, timeout=timeout, params=params, data=data) + return requests.get(url, headers=headers, timeout=timeout, params=params, json=json) @retry( diff --git a/simvue/config/user.py b/simvue/config/user.py index 53ce59fa..4c704457 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ 
-113,7 +113,7 @@ def _check_server( except Exception as err: raise AssertionError( - f"Exception retrieving server version: {str(err)}" + f"Exception retrieving server version:\n {str(err)}" ) from err _version = semver.Version.parse(_version_str) diff --git a/simvue/utilities.py b/simvue/utilities.py index 8428cb7f..f7b966aa 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -159,11 +159,18 @@ def parse_pydantic_error(error: pydantic.ValidationError) -> str: out_table: list[str] = [] for data in json.loads(error.json()): _input = data.get("input") if data["input"] is not None else "None" - _input_str = ( - _input_str - if len((_input_str := f"{_input}")) < 50 - else f"{_input_str[:50]}..." - ) + if isinstance(_input, dict): + _input_str = json.dumps(_input, indent=2) + _input_str = "\n".join( + f"{line[:47]}..." if len(line) > 50 else line + for line in _input_str.split("\n") + ) + else: + _input_str = ( + _input_str + if len((_input_str := f"{_input}")) < 50 + else f"{_input_str[:50]}..." 
+ ) _type: str = data["type"] _skip_type_compare_for = ( diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 4181d630..743259ae 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -22,6 +22,7 @@ def test_create_s3_online() -> None: bucket="dummy_bucket" ) _storage.commit() + assert _storage.to_dict() assert _storage.name == _uuid assert _storage.config.endpoint_url == "https://not-a-real-url.io/" assert _storage.config.region_name == "fictionsville" diff --git a/tests/unit/test_stats.py b/tests/unit/test_stats.py index 2139cf3d..0cb012ff 100644 --- a/tests/unit/test_stats.py +++ b/tests/unit/test_stats.py @@ -11,6 +11,7 @@ def test_stats() -> None: assert isinstance(_statistics.runs.running, int) assert isinstance(_statistics.runs.completed, int) assert isinstance(_statistics.runs.data, int) + assert _statistics.to_dict() with pytest.raises(AttributeError): Stats.new() From 3ddfe795128a14fe50cbe437572c3250b6843105 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 22 Jan 2025 09:00:43 +0000 Subject: [PATCH 091/163] Undo commenting --- tests/functional/test_run_class.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index cf00656b..304fbad9 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -125,8 +125,8 @@ def test_log_metrics( aggregate=False, ) - #with contextlib.suppress(RuntimeError): - # client.delete_run(run._id) + with contextlib.suppress(RuntimeError): + client.delete_run(run._id) assert _data From c1aca1a74e1c28a8ef5da78912f96c4b6221553a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 22 Jan 2025 11:53:58 +0000 Subject: [PATCH 092/163] Started new sender --- simvue/api/objects/base.py | 33 ++-- simvue/sender.py | 332 ------------------------------------- simvue/sender/__init__.py | 43 +++++ 3 files changed, 58 insertions(+), 350 
deletions(-) delete mode 100644 simvue/sender.py create mode 100644 simvue/sender/__init__.py diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 1a996d4f..8dc40c9e 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -153,6 +153,9 @@ def __init__( } self._user_config = SimvueConfiguration.fetch(**_config_args) + + # Use a single file for each object so we can have parallelism + # e.g. multiple runs writing at the same time self._local_staging_file: pathlib.Path = ( self._user_config.offline.cache.joinpath( self._endpoint, f"{self._identifier}.json" @@ -194,23 +197,20 @@ def _stage_to_other(self, obj_label: str, key: str, value: typing.Any) -> None: with self._local_staging_file.open() as in_f: _staged_data = json.load(in_f) - if obj_label not in _staged_data: - _staged_data[obj_label] = {} - if key not in _staged_data[obj_label]: - _staged_data[obj_label][key] = value + _staged_data[key] = value return - if isinstance(_staged_data[obj_label][key], list): - if not _staged_data[obj_label].get(key): - _staged_data[obj_label][key] = [] - _staged_data[obj_label][key] += value - elif isinstance(_staged_data[obj_label][key], dict): - if not _staged_data[obj_label].get(key): - _staged_data[obj_label][key] = {} - _staged_data[obj_label][key] |= value + if isinstance(_staged_data[key], list): + if not _staged_data.get(key): + _staged_data[key] = [] + _staged_data[key] += value + elif isinstance(_staged_data[key], dict): + if not _staged_data.get(key): + _staged_data[key] = {} + _staged_data[key] |= value else: - _staged_data[obj_label][key] = value + _staged_data[key] = value with self._local_staging_file.open("w") as out_f: json.dump(_staged_data, out_f, indent=2) @@ -511,16 +511,13 @@ def _cache(self) -> None: if not (_dir := self._local_staging_file.parent).exists(): _dir.mkdir(parents=True) - _local_data: dict[str, typing.Any] = {} + _local_data: dict[str, typing.Any] = {"obj_type": self.__class__.__name__} if 
self._local_staging_file.exists(): with self._local_staging_file.open() as in_f: _local_data = json.load(in_f) - if not _local_data.get(self._label): - _local_data[self._label] = {} - - _local_data[self._label][self._identifier] = self._staging + _local_data = self._staging with self._local_staging_file.open("w", encoding="utf-8") as out_f: json.dump(_local_data, out_f, indent=2) diff --git a/simvue/sender.py b/simvue/sender.py deleted file mode 100644 index b9d81672..00000000 --- a/simvue/sender.py +++ /dev/null @@ -1,332 +0,0 @@ -from concurrent.futures import ThreadPoolExecutor -import glob -import json -import pathlib -import typing -import logging -import os -import shutil -import time - -import msgpack - -from simvue.config.user import SimvueConfiguration - -from .factory.proxy.remote import Remote -from .utilities import create_file, remove_file - -logger = logging.getLogger(__name__) - -NUM_PARALLEL_WORKERS = 10 -MAX_RUNS = 10 - - -def set_details(name, id, filename): - """ - Write name & id to file - """ - data = {"name": name, "id": id} - with open(filename, "w") as fh: - json.dump(data, fh) - - -def get_details(name): - """ - Get name & id from file - """ - with open(name) as fh: - data = json.load(fh) - return data["name"], data["id"] - - -def update_name(id, data): - """ - Update id in metrics/events - """ - for item in data: - item["id"] = id - - -def add_name(name, data, filename): - """ - Update name in JSON - """ - if not data["name"]: - data["name"] = name - with open(filename, "w") as fh: - json.dump(data, fh) - - return data - - -def read_json(filename): - with open(filename, "r") as fh: - return json.load(fh) - - -def get_json(filename, run_id=None, artifact=False): - """ - Get JSON from a file - """ - with open(filename, "r") as fh: - data = json.load(fh) - if run_id: - if artifact: - for item in data: - if item == "run": - data[item] = run_id - return data - - if "run" in data: - data["run"] = run_id - else: - data["id"] = run_id - - return 
data - - -def sender( - server_url: typing.Optional[str] = None, server_token: typing.Optional[str] = None -) -> str: - """ - Asynchronous upload of runs to Simvue server - """ - directory: pathlib.Path = SimvueConfiguration.fetch(mode="offline").offline.cache - - # Clean up old runs after waiting 5 mins - runs = directory.glob("*/sent") - - for run in runs: - id = run.split("/")[len(run.split("/")) - 2] - logger.info("Cleaning up directory with id %s", id) - - if time.time() - os.path.getmtime(run) > 300: - try: - shutil.rmtree(f"{directory.joinpath(id)}") - except Exception: - logger.error("Got exception trying to cleanup run in directory %s", id) - - # Deal with runs in the created, running or a terminal state - runs = ( - directory.glob("*/created") - + directory.glob("*/running") - + directory.glob("*/completed") - + directory.glob("*/failed") - + directory.glob("*/terminated") - ) - - if len(runs) > MAX_RUNS: - logger.info("Lauching %d workers", NUM_PARALLEL_WORKERS) - with ThreadPoolExecutor(NUM_PARALLEL_WORKERS) as executor: - for run in runs: - executor.submit( - process(run, server_token=server_token, server_url=server_url) - ) - return [executor.result() for _ in runs] - else: - return [process(run) for run in runs] - - -def process( - run, server_url: typing.Optional[str], server_token: typing.Optional[str] -) -> typing.Optional[str]: - """ - Handle updates for the specified run - """ - status = None - - if run.endswith("running"): - status = "running" - if run.endswith("created"): - status = "created" - elif run.endswith("completed"): - status = "completed" - elif run.endswith("failed"): - status = "failed" - elif run.endswith("terminated"): - status = "terminated" - - current = ( - run.replace("/running", "") - .replace("/completed", "") - .replace("/failed", "") - .replace("/terminated", "") - .replace("/created", "") - ) - - if os.path.isfile(f"{current}/sent"): - if status == "running": - remove_file(f"{current}/running") - elif status == 
"completed": - remove_file(f"{current}/completed") - elif status == "failed": - remove_file(f"{current}/failed") - elif status == "terminated": - remove_file(f"{current}/terminated") - elif status == "created": - remove_file(f"{current}/created") - return - - id = run.split("/")[len(run.split("/")) - 2] - - run_init = get_json(f"{current}/run.json") - start_time = os.path.getctime(f"{current}/run.json") - - if run_init["name"]: - logger.info("Considering run with name %s and id %s", run_init["name"], id) - else: - logger.info("Considering run with no name yet and id %s", id) - - # Create run if it hasn't previously been created - created_file = f"{current}/init" - name = None - config = SimvueConfiguration.fetch( - mode="online", server_token=server_token, server_url=server_url - ) - if not os.path.isfile(created_file): - remote = Remote( - name=run_init["name"], uniq_id=id, config=config, suppress_errors=False - ) - - name, run_id = remote.create_run(run_init) - if name: - logger.info("Creating run with name %s and id %s", name, id) - run_init = add_name(name, run_init, f"{current}/run.json") - set_details(name, run_id, created_file) - else: - logger.error("Failure creating run") - return - else: - name, run_id = get_details(created_file) - run_init["name"] = name - remote = Remote( - name=run_init["name"], uniq_id=run_id, config=config, suppress_errors=False - ) - - if status == "running": - # Check for recent heartbeat - heartbeat_filename = f"{current}/heartbeat" - if os.path.isfile(heartbeat_filename): - mtime = os.path.getmtime(heartbeat_filename) - if time.time() - mtime > 180: - status = "lost" - - # Check for no recent heartbeat - if not os.path.isfile(heartbeat_filename): - if time.time() - start_time > 180: - status = "lost" - - # Handle lost runs - if status == "lost": - logger.info("Changing status to lost, name %s and id %s", run_init["name"], id) - status = "lost" - create_file(f"{current}/lost") - remove_file(f"{current}/running") - - # Send 
heartbeat if the heartbeat file was touched recently - heartbeat_filename = f"{current}/heartbeat" - if os.path.isfile(heartbeat_filename): - if ( - status == "running" - and time.time() - os.path.getmtime(heartbeat_filename) < 120 - ): - logger.info("Sending heartbeat for run with name %s", run_init["name"]) - remote.send_heartbeat() - - metrics_gathered = [] - events_gathered = [] - - # Upload metrics, events, files & metadata as necessary - files = sorted(glob.glob(f"{current}/*"), key=os.path.getmtime) - updates = 0 - for record in files: - if ( - record.endswith("/run.json") - or record.endswith("/running") - or record.endswith("/completed") - or record.endswith("/failed") - or record.endswith("/terminated") - or record.endswith("/lost") - or record.endswith("/sent") - or record.endswith("-proc") - ): - continue - - rename = False - - # Handle metrics - if "/metrics-" in record: - logger.info("Gathering metrics for run %s", run_init["name"]) - data = get_json(record, run_id) - metrics_gathered = metrics_gathered + data["metrics"] - rename = True - - # Handle events - if "/events-" in record: - logger.info("Gathering events for run %s", run_init["name"]) - data = get_json(record, run_id) - events_gathered = events_gathered + data["events"] - rename = True - - # Handle updates - if "/update-" in record: - logger.info("Sending update for run %s", run_init["name"]) - data = get_json(record, run_id) - if remote.update(data): - for item in data: - if item == "status" and data[item] in ( - "completed", - "failed", - "terminated", - ): - create_file(f"{current}/sent") - remove_file(f"{current}/{status}") - rename = True - - # Handle folders - if "/folder-" in record: - logger.info("Sending folder details for run %s", run_init["name"]) - if remote.set_folder_details(get_json(record, run_id)): - rename = True - - # Handle alerts - if "/alert-" in record: - logger.info("Sending alert details for run %s", run_init["name"]) - if remote.add_alert(get_json(record, run_id)): 
- rename = True - - # Handle files - if "/file-" in record: - logger.info("Saving file for run %s", run_init["name"]) - if remote.save_file(get_json(record, run_id, True)): - rename = True - - # Rename processed files - if rename: - os.rename(record, f"{record}-proc") - updates += 1 - - # Send metrics if necessary - if metrics_gathered: - logger.info("Sending metrics for run %s", run_init["name"]) - data = {"metrics": metrics_gathered, "run": run_id} - remote.send_metrics(msgpack.packb(data, use_bin_type=True)) - - # Send events if necessary - if events_gathered: - logger.info("Sending events for run %s", run_init["name"]) - data = {"events": events_gathered, "run": run_id} - remote.send_event(msgpack.packb(data, use_bin_type=True)) - - # If the status is completed and there were no updates, the run must have completely finished - if updates == 0 and status in ("completed", "failed", "terminated"): - logger.info("Finished sending run %s", run_init["name"]) - data = {"id": run_id, "status": status} - if remote.update(data): - create_file(f"{current}/sent") - remove_file(f"{current}/{status}") - elif updates == 0 and status == "lost": - logger.info("Finished sending run %s as it was lost", run_init["name"]) - create_file(f"{current}/sent") - - return run_id diff --git a/simvue/sender/__init__.py b/simvue/sender/__init__.py new file mode 100644 index 00000000..b5a921e1 --- /dev/null +++ b/simvue/sender/__init__.py @@ -0,0 +1,43 @@ +# Collator +import json +import pathlib +import pydantic +import typing +from simvue.api.objects.base import SimvueObject +from simvue.api.objects.storage.file import FileStorage + +UPLOAD_ORDER: tuple[str, ...] 
= ( + "tenants", + "users", + "storage", + "folders", + "tags", + "alerts", + "runs", + "artifacts", +) + + +@pydantic.validate_call +def _check_local_staging(cache_dir: pydantic.DirectoryPath) -> None: + """Check local cache and assemble any objects for sending""" + _upload_data: dict[str, dict[str, typing.Any]] = {} + for obj_type in UPLOAD_ORDER: + _cache_files: list[pathlib.Path] = cache_dir.glob(f"{obj_type}/*.json") + _upload_data[obj_type] = { + _path.name.split(".")[0]: json.load(_path.open()) for _path in _cache_files + } + return _upload_data + + +# Create instances from local cache +# We have to link created IDs to other objects +def _assemble_objects( + locally_staged: dict[str, dict[str, typing.Any]], +) -> typing.Generator[SimvueObject, None, None]: + for obj_type, data in locally_staged.items(): + if obj_type == "storage" and data.pop("type") == "File": + FileStorage.new(**data) + + +# Rather than a script with API calls each object will send itself From 126d409e72b426ec8897332ef5d33968e609887e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 22 Jan 2025 12:56:49 +0000 Subject: [PATCH 093/163] Add obj_type to cached info --- simvue/api/objects/base.py | 2 +- simvue/sender/__init__.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 8dc40c9e..bbf502b1 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -517,7 +517,7 @@ def _cache(self) -> None: with self._local_staging_file.open() as in_f: _local_data = json.load(in_f) - _local_data = self._staging + _local_data |= self._staging with self._local_staging_file.open("w", encoding="utf-8") as out_f: json.dump(_local_data, out_f, indent=2) diff --git a/simvue/sender/__init__.py b/simvue/sender/__init__.py index b5a921e1..d43b4d86 100644 --- a/simvue/sender/__init__.py +++ b/simvue/sender/__init__.py @@ -4,7 +4,6 @@ import pydantic import typing from 
simvue.api.objects.base import SimvueObject -from simvue.api.objects.storage.file import FileStorage UPLOAD_ORDER: tuple[str, ...] = ( "tenants", @@ -35,9 +34,10 @@ def _check_local_staging(cache_dir: pydantic.DirectoryPath) -> None: def _assemble_objects( locally_staged: dict[str, dict[str, typing.Any]], ) -> typing.Generator[SimvueObject, None, None]: - for obj_type, data in locally_staged.items(): - if obj_type == "storage" and data.pop("type") == "File": - FileStorage.new(**data) + for obj_type in UPLOAD_ORDER: + _data = locally_staged.get(obj_type, {}) + for _local_id, _obj in _data.items(): + _exact_type: str = _data["obj_type"] # Rather than a script with API calls each object will send itself From cb6a1c7af76e99bb3822aec0d8b7d48076157e4c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 22 Jan 2025 13:06:56 +0000 Subject: [PATCH 094/163] Created offline object initialiser function --- simvue/sender/__init__.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/simvue/sender/__init__.py b/simvue/sender/__init__.py index d43b4d86..3fb4ba9f 100644 --- a/simvue/sender/__init__.py +++ b/simvue/sender/__init__.py @@ -4,6 +4,7 @@ import pydantic import typing from simvue.api.objects.base import SimvueObject +import simvue.api.objects UPLOAD_ORDER: tuple[str, ...] 
= ( "tenants", @@ -37,7 +38,14 @@ def _assemble_objects( for obj_type in UPLOAD_ORDER: _data = locally_staged.get(obj_type, {}) for _local_id, _obj in _data.items(): - _exact_type: str = _data["obj_type"] + _exact_type: str = _obj.pop("obj_type") + try: + _instance_class = getattr(simvue.api.objects, _exact_type) + except AttributeError as e: + raise RuntimeError( + f"Attempt to initialise unknown type '{_exact_type}'" + ) from e + yield _instance_class(**_obj) # Rather than a script with API calls each object will send itself From b0e6e8c872a26ba68c9b8163077dafea9e34ada5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 22 Jan 2025 22:52:11 +0000 Subject: [PATCH 095/163] Allow arbitary number of args for each low level class new method --- simvue/api/objects/alert/events.py | 1 + simvue/api/objects/alert/metrics.py | 1 + simvue/api/objects/alert/user.py | 1 + simvue/api/objects/artifact.py | 15 +++++++++------ simvue/api/objects/folder.py | 1 + simvue/api/objects/run.py | 1 + simvue/api/objects/tag.py | 7 +------ simvue/sender/__init__.py | 25 ++++++++++++++++++++++--- 8 files changed, 37 insertions(+), 15 deletions(-) diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 3489dbd1..af166b43 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -44,6 +44,7 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, offline: bool = False, + **_, ) -> Self: """Create a new event-based alert diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index f8c4ea91..df17ffa8 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -53,6 +53,7 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, offline: bool = False, + **_, ) -> Self: """Create a new metric threshold alert either locally or on the server diff --git a/simvue/api/objects/alert/user.py 
b/simvue/api/objects/alert/user.py index ed35c32a..03faff8e 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -33,6 +33,7 @@ def new( notification: typing.Literal["none", "email"], enabled: bool = True, offline: bool = False, + **_, ) -> Self: """Create a new user-defined alert diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 4f01532c..ca182f1f 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -59,22 +59,25 @@ def new( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - storage_id: str | None, checksum: str, size: int, - file_type: str, - original_path: pathlib.Path | None, + storage_id: str | None = None, + file_type: str | None = None, + original_path: pathlib.Path | None = None, metadata: dict[str, typing.Any] | None, offline: bool = False, **kwargs, ) -> Self: + _storage = kwargs.pop("storage", None) + _orig_path = original_path or kwargs.pop("originalPath", None) + _file_type = kwargs.pop("type", None) _artifact = Artifact( name=name, checksum=checksum, size=size, - originalPath=f"{original_path or ''}", - storage=storage_id, - type=file_type, + originalPath=f"{_orig_path or ''}", + storage=_storage, + type=_file_type, metadata=metadata, _read_only=False, ) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 357bdfdf..f3a04276 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -55,6 +55,7 @@ def new( *, path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], offline: bool = False, + **_, ): """Create a new Folder on the Simvue server with the given path""" _folder = Folder(path=path, _read_only=False) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 25de7ab1..9d322540 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -58,6 +58,7 @@ def new( *, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], offline: 
bool = False, + **_, ) -> Self: """Create a new Folder on the Simvue server with the given path""" _run = Run(folder=folder, system=None, status="created", _read_only=False) diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 8725d73b..0e1f6c79 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -19,12 +19,7 @@ class Tag(SimvueObject): @classmethod @pydantic.validate_call - def new( - cls, - *, - name: str, - offline: bool = False, - ): + def new(cls, *, name: str, offline: bool = False, **_): """Create a new Tag on the Simvue server""" _data: dict[str, typing.Any] = {"name": name} _tag = Tag(name=name, _read_only=False) diff --git a/simvue/sender/__init__.py b/simvue/sender/__init__.py index 3fb4ba9f..23170f8d 100644 --- a/simvue/sender/__init__.py +++ b/simvue/sender/__init__.py @@ -2,8 +2,10 @@ import json import pathlib import pydantic +import logging import typing from simvue.api.objects.base import SimvueObject +from simvue.utilities import prettify_pydantic import simvue.api.objects UPLOAD_ORDER: tuple[str, ...] 
= ( @@ -17,9 +19,10 @@ "artifacts", ) +_logger = logging.getLogger(__name__) -@pydantic.validate_call -def _check_local_staging(cache_dir: pydantic.DirectoryPath) -> None: + +def _check_local_staging(cache_dir: pathlib.Path) -> None: """Check local cache and assemble any objects for sending""" _upload_data: dict[str, dict[str, typing.Any]] = {} for obj_type in UPLOAD_ORDER: @@ -45,7 +48,23 @@ def _assemble_objects( raise RuntimeError( f"Attempt to initialise unknown type '{_exact_type}'" ) from e - yield _instance_class(**_obj) + yield _instance_class.new(**_obj) # Rather than a script with API calls each object will send itself +@prettify_pydantic +@pydantic.validate_call +def uploader(cache_dir: pydantic.DirectoryPath) -> None: + _locally_staged = _check_local_staging(cache_dir) + _offline_to_online_id_mapping: dict[str, str] = {} + for obj in _assemble_objects(_locally_staged): + _current_id = obj._identifier + try: + obj.commit() + _logger.info(f"Created {obj.__class__.__name__} '{obj.id}'") + except RuntimeError as e: + if "status 409" in e.args[0]: + continue + else: + raise e + _offline_to_online_id_mapping[_current_id] = obj.id From 0a6045fe87b006bb4fa6c4d22a2bd438e94c1e20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 22 Jan 2025 23:17:08 +0000 Subject: [PATCH 096/163] Add cache file removal to uploading Few type fixes --- simvue/api/objects/base.py | 8 +++++-- simvue/api/objects/run.py | 42 +++--------------------------------- simvue/api/request.py | 7 +++--- simvue/sender/__init__.py | 44 ++++++++++++++++++++++++-------------- 4 files changed, 41 insertions(+), 60 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index bbf502b1..9137d234 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -274,8 +274,9 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: except AttributeError: return {} - @abc.abstractclassmethod - def new(cls, **_): + @classmethod + 
@abc.abstractmethod + def new(cls, **_) -> Self: pass @classmethod @@ -419,6 +420,9 @@ def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: scenario=f"Creation of {self._label}", ) + if isinstance(_json_response, list): + raise RuntimeError("Expected dictionary from JSON response but got type list") + if _id := _json_response.get("id"): self._logger.debug("'%s' created successfully", _id) self._identifier = _id diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 9d322540..6d1aaa00 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -258,44 +258,6 @@ def events( self._logger.warning(f"Uncommitted metrics found for run '{self.id}'") yield from self._get_attribute("events").items() - @pydantic.validate_call - def log_entries( - self, - entry_type: typing.Literal["metrics", "events"], - entries: list[MetricSet | EventSet], - ) -> None: - """Add entries to server or local staging""" - if not self._identifier: - raise RuntimeError("Cannot stage metrics, no identifier found") - - _validated_entries: list[dict] = [entry.model_dump() for entry in entries] - - if self._offline or self._identifier.startswith("offline_"): - self._stage_to_other(entry_type, self._identifier, _validated_entries) - return - - if entry_type == "events": - _events = Events.new(run_id=self._identifier, events=entries) - _events.commit() - return - - _url = URL(self._user_config.server.url) / entry_type - _data = {entry_type: _validated_entries, "run": self._identifier} - _data_bin = msgpack.packb(_data, use_bin_type=True) - - _msgpack_header = self._headers | {"Content-Type": "application/msgpack"} - - _response = sv_post( - f"{_url}", headers=_msgpack_header, data=_data_bin, is_json=False - ) - - get_json_from_response( - response=_response, - expected_status=[http.HTTPStatus.OK], - scenario=f"Logging of {entry_type} '{entries}' for run '{self.id}'", - allow_parse_failure=True, - ) - @write_only def send_heartbeat(self) -> dict[str, 
typing.Any] | None: if self._offline or not self._identifier: @@ -312,7 +274,9 @@ def send_heartbeat(self) -> dict[str, typing.Any] | None: @property def _abort_url(self) -> URL | None: - return self.url / "abort" if self._identifier else None + if not self.url: + return None + return self.url / "abort" @property def _artifact_url(self) -> URL | None: diff --git a/simvue/api/request.py b/simvue/api/request.py index 999fe0db..2b24b8fc 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -230,7 +230,7 @@ def get_json_from_response( scenario: str, response: requests.Response, allow_parse_failure: bool = False, - expected_type: list | dict = dict, + expected_type: typing.Type[dict | list] = dict, ) -> typing.Union[dict, list]: try: json_response = response.json() @@ -241,6 +241,7 @@ def get_json_from_response( decode_error = f"{e}" error_str = f"{scenario} failed for url '{response.url}'" + details: str | None = None if (_status_code := response.status_code) in expected_status: if not isinstance(json_response, expected_type): @@ -249,9 +250,9 @@ def get_json_from_response( return json_response else: details = f"could not request JSON response: {decode_error}" - else: + elif isinstance(json_response, dict): error_str += f" with status {_status_code}" - details = (json_response or ({} if expected_type is dict else [])).get("detail") + details = (json_response or {}).get("detail") try: txt_response = response.text diff --git a/simvue/sender/__init__.py b/simvue/sender/__init__.py index 23170f8d..65823b52 100644 --- a/simvue/sender/__init__.py +++ b/simvue/sender/__init__.py @@ -22,13 +22,15 @@ _logger = logging.getLogger(__name__) -def _check_local_staging(cache_dir: pathlib.Path) -> None: +def _check_local_staging( + cache_dir: pathlib.Path, +) -> dict[str, dict[pathlib.Path, dict[str, typing.Any]]]: """Check local cache and assemble any objects for sending""" - _upload_data: dict[str, dict[str, typing.Any]] = {} + _upload_data: dict[str, dict[pathlib.Path, 
dict[str, typing.Any]]] = {} for obj_type in UPLOAD_ORDER: - _cache_files: list[pathlib.Path] = cache_dir.glob(f"{obj_type}/*.json") _upload_data[obj_type] = { - _path.name.split(".")[0]: json.load(_path.open()) for _path in _cache_files + _path: json.load(_path.open()) + for _path in cache_dir.glob(f"{obj_type}/*.json") } return _upload_data @@ -36,19 +38,21 @@ def _check_local_staging(cache_dir: pathlib.Path) -> None: # Create instances from local cache # We have to link created IDs to other objects def _assemble_objects( - locally_staged: dict[str, dict[str, typing.Any]], -) -> typing.Generator[SimvueObject, None, None]: + locally_staged: dict[str, dict[pathlib.Path, typing.Any]], +) -> typing.Generator[tuple[pathlib.Path, SimvueObject], None, None]: for obj_type in UPLOAD_ORDER: - _data = locally_staged.get(obj_type, {}) - for _local_id, _obj in _data.items(): + _data: dict[pathlib.Path, dict[str, typing.Any]] = locally_staged.get( + obj_type, {} + ) + for _file_path, _obj in _data.items(): _exact_type: str = _obj.pop("obj_type") try: - _instance_class = getattr(simvue.api.objects, _exact_type) + _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) except AttributeError as e: raise RuntimeError( f"Attempt to initialise unknown type '{_exact_type}'" ) from e - yield _instance_class.new(**_obj) + yield _file_path, _instance_class.new(**_obj) # Rather than a script with API calls each object will send itself @@ -57,14 +61,22 @@ def _assemble_objects( def uploader(cache_dir: pydantic.DirectoryPath) -> None: _locally_staged = _check_local_staging(cache_dir) _offline_to_online_id_mapping: dict[str, str] = {} - for obj in _assemble_objects(_locally_staged): - _current_id = obj._identifier + for _file_path, obj in _assemble_objects(_locally_staged): + if not (_current_id := obj._identifier): + raise RuntimeError( + f"Object of type '{obj.__class__.__name__}' has no identifier" + ) try: obj.commit() - _logger.info(f"Created {obj.__class__.__name__} 
'{obj.id}'") + _new_id = obj.id except RuntimeError as e: if "status 409" in e.args[0]: continue - else: - raise e - _offline_to_online_id_mapping[_current_id] = obj.id + raise e + if not _new_id: + raise RuntimeError( + f"Object of type '{obj.__class__.__name__}' has no identifier" + ) + _logger.info(f"Created {obj.__class__.__name__} '{_new_id}'") + _file_path.unlink(missing_ok=True) + _offline_to_online_id_mapping[_current_id] = _new_id From 650b2822538abe8d3ffbbed431c363f68201d4f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 24 Jan 2025 09:06:10 +0000 Subject: [PATCH 097/163] Add further tweaks for server changes and test fixes --- simvue/api/objects/alert/base.py | 4 +- simvue/api/objects/alert/metrics.py | 2 +- simvue/api/objects/artifact.py | 39 ++++++------ simvue/api/objects/storage/base.py | 6 +- simvue/api/objects/storage/fetch.py | 14 +++-- simvue/api/objects/storage/file.py | 2 +- simvue/api/objects/storage/s3.py | 2 +- simvue/api/request.py | 2 +- simvue/client.py | 61 +++++++++---------- simvue/run.py | 2 +- tests/functional/test_client.py | 1 - tests/functional/test_run_artifact_upload.py | 2 +- tests/functional/test_run_class.py | 21 ++++--- tests/unit/test_artifact.py | 2 +- .../unit/test_matplotlib_figure_mime_type.py | 2 +- tests/unit/test_numpy_array_mime_type.py | 2 +- tests/unit/test_numpy_array_serialization.py | 2 +- tests/unit/test_pandas_dataframe_mimetype.py | 2 +- .../test_pandas_dataframe_serialization.py | 2 +- tests/unit/test_pickle_serialization.py | 2 +- tests/unit/test_plotly_figure_mime_type.py | 2 +- tests/unit/test_pytorch_tensor_mime_type.py | 2 +- .../unit/test_pytorch_tensor_serialization.py | 2 +- tests/unit/test_run_init_folder.py | 2 +- tests/unit/test_run_init_tags.py | 2 +- tests/unit/test_suppress_errors.py | 6 +- 26 files changed, 93 insertions(+), 95 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 0dfcf670..1cbbe34d 100644 --- 
a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -139,8 +139,8 @@ def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> N def get_status(self, run_id: str) -> typing.Literal["ok", "critical"]: """Retrieve the status of this alert for a given run""" - _url: URL = URL(self._user_config.server.url) / f"runs/{run_id}/{self.id}" - _response = sv_get(url=f"{_url}") + _url: URL = self.url / f"status/{run_id}" + _response = sv_get(url=f"{_url}", headers=self._headers) _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index df17ffa8..3a7344f1 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -66,7 +66,7 @@ def new( description : str | None description for this alert metric : str - the metric to monitor + the metric to monitor, or a globular expression to match multiple metrics notification : "none" | "email" the notification settings for this alert aggregation : "average" | "sum" | "at least one" | "all" diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index ca182f1f..203841e9 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -62,22 +62,19 @@ def new( checksum: str, size: int, storage_id: str | None = None, - file_type: str | None = None, + mime_type: str | None = None, original_path: pathlib.Path | None = None, metadata: dict[str, typing.Any] | None, offline: bool = False, **kwargs, ) -> Self: - _storage = kwargs.pop("storage", None) - _orig_path = original_path or kwargs.pop("originalPath", None) - _file_type = kwargs.pop("type", None) _artifact = Artifact( name=name, checksum=checksum, size=size, - originalPath=f"{_orig_path or ''}", - storage=_storage, - type=_file_type, + original_path=f"{original_path or ''}", + storage=storage_id, + mime_type=mime_type, metadata=metadata, 
_read_only=False, ) @@ -102,7 +99,7 @@ def new_file( name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], storage_id: str | None, file_path: pydantic.FilePath, - file_type: str | None, + mime_type: str | None, metadata: dict[str, typing.Any] | None, offline: bool = False, ) -> Self: @@ -120,7 +117,7 @@ def new_file( the category of this artifact file_path : pathlib.Path | str path to the file this artifact represents - file_type : str | None + mime_type : str | None the mime type for this file, else this is determined metadata : dict[str, Any] | None supply metadata information for this artifact @@ -128,10 +125,10 @@ def new_file( whether to define this artifact locally, default is False """ - _file_type = file_type or get_mimetype_for_file(file_path) + _mime_type = mime_type or get_mimetype_for_file(file_path) - if _file_type not in get_mimetypes(): - raise ValueError(f"Invalid MIME type '{file_type}' specified") + if _mime_type not in get_mimetypes(): + raise ValueError(f"Invalid MIME type '{mime_type}' specified") _file_size = file_path.stat().st_size _file_orig_path = file_path.expanduser().absolute() @@ -142,7 +139,7 @@ def new_file( storage_id=storage_id, original_path=os.path.expandvars(_file_orig_path), size=_file_size, - file_type=_file_type, + mime_type=_mime_type, checksum=_file_checksum, offline=offline, metadata=metadata, @@ -203,7 +200,7 @@ def new_object( storage_id=storage_id, original_path=None, size=sys.getsizeof(_serialized), - file_type=_data_type, + mime_type=_data_type, checksum=_checksum, metadata=metadata, ) @@ -272,7 +269,7 @@ def _get( self, storage: str | None = None, url: str | None = None, **kwargs ) -> dict[str, typing.Any]: return super()._get( - storage=storage or self._staging.get("server", {}).get("storage"), + storage=storage or self._staging.get("server", {}).get("storage_id"), url=url, **kwargs, ) @@ -290,17 +287,17 @@ def storage_url(self) -> URL | None: @property def original_path(self) -> str: """Retrieve the 
original path of the file associated with this artifact""" - return self._get_attribute("originalPath") + return self._get_attribute("original_path") @property - def storage(self) -> str | None: - """Retrieve the storage identifier for this artifact""" - return self._get_attribute("storage") + def storage_id(self) -> str | None: + """Retrieve the storage_id identifier for this artifact""" + return self._get_attribute("storage_id") @property - def type(self) -> str: + def mime_type(self) -> str: """Retrieve the MIME type for this artifact""" - return self._get_attribute("type") + return self._get_attribute("mime_type") @property def size(self) -> int: diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 437297f5..5609750a 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -39,9 +39,9 @@ def name( self._staging["name"] = name @property - def type(self) -> str: - """Retrieve the type of storage""" - return self._get_attribute("type") + def backend(self) -> str: + """Retrieve the backend of storage""" + return self._get_attribute("backend") @property @staging_check diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 180d5303..40fe6ae1 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -24,12 +24,12 @@ class Storage: def __new__(cls, identifier: str | None = None, **kwargs): """Retrieve an object representing an storage either locally or on the server by id""" _storage_pre = StorageBase(identifier=identifier, **kwargs) - if _storage_pre.type == "S3": + if _storage_pre.backend == "S3": return S3Storage(identifier=identifier, **kwargs) - elif _storage_pre.type == "File": + elif _storage_pre.backend == "File": return FileStorage(identifier=identifier, **kwargs) - raise RuntimeError(f"Unknown type '{_storage_pre.type}'") + raise RuntimeError(f"Unknown backend '{_storage_pre.backend}'") @classmethod 
@pydantic.validate_call @@ -57,15 +57,17 @@ def get( for _entry in _json_response: _id = _entry.pop("id") - if _entry["type"] == "S3": + if _entry["backend"] == "S3": yield ( _id, S3Storage(_local=True, _read_only=True, identifier=_id, **_entry), ) - elif _entry["type"] == "File": + elif _entry["backend"] == "File": yield ( _id, FileStorage(_local=True, _read_only=True, identifier=_id, **_entry), ) else: - raise RuntimeError(f"Unrecognised storage type '{_entry['type']}'") + raise RuntimeError( + f"Unrecognised storage backend '{_entry['backend']}'" + ) diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index 24031897..4981132d 100644 --- a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -26,7 +26,7 @@ def new( """Create a new file storage object""" _storage = FileStorage( name=name, - type="File", + backend="File", disable_check=disable_check, is_tenant_useable=tenant_usable, is_default=default, diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index 9907910e..7347e74b 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -43,7 +43,7 @@ def new( } _storage = S3Storage( name=name, - type="S3", + backend="S3", config=_config, disable_check=disable_check, tenant_useable=tenant_usable, diff --git a/simvue/api/request.py b/simvue/api/request.py index 999fe0db..fcdd6fcd 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -164,7 +164,7 @@ def put( ) def get( url: str, - headers: dict[str, str], + headers: dict[str, str] | None = None, params: dict[str, str | int | float | None] | None = None, timeout: int = DEFAULT_API_TIMEOUT, json: dict[str, typing.Any] | None = None, diff --git a/simvue/client.py b/simvue/client.py index 47d56360..72530552 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -513,8 +513,8 @@ def get_artifact( _content = b"".join(_artifact.download_content()) - _deserialized_content: 
typing.Optional[DeserializedContent] = deserialize_data( - _content, _artifact.type, allow_pickle + _deserialized_content: DeserializedContent | None = deserialize_data( + _content, _artifact.mime_type, allow_pickle ) # Numpy array return means just 'if content' will be ambiguous @@ -527,7 +527,7 @@ def get_artifact_as_file( self, run_id: str, name: str, - output_dir: typing.Optional[pydantic.DirectoryPath] = None, + output_dir: pydantic.DirectoryPath | None = None, ) -> None: """Retrieve the specified artifact in the form of a file @@ -645,7 +645,7 @@ def get_folder( def get_folders( self, *, - filters: typing.Optional[list[str]] = None, + filters: list[str] | None = None, count: pydantic.PositiveInt = 100, start_index: pydantic.NonNegativeInt = 0, ) -> typing.Generator[tuple[str, Folder], None, None]: @@ -705,7 +705,7 @@ def _get_run_metrics_from_server( run_ids: list[str], xaxis: str, aggregate: bool, - max_points: typing.Optional[int] = None, + max_points: int | None = None, ) -> dict[str, typing.Any]: params: dict[str, typing.Union[str, int, None]] = { "runs": json.dumps(run_ids), @@ -735,11 +735,11 @@ def get_metric_values( xaxis: typing.Literal["step", "time", "timestamp"], *, output_format: typing.Literal["dataframe", "dict"] = "dict", - run_ids: typing.Optional[list[str]] = None, - run_filters: typing.Optional[list[str]] = None, + run_ids: list[str] | None = None, + run_filters: list[str] | None = None, use_run_names: bool = False, aggregate: bool = False, - max_points: typing.Optional[pydantic.PositiveInt] = None, + max_points: pydantic.PositiveInt | None = None, ) -> typing.Union[dict, DataFrame, None]: """Retrieve the values for a given metric across multiple runs @@ -794,31 +794,30 @@ def get_metric_values( _run_data = dict(Run.get(**_args)) - if _run_metrics := self._get_run_metrics_from_server( - metric_names=metric_names, - run_ids=run_ids or list(_run_data.keys()), - xaxis=xaxis, - aggregate=aggregate, - max_points=max_points, + if not ( + 
_run_metrics := self._get_run_metrics_from_server( + metric_names=metric_names, + run_ids=run_ids or list(_run_data.keys()), + xaxis=xaxis, + aggregate=aggregate, + max_points=max_points, + ) ): - if aggregate: - return aggregated_metrics_to_dataframe( - _run_metrics, xaxis=xaxis, parse_to=output_format - ) - else: - if use_run_names: - _run_metrics = { - _run_data[key].name: _run_metrics[key] - for key in _run_metrics.keys() - } - return parse_run_set_metrics( - _run_metrics, - xaxis=xaxis, - run_labels=list(_run_data.keys()), - parse_to=output_format, - ) - else: return None + if aggregate: + return aggregated_metrics_to_dataframe( + _run_metrics, xaxis=xaxis, parse_to=output_format + ) + if use_run_names: + _run_metrics = { + _run_data[key].name: _run_metrics[key] for key in _run_metrics.keys() + } + return parse_run_set_metrics( + _run_metrics, + xaxis=xaxis, + run_labels=list(_run_data.keys()), + parse_to=output_format, + ) @check_extra("plot") @prettify_pydantic diff --git a/simvue/run.py b/simvue/run.py index f257648a..87a91dae 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1359,7 +1359,7 @@ def save_file( storage_id=self._storage_id, file_path=file_path, offline=self._user_config.run.mode == "offline", - file_type=filetype, + mime_type=filetype, metadata=metadata, ) _artifact.attach_to_run(self.id, category) diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 696f97e9..8e62aef3 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -1,5 +1,4 @@ from logging import critical -from numpy import tri import pytest import uuid import random diff --git a/tests/functional/test_run_artifact_upload.py b/tests/functional/test_run_artifact_upload.py index 9d571dbd..f19c47c4 100644 --- a/tests/functional/test_run_artifact_upload.py +++ b/tests/functional/test_run_artifact_upload.py @@ -28,7 +28,7 @@ def test_add_artifact_to_run() -> None: name=f"test_{_uuid}", storage_id=None, 
file_path=pathlib.Path(tempf.name), - file_type=None, + mime_type=None, metadata=None ) _artifact.attach_to_run(_run.id, "input") diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 304fbad9..c001c3a6 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -47,15 +47,16 @@ def test_check_run_initialised_decorator() -> None: assert "Simvue Run must be initialised" in str(e.value) -# @pytest.mark.run -# def test_run_with_emissions() -> None: -# with sv_run.Run() as run_created: -# run_created.init(retention_period="1 min") -# run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) -# time.sleep(5) -# _run = RunObject(identifier=run_created.id) -# import pdb; pdb.set_trace() -# assert list(_run.metrics) +@pytest.mark.run +@pytest.mark.codecarbon +def test_run_with_emissions() -> None: + with sv_run.Run() as run_created: + run_created.init(retention_period="1 min") + run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) + time.sleep(5) + _run = RunObject(identifier=run_created.id) + import pdb; pdb.set_trace() + assert list(_run.metrics) @pytest.mark.run @@ -731,7 +732,7 @@ def testing_exit(status: int) -> None: time.sleep(2) run.log_alert(alert_id, "critical") _alert = Alert(identifier=alert_id) - assert _alert.state == "critical" + assert _alert.get_status(run.id) counter = 0 while run._status != "terminated" and counter < 15: time.sleep(1) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 3e86089d..a2aa680b 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -29,7 +29,7 @@ def test_artifact_creation_online() -> None: name=f"test_artifact_{_uuid}", file_path=_path, storage_id=None, - file_type=None, + mime_type=None, metadata=None ) _artifact.attach_to_run(_run.id, "input") diff --git a/tests/unit/test_matplotlib_figure_mime_type.py b/tests/unit/test_matplotlib_figure_mime_type.py index 
63ce5d5c..d984f676 100644 --- a/tests/unit/test_matplotlib_figure_mime_type.py +++ b/tests/unit/test_matplotlib_figure_mime_type.py @@ -9,7 +9,7 @@ @pytest.mark.skipif(not plt, reason="Matplotlib is not installed") @pytest.mark.local -def test_matplotlib_figure_mime_type(): +def test_matplotlib_figure_mime_type() -> None: """ Check that a matplotlib figure has the correct mime-type """ diff --git a/tests/unit/test_numpy_array_mime_type.py b/tests/unit/test_numpy_array_mime_type.py index bcbd6e52..01c47295 100644 --- a/tests/unit/test_numpy_array_mime_type.py +++ b/tests/unit/test_numpy_array_mime_type.py @@ -3,7 +3,7 @@ import pytest @pytest.mark.local -def test_numpy_array_mime_type(): +def test_numpy_array_mime_type() -> None: """ Check that the mimetype for numpy arrays is correct """ diff --git a/tests/unit/test_numpy_array_serialization.py b/tests/unit/test_numpy_array_serialization.py index ede5ccae..52d6e6d0 100644 --- a/tests/unit/test_numpy_array_serialization.py +++ b/tests/unit/test_numpy_array_serialization.py @@ -3,7 +3,7 @@ import pytest @pytest.mark.local -def test_numpy_array_serialization(): +def test_numpy_array_serialization() -> None: """ Check that a numpy array can be serialized then deserialized successfully """ diff --git a/tests/unit/test_pandas_dataframe_mimetype.py b/tests/unit/test_pandas_dataframe_mimetype.py index ac645b48..1d3de890 100644 --- a/tests/unit/test_pandas_dataframe_mimetype.py +++ b/tests/unit/test_pandas_dataframe_mimetype.py @@ -9,7 +9,7 @@ @pytest.mark.local @pytest.mark.skipif(not pd, reason="Pandas is not installed") -def test_pandas_dataframe_mimetype(): +def test_pandas_dataframe_mimetype() -> None: """ Check that the mime-type of a Pandas dataframe is correct """ diff --git a/tests/unit/test_pandas_dataframe_serialization.py b/tests/unit/test_pandas_dataframe_serialization.py index 3dea46b0..79c524e1 100644 --- a/tests/unit/test_pandas_dataframe_serialization.py +++ 
b/tests/unit/test_pandas_dataframe_serialization.py @@ -9,7 +9,7 @@ @pytest.mark.local @pytest.mark.skipif(not pd, reason="Pandas is not installed") -def test_pandas_dataframe_serialization(): +def test_pandas_dataframe_serialization() -> None: """ Check that a Pandas dataframe can be serialized then deserialized successfully """ diff --git a/tests/unit/test_pickle_serialization.py b/tests/unit/test_pickle_serialization.py index c3191c58..12e89ae8 100644 --- a/tests/unit/test_pickle_serialization.py +++ b/tests/unit/test_pickle_serialization.py @@ -1,7 +1,7 @@ from simvue.serialization import deserialize_data, serialize_object import pytest @pytest.mark.local -def test_pickle_serialization(): +def test_pickle_serialization() -> None: """ Check that a dictionary can be serialized then deserialized successfully """ diff --git a/tests/unit/test_plotly_figure_mime_type.py b/tests/unit/test_plotly_figure_mime_type.py index f520f2dc..6884a440 100644 --- a/tests/unit/test_plotly_figure_mime_type.py +++ b/tests/unit/test_plotly_figure_mime_type.py @@ -16,7 +16,7 @@ @pytest.mark.local @pytest.mark.skipif(not plt, reason="Matplotlib is not installed") @pytest.mark.skipif(not plotly, reason="Plotly is not installed") -def test_plotly_figure_mime_type(): +def test_plotly_figure_mime_type() -> None: """ Check that a plotly figure has the correct mime-type """ diff --git a/tests/unit/test_pytorch_tensor_mime_type.py b/tests/unit/test_pytorch_tensor_mime_type.py index 38aef668..35391850 100644 --- a/tests/unit/test_pytorch_tensor_mime_type.py +++ b/tests/unit/test_pytorch_tensor_mime_type.py @@ -9,7 +9,7 @@ @pytest.mark.local @pytest.mark.skipif(not torch, reason="Torch is not installed") -def test_pytorch_tensor_mime_type(): +def test_pytorch_tensor_mime_type() -> None: """ Check that a PyTorch tensor has the correct mime-type """ diff --git a/tests/unit/test_pytorch_tensor_serialization.py b/tests/unit/test_pytorch_tensor_serialization.py index 26022259..18a36e1b 100644 --- 
a/tests/unit/test_pytorch_tensor_serialization.py +++ b/tests/unit/test_pytorch_tensor_serialization.py @@ -8,7 +8,7 @@ @pytest.mark.local @pytest.mark.skipif(not torch, reason="Torch is not installed") -def test_pytorch_tensor_serialization(): +def test_pytorch_tensor_serialization() -> None: """ Check that a PyTorch tensor can be serialized then deserialized successfully """ diff --git a/tests/unit/test_run_init_folder.py b/tests/unit/test_run_init_folder.py index 0ca2c383..0bcd5b0c 100644 --- a/tests/unit/test_run_init_folder.py +++ b/tests/unit/test_run_init_folder.py @@ -2,7 +2,7 @@ import pytest @pytest.mark.local -def test_run_init_folder(): +def test_run_init_folder() -> None: """ Check that run.init throws an exception if folder input is not specified correctly """ diff --git a/tests/unit/test_run_init_tags.py b/tests/unit/test_run_init_tags.py index 7697250a..377594fe 100644 --- a/tests/unit/test_run_init_tags.py +++ b/tests/unit/test_run_init_tags.py @@ -2,7 +2,7 @@ import pytest @pytest.mark.local -def test_run_init_tags(): +def test_run_init_tags() -> None: """ Check that run.init throws an exception if tags are not a list """ diff --git a/tests/unit/test_suppress_errors.py b/tests/unit/test_suppress_errors.py index 5a5f9e6d..73c114dc 100644 --- a/tests/unit/test_suppress_errors.py +++ b/tests/unit/test_suppress_errors.py @@ -3,7 +3,7 @@ import logging @pytest.mark.local -def test_suppress_errors_false(): +def test_suppress_errors_false() -> None: """ Check that exceptions are thrown if suppress_errors disabled """ @@ -17,7 +17,7 @@ def test_suppress_errors_false(): assert "Input should be a valid boolean, unable to interpret input" in f"{e.value}" @pytest.mark.local -def test_suppress_errors_true(caplog): +def test_suppress_errors_true(caplog) -> None: """ Check that no exceptions are thrown and messages are added to log if suppress_errors enabled """ @@ -33,7 +33,7 @@ def test_suppress_errors_true(caplog): assert "Input should be a valid boolean, 
unable to interpret input" in caplog.text @pytest.mark.local -def test_suppress_errors_default(caplog): +def test_suppress_errors_default(caplog) -> None: """ Check that by default no exceptions are thrown and messages are added to log """ From f190dda0ebb1b260a4d8a4fae7ad3ab9b904a138 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 24 Jan 2025 09:12:33 +0000 Subject: [PATCH 098/163] Run sourcery linting/fixing --- simvue/api/objects/alert/metrics.py | 5 +---- simvue/api/objects/run.py | 9 ++------- simvue/system.py | 3 +-- simvue/utilities.py | 15 +++++---------- 4 files changed, 9 insertions(+), 23 deletions(-) diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 3a7344f1..73ffb8c8 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -115,10 +115,7 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: super().__init__(identifier, **kwargs) def compare(self, other: "MetricsRangeAlert") -> bool: - if not super().compare(other): - return False - - return self.alert.compare(other) + return self.alert.compare(other) if super().compare(other) else False @classmethod @pydantic.validate_call diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 6d1aaa00..69da573c 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -1,6 +1,5 @@ import http import typing -import msgpack import pydantic import datetime @@ -14,11 +13,9 @@ get as sv_get, put as sv_put, get_json_from_response, - post as sv_post, ) -from simvue.api.objects.events import Events from simvue.api.url import URL -from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT, EventSet, MetricSet +from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT Status = typing.Literal[ "lost", "failed", "completed", "terminated", "running", "created" @@ -274,9 +271,7 @@ def send_heartbeat(self) -> dict[str, typing.Any] | None: @property def 
_abort_url(self) -> URL | None: - if not self.url: - return None - return self.url / "abort" + return self.url / "abort" if self.url else None @property def _artifact_url(self) -> URL | None: diff --git a/simvue/system.py b/simvue/system.py index 7ea86eb9..84ce016b 100644 --- a/simvue/system.py +++ b/simvue/system.py @@ -67,8 +67,7 @@ def get_system() -> dict[str, typing.Any]: cpu = get_cpu_info() gpu = get_gpu_info() - system: dict[str, typing.Any] = {} - system["cwd"] = os.getcwd() + system: dict[str, typing.Any] = {"cwd": os.getcwd()} system["hostname"] = socket.gethostname() system["pythonversion"] = ( f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}" diff --git a/simvue/utilities.py b/simvue/utilities.py index f7b966aa..4ca7364f 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -95,12 +95,10 @@ def parse_validation_response( obj_type: str = issue["type"] location: list[str] = issue["loc"] location.remove("body") - location_addr: str = "" - for i, loc in enumerate(location): - if isinstance(loc, int): - location_addr += f"[{loc}]" - else: - location_addr += f"{'.' if i > 0 else ''}{loc}" + location_addr: str = "".join( + (f"[{loc}]" if isinstance(loc, int) else f"{'.' 
if i > 0 else ''}{loc}") + for i, loc in enumerate(location) + ) headers = ["Type", "Location", "Message"] information = [obj_type, location_addr] @@ -110,10 +108,7 @@ def parse_validation_response( input_arg = body for loc in location: try: - if obj_type == "missing": - input_arg = None - else: - input_arg = input_arg[loc] + input_arg = None if obj_type == "missing" else input_arg[loc] except TypeError: break information.append(input_arg) From 588d06399f42ba0ff8f50dbd628f787c9da0ab96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 24 Jan 2025 09:37:38 +0000 Subject: [PATCH 099/163] Modernise typing --- examples/Geant4/geant4_simvue.py | 2 +- simvue/api/objects/folder.py | 6 ++-- simvue/api/objects/run.py | 2 +- simvue/api/objects/storage/base.py | 2 +- simvue/api/objects/storage/s3.py | 2 +- simvue/api/request.py | 6 ++-- simvue/client.py | 48 ++++++++++++++--------------- simvue/config/parameters.py | 10 +++--- simvue/config/user.py | 16 +++------- simvue/converters.py | 16 ++++------ simvue/executor.py | 24 +++++++-------- simvue/factory/proxy/__init__.py | 2 +- simvue/factory/proxy/base.py | 43 +++++++++----------------- simvue/factory/proxy/offline.py | 32 ++++++++----------- simvue/factory/proxy/remote.py | 49 ++++++++---------------------- simvue/run.py | 34 ++++++++------------- simvue/serialization.py | 12 ++++---- simvue/utilities.py | 18 +++++------ tests/functional/test_run_class.py | 6 ++-- 19 files changed, 134 insertions(+), 196 deletions(-) diff --git a/examples/Geant4/geant4_simvue.py b/examples/Geant4/geant4_simvue.py index 3d3f9dfb..2a864e0f 100644 --- a/examples/Geant4/geant4_simvue.py +++ b/examples/Geant4/geant4_simvue.py @@ -27,7 +27,7 @@ @click.option("--momentum", type=float, default=10) @click.option("--events", type=int, default=100) def geant4_simvue_example( - g4_binary: str, config: typing.Optional[str], ci: bool, momentum: float, events: int + g4_binary: str, config: str | None, ci: bool, momentum: 
float, events: int ) -> None: @mp_file_parse.file_parser def root_file_parser( diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index f3a04276..cae9353d 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -30,7 +30,7 @@ class Folder(SimvueObject): """ - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__(self, identifier: str | None = None, **kwargs) -> None: """Initialise a Folder If an identifier is provided a connection will be made to the @@ -82,7 +82,7 @@ def path(self) -> pathlib.Path: @property @staging_check - def description(self) -> typing.Optional[str]: + def description(self) -> str | None: """Return the folder description""" return self._get().get("description") @@ -95,7 +95,7 @@ def description(self, description: str) -> None: @property @staging_check - def name(self) -> typing.Optional[str]: + def name(self) -> str | None: """Return the folder name""" return self._get().get("name") diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 69da573c..a2cd68bb 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -25,7 +25,7 @@ class Run(SimvueObject): - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__(self, identifier: str | None = None, **kwargs) -> None: """Initialise a Run If an identifier is provided a connection will be made to the diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 5609750a..4ed3c59a 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -10,7 +10,7 @@ class StorageBase(SimvueObject): def __init__( self, - identifier: typing.Optional[str] = None, + identifier: str | None = None, _read_only: bool = False, **kwargs, ) -> None: diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index 7347e74b..435b5dfc 100644 --- a/simvue/api/objects/storage/s3.py +++ 
b/simvue/api/objects/storage/s3.py @@ -13,7 +13,7 @@ class S3Storage(StorageBase): - def __init__(self, identifier: typing.Optional[str] = None, **kwargs) -> None: + def __init__(self, identifier: str | None = None, **kwargs) -> None: self.config = Config(self) super().__init__(identifier, **kwargs) diff --git a/simvue/api/request.py b/simvue/api/request.py index 45e0fd1c..3ebdb86b 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -88,7 +88,7 @@ def post( """ if is_json: - data_sent: typing.Union[str, dict[str, typing.Any]] = json_module.dumps(data) + data_sent: str | dict[str, typing.Any] = json_module.dumps(data) headers = set_json_header(headers) else: data_sent = data @@ -144,7 +144,7 @@ def put( response from executing PUT """ if is_json and data: - data_sent: typing.Union[str, dict[str, typing.Any]] = json_module.dumps(data) + data_sent: str | dict[str, typing.Any] = json_module.dumps(data) headers = set_json_header(headers) else: data_sent = data @@ -231,7 +231,7 @@ def get_json_from_response( response: requests.Response, allow_parse_failure: bool = False, expected_type: typing.Type[dict | list] = dict, -) -> typing.Union[dict, list]: +) -> dict | list: try: json_response = response.json() json_response = json_response or ({} if expected_type is dict else []) diff --git a/simvue/client.py b/simvue/client.py index 72530552..2417627f 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -64,8 +64,8 @@ class Client: def __init__( self, *, - server_token: typing.Optional[pydantic.SecretStr] = None, - server_url: typing.Optional[str] = None, + server_token: pydantic.SecretStr | None = None, + server_url: str | None = None, ) -> None: """Initialise an instance of the Simvue client @@ -136,7 +136,7 @@ def get_run_id_from_name( @prettify_pydantic @pydantic.validate_call - def get_run(self, run_id: str) -> typing.Optional[Run]: + def get_run(self, run_id: str) -> Run | None: """Retrieve a single run Parameters @@ -177,7 +177,7 @@ def 
get_run_name_from_id(self, run_id: str) -> str: @pydantic.validate_call def get_runs( self, - filters: typing.Optional[list[str]], + filters: list[str] | None, *, system: bool = False, metrics: bool = False, @@ -187,7 +187,7 @@ def get_runs( count_limit: pydantic.PositiveInt | None = 100, start_index: pydantic.NonNegativeInt = 0, show_shared: bool = False, - ) -> typing.Union[DataFrame, typing.Generator[tuple[str, Run], None, None], None]: + ) -> DataFrame | typing.Generator[tuple[str, Run], None, None] | None: """Retrieve all runs matching filters. Parameters @@ -216,7 +216,7 @@ def get_runs( Returns ------- - dict | pandas.DataFrame + pandas.DataFrame | Generator[tuple[str, Run], None, None] either the JSON response from the runs request or the results in the form of a Pandas DataFrame @@ -280,7 +280,7 @@ def get_runs( @prettify_pydantic @pydantic.validate_call - def delete_run(self, run_id: str) -> typing.Optional[dict]: + def delete_run(self, run_id: str) -> dict | None: """Delete run by identifier Parameters @@ -300,7 +300,7 @@ def delete_run(self, run_id: str) -> typing.Optional[dict]: """ return Run(identifier=run_id).delete() or None - def _get_folder_from_path(self, path: str) -> typing.Optional[Folder]: + def _get_folder_from_path(self, path: str) -> Folder | None: """Retrieve folder for the specified path if found Parameters @@ -321,7 +321,7 @@ def _get_folder_from_path(self, path: str) -> typing.Optional[Folder]: except StopIteration: return None - def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: + def _get_folder_id_from_path(self, path: str) -> str | None: """Retrieve folder identifier for the specified path if found Parameters @@ -342,7 +342,7 @@ def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: @pydantic.validate_call def delete_runs( self, folder_path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] - ) -> typing.Optional[list]: + ) -> list | None: """Delete runs in a named folder Parameters @@ 
-374,7 +374,7 @@ def delete_folder( recursive: bool = False, remove_runs: bool = False, allow_missing: bool = False, - ) -> typing.Optional[list]: + ) -> list | None: """Delete a folder by name Parameters @@ -460,7 +460,7 @@ def _retrieve_artifacts_from_server( @prettify_pydantic @pydantic.validate_call - def abort_run(self, run_id: str, reason: str) -> typing.Union[dict, list]: + def abort_run(self, run_id: str, reason: str) -> dict | list: """Abort a currently active run on the server Parameters @@ -707,7 +707,7 @@ def _get_run_metrics_from_server( aggregate: bool, max_points: int | None = None, ) -> dict[str, typing.Any]: - params: dict[str, typing.Union[str, int, None]] = { + params: dict[str, str | int | None] = { "runs": json.dumps(run_ids), "aggregate": aggregate, "metrics": json.dumps(metric_names), @@ -740,7 +740,7 @@ def get_metric_values( use_run_names: bool = False, aggregate: bool = False, max_points: pydantic.PositiveInt | None = None, - ) -> typing.Union[dict, DataFrame, None]: + ) -> dict | DataFrame | None: """Retrieve the values for a given metric across multiple runs Uses filters to specify which runs should be retrieved. 
@@ -828,7 +828,7 @@ def plot_metrics( metric_names: list[str], xaxis: typing.Literal["step", "time"], *, - max_points: typing.Optional[int] = None, + max_points: int | None = None, ) -> typing.Any: """Plt the time series values for multiple metrics/runs @@ -910,9 +910,9 @@ def get_events( self, run_id: str, *, - message_contains: typing.Optional[str] = None, - start_index: typing.Optional[pydantic.NonNegativeInt] = None, - count_limit: typing.Optional[pydantic.PositiveInt] = None, + message_contains: str | None = None, + start_index: pydantic.NonNegativeInt | None = None, + count_limit: pydantic.PositiveInt | None = None, ) -> list[dict[str, str]]: """Return events for a specified run @@ -944,7 +944,7 @@ def get_events( else "" ) - params: dict[str, typing.Union[str, int]] = { + params: dict[str, str | int] = { "run": run_id, "filters": msg_filter, "start": start_index or 0, @@ -970,11 +970,11 @@ def get_events( def get_alerts( self, *, - run_id: typing.Optional[str] = None, + run_id: str | None = None, critical_only: bool = True, names_only: bool = True, - start_index: typing.Optional[pydantic.NonNegativeInt] = None, - count_limit: typing.Optional[pydantic.PositiveInt] = None, + start_index: pydantic.NonNegativeInt | None = None, + count_limit: pydantic.PositiveInt | None = None, ) -> list[AlertBase] | list[str | None]: """Retrieve alerts for a given run @@ -1018,8 +1018,8 @@ def get_alerts( def get_tags( self, *, - start_index: typing.Optional[pydantic.NonNegativeInt] = None, - count_limit: typing.Optional[pydantic.PositiveInt] = None, + start_index: pydantic.NonNegativeInt | None = None, + count_limit: pydantic.PositiveInt | None = None, ) -> typing.Generator[Tag, None, None]: """Retrieve tags diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index a13d865f..472811f6 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -43,7 +43,7 @@ def check_token(cls, v: typing.Any) -> str: class 
OfflineSpecifications(pydantic.BaseModel): - cache: typing.Optional[pathlib.Path] = None + cache: pathlib.Path | None = None class MetricsSpecifications(pydantic.BaseModel): @@ -53,11 +53,11 @@ class MetricsSpecifications(pydantic.BaseModel): class DefaultRunSpecifications(pydantic.BaseModel): - name: typing.Optional[str] = None - description: typing.Optional[str] = None - tags: typing.Optional[list[str]] = None + name: str | None = None + description: str | None = None + tags: list[str] | None = None folder: str = pydantic.Field("/", pattern=sv_models.FOLDER_REGEX) - metadata: typing.Optional[dict[str, typing.Union[str, int, float, bool]]] = None + metadata: dict[str, str | int | float | bool] | None = None mode: typing.Literal["offline", "disabled", "online"] = "online" diff --git a/simvue/config/user.py b/simvue/config/user.py index 4c704457..d53ce79d 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -36,12 +36,8 @@ logger = logging.getLogger(__name__) -SIMVUE_SERVER_UPPER_CONSTRAINT: typing.Optional[semver.Version] = semver.Version.parse( - "2.0.0" -) -SIMVUE_SERVER_LOWER_CONSTRAINT: typing.Optional[semver.Version] = semver.Version.parse( - "1.0.0" -) +SIMVUE_SERVER_UPPER_CONSTRAINT: semver.Version | None = semver.Version.parse("2.0.0") +SIMVUE_SERVER_LOWER_CONSTRAINT: semver.Version | None = semver.Version.parse("1.0.0") class SimvueConfiguration(pydantic.BaseModel): @@ -56,7 +52,7 @@ class SimvueConfiguration(pydantic.BaseModel): metrics: MetricsSpecifications = MetricsSpecifications() @classmethod - def _load_pyproject_configs(cls) -> typing.Optional[dict]: + def _load_pyproject_configs(cls) -> dict | None: """Recover any Simvue non-authentication configurations from pyproject.toml""" _pyproject_toml = sv_util.find_first_instance_of_file( file_names=["pyproject.toml"], check_user_space=False @@ -227,10 +223,8 @@ def fetch( @functools.lru_cache def config_file(cls) -> pathlib.Path: """Returns the path of top level configuration file used for 
the session""" - _config_file: typing.Optional[pathlib.Path] = ( - sv_util.find_first_instance_of_file( - CONFIG_FILE_NAMES, check_user_space=True - ) + _config_file: pathlib.Path | None = sv_util.find_first_instance_of_file( + CONFIG_FILE_NAMES, check_user_space=True ) # NOTE: Legacy INI support has been removed diff --git a/simvue/converters.py b/simvue/converters.py index 59df29e3..02d1dbcb 100644 --- a/simvue/converters.py +++ b/simvue/converters.py @@ -19,9 +19,7 @@ def aggregated_metrics_to_dataframe( request_response_data: dict[str, list[dict[str, float]]], xaxis: str, parse_to: typing.Literal["dict", "dataframe"] = "dict", -) -> typing.Union[ - "DataFrame", dict[str, dict[tuple[float, str], typing.Optional[float]]] -]: +) -> typing.Union["DataFrame", dict[str, dict[tuple[float, str], float]] | None]: """Create data frame for an aggregate of metrics Returns a dataframe with columns being metrics and sub-columns being the @@ -58,7 +56,7 @@ def aggregated_metrics_to_dataframe( _value_types = list(_value_types) _value_types.remove(xaxis) - result_dict: dict[str, dict[tuple[float, str], typing.Optional[float]]] = { + result_dict: dict[str, dict[tuple[float, str], float]] | None = { metric_name: {} for metric_name in request_response_data } @@ -91,9 +89,7 @@ def parse_run_set_metrics( xaxis: str, run_labels: list[str], parse_to: typing.Literal["dict", "dataframe"] = "dict", -) -> typing.Union[ - dict[str, dict[tuple[float, str], typing.Optional[float]]], "DataFrame" -]: +) -> typing.Union[dict[str, dict[tuple[float, str], float]] | None, "DataFrame"]: """Parse JSON response metric data from the server into the specified form Creates either a dictionary or a pandas dataframe of the data collected @@ -112,7 +108,7 @@ def parse_run_set_metrics( Returns ------- - dict[str, dict[tuple[float, str], typing.Optional[float]]] | DataFrame + dict[str, dict[tuple[float, str], float]] | None | DataFrame either a dictionary or Pandas DataFrame containing the results Raises @@ 
-144,7 +140,7 @@ def parse_run_set_metrics( _value_types = list(_value_types) _value_types.remove(xaxis) - result_dict: dict[str, dict[tuple[float, str], typing.Optional[float]]] = { + result_dict: dict[str, dict[tuple[float, str], float]] | None = { metric_name: {} for metric_name in _all_metrics } @@ -224,7 +220,7 @@ def to_dataframe(data) -> pandas.DataFrame: def metric_time_series_to_dataframe( data: list[dict[str, float]], xaxis: typing.Literal["step", "time", "timestamp"], - name: typing.Optional[str] = None, + name: str | None = None, ) -> "DataFrame": """Convert a single metric value set from a run into a dataframe diff --git a/simvue/executor.py b/simvue/executor.py index 6d411e4f..a6bc7c4f 100644 --- a/simvue/executor.py +++ b/simvue/executor.py @@ -105,23 +105,23 @@ def __init__(self, simvue_runner: "simvue.Run", keep_logs: bool = True) -> None: """ self._runner = simvue_runner self._keep_logs = keep_logs - self._completion_callbacks: dict[str, typing.Optional[CompletionCallback]] = {} + self._completion_callbacks: dict[str, CompletionCallback] | None = {} self._completion_triggers: dict[ - str, typing.Optional[multiprocessing.synchronize.Event] + str, multiprocessing.synchronize.Event | None ] = {} - self._completion_processes: dict[str, typing.Optional[threading.Thread]] = {} + self._completion_processes: dict[str, threading.Thread] | None = {} self._alert_ids: dict[str, str] = {} self.command_str: dict[str, str] = {} self._processes: dict[str, subprocess.Popen] = {} - def std_out(self, process_id: str) -> typing.Optional[str]: + def std_out(self, process_id: str) -> str | None: if not os.path.exists(out_file := f"{self._runner.name}_{process_id}.out"): return None with open(out_file) as out: return out.read() or None - def std_err(self, process_id: str) -> typing.Optional[str]: + def std_err(self, process_id: str) -> str | None: if not os.path.exists(err_file := f"{self._runner.name}_{process_id}.err"): return None @@ -185,7 +185,7 @@ def 
callback_function(status_code: int, std_out: str, std_err: str) -> None: you should provide it as such and perform the upload manually, by default None env : dict[str, str], optional environment variables for process - cwd: typing.Optional[pathlib.Path], optional + cwd: pathlib.Path | None, optional working directory to execute the process within completion_callback : typing.Callable | None, optional callback to run when process terminates @@ -209,7 +209,7 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: if input_file: self._runner.save_file(file_path=input_file, category="input") - command: typing.List[str] = [] + command: list[str] = [] if executable: command += [f"{executable}"] @@ -299,7 +299,7 @@ def exit_status(self) -> int: return 0 - def get_error_summary(self) -> dict[str, typing.Optional[str]]: + def get_error_summary(self) -> dict[str, str] | None: """Returns the summary messages of all errors""" return { identifier: self._get_error_status(identifier) @@ -324,8 +324,8 @@ def get_command(self, process_id: str) -> str: raise KeyError(f"Failed to retrieve '{process_id}', no such process") return self.command_str[process_id] - def _get_error_status(self, process_id: str) -> typing.Optional[str]: - err_msg: typing.Optional[str] = None + def _get_error_status(self, process_id: str) -> str | None: + err_msg: str | None = None # Return last 10 lines of stdout if stderr empty if not (err_msg := self.std_err(process_id)) and ( @@ -374,7 +374,7 @@ def _save_output(self) -> None: ) def kill_process( - self, process_id: typing.Union[int, str], kill_children_only: bool = False + self, process_id: int | str, kill_children_only: bool = False ) -> None: """Kill a running process by ID @@ -383,7 +383,7 @@ def kill_process( Parameters ---------- - process_id : typing.Union[int, str] + process_id : int | str either the identifier for a client created process or the PID of an external process kill_children_only : bool, optional diff --git 
a/simvue/factory/proxy/__init__.py b/simvue/factory/proxy/__init__.py index ce92e57e..dbcf0cba 100644 --- a/simvue/factory/proxy/__init__.py +++ b/simvue/factory/proxy/__init__.py @@ -16,7 +16,7 @@ def Simvue( - name: typing.Optional[str], + name: str | None, uniq_id: str, mode: str, config: "SimvueConfiguration", diff --git a/simvue/factory/proxy/base.py b/simvue/factory/proxy/base.py index 2dc3c13d..6f35e691 100644 --- a/simvue/factory/proxy/base.py +++ b/simvue/factory/proxy/base.py @@ -7,15 +7,15 @@ class SimvueBaseClass(abc.ABC): @abc.abstractmethod def __init__( self, - name: typing.Optional[str], + name: str | None, uniq_id: str, suppress_errors: bool, ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._suppress_errors: bool = suppress_errors self._uuid: str = uniq_id - self._name: typing.Optional[str] = name - self._id: typing.Optional[int] = None + self._name: str | None = name + self._id: int | None = None self._aborted: bool = False def _error(self, message: str) -> None: @@ -24,46 +24,37 @@ def _error(self, message: str) -> None: """ if not self._suppress_errors: raise RuntimeError(message) - else: - self._logger.error(message) - self._aborted = True + self._logger.error(message) + self._aborted = True @abc.abstractmethod - def list_tags(self) -> typing.Optional[list[str]]: + def list_tags(self) -> list[str] | None: pass @abc.abstractmethod - def create_run( - self, data: dict[str, typing.Any] - ) -> tuple[typing.Optional[str], typing.Optional[str]]: + def create_run(self, data: dict[str, typing.Any]) -> tuple[str, str | None]: pass @abc.abstractmethod - def update( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def update(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def set_folder_details(self, data) -> typing.Optional[dict[str, typing.Any]]: + def set_folder_details(self, data) -> dict[str, typing.Any] | None: pass 
@abc.abstractmethod - def save_file( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def save_file(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def add_alert( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def add_alert(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod def set_alert_state( self, alert_id: str, status: str - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> dict[str, typing.Any] | None: pass @abc.abstractmethod @@ -71,19 +62,15 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: pass @abc.abstractmethod - def send_metrics( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_metrics(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def send_event( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_event(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: + def send_heartbeat(self) -> dict[str, typing.Any] | None: pass @abc.abstractmethod diff --git a/simvue/factory/proxy/offline.py b/simvue/factory/proxy/offline.py index d56e244f..650c0d36 100644 --- a/simvue/factory/proxy/offline.py +++ b/simvue/factory/proxy/offline.py @@ -26,7 +26,7 @@ class Offline(SimvueBaseClass): def __init__( self, - name: typing.Optional[str], + name: str | None, uniq_id: str, config: SimvueConfiguration, suppress_errors: bool = True, @@ -57,7 +57,7 @@ def _write_json(self, filename: str, data: dict[str, typing.Any]) -> None: @skip_if_failed("_aborted", "_suppress_errors", None) def _mock_api_post( self, prefix: str, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> dict[str, typing.Any] | None: unique_id = time.time() filename = 
os.path.join(self._directory, f"{prefix}-{unique_id}.json") if not data.get("id"): @@ -66,7 +66,7 @@ def _mock_api_post( return data @skip_if_failed("_aborted", "_suppress_errors", (None, None)) - def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[str]]: + def create_run(self, data) -> tuple[str, str | None]: """ Create a run """ @@ -98,10 +98,10 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[str]]: filename = f"{self._directory}/{status}" create_file(filename) - return (self._name, self._id) + return self._name, self._id @skip_if_failed("_aborted", "_suppress_errors", None) - def update(self, data) -> typing.Optional[dict[str, typing.Any]]: + def update(self, data) -> dict[str, typing.Any] | None: """ Update metadata, tags or status """ @@ -128,7 +128,7 @@ def update(self, data) -> typing.Optional[dict[str, typing.Any]]: return data @skip_if_failed("_aborted", "_suppress_errors", None) - def set_folder_details(self, data) -> typing.Optional[dict[str, typing.Any]]: + def set_folder_details(self, data) -> dict[str, typing.Any] | None: """ Set folder details """ @@ -138,9 +138,7 @@ def set_folder_details(self, data) -> typing.Optional[dict[str, typing.Any]]: return data @skip_if_failed("_aborted", "_suppress_errors", None) - def save_file( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def save_file(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Save file """ @@ -154,9 +152,7 @@ def save_file( self._write_json(filename, prepare_for_api(data, False)) return data - def add_alert( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def add_alert(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Add an alert """ @@ -165,7 +161,7 @@ def add_alert( @skip_if_failed("_aborted", "_suppress_errors", None) def set_alert_state( self, alert_id: str, status: str - ) -> typing.Optional[dict[str, typing.Any]]: + ) 
-> dict[str, typing.Any] | None: if not os.path.exists( _alert_file := os.path.join(self._directory, f"alert-{alert_id}.json") ): @@ -200,24 +196,20 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: for alert_file in glob.glob(os.path.join(self._directory, "alert-*.json")) ] - def send_metrics( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_metrics(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send metrics """ return self._mock_api_post("metrics", data) - def send_event( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_event(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send event """ return self._mock_api_post("event", data) @skip_if_failed("_aborted", "_suppress_errors", None) - def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: + def send_heartbeat(self) -> dict[str, typing.Any] | None: logger.debug( f"Creating heartbeat file: {os.path.join(self._directory, 'heartbeat')}" ) diff --git a/simvue/factory/proxy/remote.py b/simvue/factory/proxy/remote.py index ed2e3e24..c1860dac 100644 --- a/simvue/factory/proxy/remote.py +++ b/simvue/factory/proxy/remote.py @@ -23,7 +23,7 @@ class Remote(SimvueBaseClass): def __init__( self, - name: typing.Optional[str], + name: str | None, uniq_id: str, config: "SimvueConfiguration", suppress_errors: bool = True, @@ -66,13 +66,10 @@ def list_tags(self) -> list[str]: ) return [] - if response.status_code == http.HTTPStatus.OK: - return data - - return [] + return data if response.status_code == http.HTTPStatus.OK else [] @skip_if_failed("_aborted", "_suppress_errors", (None, None)) - def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: + def create_run(self, data) -> tuple[str, str | None]: """ Create a run """ @@ -133,7 +130,7 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: @skip_if_failed("_aborted", 
"_suppress_errors", None) def update( self, data: dict[str, typing.Any], _=None - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> dict[str, typing.Any] | None: """ Update metadata, tags or status """ @@ -161,9 +158,7 @@ def update( return None @skip_if_failed("_aborted", "_suppress_errors", None) - def set_folder_details( - self, data, run=None - ) -> typing.Optional[dict[str, typing.Any]]: + def set_folder_details(self, data, run=None) -> dict[str, typing.Any] | None: """ Set folder details """ @@ -212,9 +207,7 @@ def set_folder_details( return None @skip_if_failed("_aborted", "_suppress_errors", False) - def save_file( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def save_file(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Save file """ @@ -279,11 +272,7 @@ def save_file( ) return None else: - if "pickledFile" in data: - use_filename = data["pickledFile"] - else: - use_filename = data["originalPath"] - + use_filename = data.get("pickledFile", data["originalPath"]) try: with open(use_filename, "rb") as fh: response = put( @@ -357,9 +346,7 @@ def add_alert(self, data, run=None): return False @skip_if_failed("_aborted", "_suppress_errors", {}) - def set_alert_state( - self, alert_id, status - ) -> typing.Optional[dict[str, typing.Any]]: + def set_alert_state(self, alert_id, status) -> dict[str, typing.Any] | None: """ Set alert state """ @@ -372,10 +359,7 @@ def set_alert_state( self._error(f"Got exception when setting alert state: {err}") return {} - if response.status_code == http.HTTPStatus.OK: - return response.json() - - return {} + return response.json() if response.status_code == http.HTTPStatus.OK else {} @skip_if_failed("_aborted", "_suppress_errors", []) def list_alerts(self) -> list[dict[str, typing.Any]]: @@ -396,15 +380,10 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: ) return [] - if response.status_code == http.HTTPStatus.OK: - return data - - return [] + return data if 
response.status_code == http.HTTPStatus.OK else [] @skip_if_failed("_aborted", "_suppress_errors", None) - def send_metrics( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_metrics(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send metrics """ @@ -430,9 +409,7 @@ def send_metrics( return None @skip_if_failed("_aborted", "_suppress_errors", None) - def send_event( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_event(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send events """ @@ -458,7 +435,7 @@ def send_event( return None @skip_if_failed("_aborted", "_suppress_errors", None) - def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: + def send_heartbeat(self) -> dict[str, typing.Any] | None: """ Send heartbeat """ diff --git a/simvue/run.py b/simvue/run.py index 87a91dae..ab0cbdf7 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -217,9 +217,7 @@ def _handle_exception_throw( self, exc_type: typing.Type[BaseException] | None, value: BaseException, - traceback: typing.Optional[ - typing.Union[typing.Type[BaseException], BaseException] - ], + traceback: typing.Type[BaseException] | BaseException | None, ) -> None: _exception_thrown: str | None = exc_type.__name__ if exc_type else None _is_running: bool = self._status == "running" @@ -268,9 +266,7 @@ def __exit__( self, exc_type: typing.Type[BaseException] | None, value: BaseException, - traceback: typing.Optional[ - typing.Union[typing.Type[BaseException], BaseException] - ], + traceback: typing.Type[BaseException] | BaseException | None, ) -> None: logger.debug( "Automatically closing run '%s' in status %s", @@ -571,9 +567,7 @@ def init( running: bool = True, retention_period: str | None = None, timeout: int | None = 180, - visibility: typing.Union[ - typing.Literal["public", "tenant"], list[str], None - ] = None, + visibility: typing.Literal["public", "tenant"] 
| list[str] | None = None, no_color: bool = False, ) -> bool: """Initialise a Simvue run @@ -736,14 +730,14 @@ def add_process( self, identifier: str, *cmd_args, - executable: typing.Union[str, pathlib.Path] | None = None, + executable: str | pathlib.Path | None = None, script: pydantic.FilePath | None = None, input_file: pydantic.FilePath | None = None, completion_callback: typing.Optional[ typing.Callable[[int, str, str], None] ] = None, completion_trigger: multiprocessing.synchronize.Event | None = None, - env: typing.Dict[str, str] | None = None, + env: dict[str, str] | None = None, cwd: pathlib.Path | None = None, **cmd_kwargs, ) -> None: @@ -798,7 +792,7 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: callback to run when process terminates (not supported on Windows) completion_trigger : multiprocessing.Event | None, optional this trigger event is set when the processes completes - env : typing.Dict[str, str], optional + env : dict[str, str], optional environment variables for process cwd: pathlib.Path | None, optional working directory to execute the process within. 
Note that executable, input and script file paths should @@ -815,7 +809,7 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: if isinstance(executable, pathlib.Path) and not executable.is_file(): raise FileNotFoundError(f"Executable '{executable}' is not a valid file") - cmd_list: typing.List[str] = [] + cmd_list: list[str] = [] pos_args = list(cmd_args) executable_str: str | None = None @@ -952,9 +946,7 @@ def config( enable_emission_metrics: bool | None = None, disable_resources_metrics: bool | None = None, storage_id: str | None = None, - abort_on_alert: typing.Optional[ - typing.Union[typing.Literal["run", "all", "ignore"], bool] - ] = None, + abort_on_alert: typing.Literal["run", "all", "ignore"] | bool | None = None, ) -> bool: """Optional configuration @@ -1172,7 +1164,7 @@ def log_event(self, message: str, timestamp: str | None = None) -> bool: def _add_metrics_to_dispatch( self, - metrics: dict[str, typing.Union[int, float]], + metrics: dict[str, int | float], step: int | None = None, time: float | None = None, timestamp: str | None = None, @@ -1218,7 +1210,7 @@ def _add_metrics_to_dispatch( @pydantic.validate_call def log_metrics( self, - metrics: dict[MetricKeyString, typing.Union[int, float]], + metrics: dict[MetricKeyString, int | float], step: int | None = None, time: float | None = None, timestamp: str | None = None, @@ -1227,7 +1219,7 @@ def log_metrics( Parameters ---------- - metrics : dict[str, typing.Union[int, float]] + metrics : dict[str, int | float] set of metrics to upload to server for this run step : int, optional manually specify the step index for this log, by default None @@ -1420,7 +1412,7 @@ def save_directory( @pydantic.validate_call def save_all( self, - items: list[typing.Union[pydantic.FilePath, pydantic.DirectoryPath]], + items: list[pydantic.FilePath | pydantic.DirectoryPath], category: typing.Literal["input", "output", "code"], filetype: str | None = None, preserve_path: bool = False, @@ -1555,7 
+1547,7 @@ def close(self) -> bool: @pydantic.validate_call def set_folder_details( self, - metadata: dict[str, typing.Union[int, str, float]] | None = None, + metadata: dict[str, int | str | float] | None = None, tags: list[str] | None = None, description: str | None = None, ) -> bool: diff --git a/simvue/serialization.py b/simvue/serialization.py index 3079d0ae..b974ff95 100644 --- a/simvue/serialization.py +++ b/simvue/serialization.py @@ -102,7 +102,7 @@ def _serialize_plotly_figure(data: typing.Any) -> tuple[str, str]: @check_extra("plot") -def _serialize_matplotlib(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_matplotlib(data: typing.Any) -> tuple[str, str] | None: try: import plotly except ImportError: @@ -185,7 +185,7 @@ def _serialize_pickle(data: typing.Any) -> tuple[str, str] | None: def deserialize_data( data: "Buffer", mimetype: str, allow_pickle: bool -) -> typing.Optional["DeserializedContent"]: +) -> "DeserializedContent" | None: """ Determine which deserializer to use """ @@ -205,7 +205,7 @@ def deserialize_data( @check_extra("plot") -def _deserialize_plotly_figure(data: "Buffer") -> typing.Optional["Figure"]: +def _deserialize_plotly_figure(data: "Buffer") -> "Figure" | None: try: import plotly except ImportError: @@ -215,7 +215,7 @@ def _deserialize_plotly_figure(data: "Buffer") -> typing.Optional["Figure"]: @check_extra("plot") -def _deserialize_matplotlib_figure(data: "Buffer") -> typing.Optional["Figure"]: +def _deserialize_matplotlib_figure(data: "Buffer") -> "Figure" | None: try: import plotly except ImportError: @@ -231,14 +231,14 @@ def _deserialize_numpy_array(data: "Buffer") -> typing.Any | None: return data -def _deserialize_dataframe(data: "Buffer") -> typing.Optional["DataFrame"]: +def _deserialize_dataframe(data: "Buffer") -> "DataFrame" | None: mfile = BytesIO(data) mfile.seek(0) return pandas.read_csv(mfile, index_col=0) @check_extra("torch") -def _deserialize_torch_tensor(data: "Buffer") -> 
typing.Optional["Tensor"]: +def _deserialize_torch_tensor(data: "Buffer") -> "Tensor" | None: try: import torch except ImportError: diff --git a/simvue/utilities.py b/simvue/utilities.py index 4ca7364f..69a8ecd8 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -28,8 +28,8 @@ def find_first_instance_of_file( - file_names: typing.Union[list[str], str], check_user_space: bool = True -) -> typing.Optional[pathlib.Path]: + file_names: list[str] | str, check_user_space: bool = True +) -> pathlib.Path | None: """Traverses a file hierarchy from bottom upwards to find file Returns the first instance of 'file_names' found when moving @@ -123,8 +123,8 @@ def parse_validation_response( def check_extra(extra_name: str) -> typing.Callable: def decorator( - class_func: typing.Optional[typing.Callable] = None, - ) -> typing.Optional[typing.Callable]: + class_func: typing.Callable | None = None, + ) -> typing.Callable | None: @functools.wraps(class_func) def wrapper(self, *args, **kwargs) -> typing.Any: if extra_name == "plot" and not all( @@ -202,7 +202,7 @@ def parse_pydantic_error(error: pydantic.ValidationError) -> str: def skip_if_failed( failure_attr: str, ignore_exc_attr: str, - on_failure_return: typing.Optional[typing.Any] = None, + on_failure_return: typing.Any | None = None, ) -> typing.Callable: """Decorator for ensuring if Simvue throws an exception any other code continues. 
@@ -308,11 +308,11 @@ def remove_file(filename: str) -> None: logger.error("Unable to remove file %s due to: %s", filename, str(err)) -def get_expiry(token) -> typing.Optional[int]: +def get_expiry(token) -> int | None: """ Get expiry date from a JWT token """ - expiry: typing.Optional[int] = None + expiry: int | None = None with contextlib.suppress(jwt.DecodeError): expiry = jwt.decode(token, options={"verify_signature": False})["exp"] @@ -331,7 +331,7 @@ def prepare_for_api(data_in, all=True): return data -def calculate_sha256(filename: str | typing.Any, is_file: bool) -> typing.Optional[str]: +def calculate_sha256(filename: str | typing.Any, is_file: bool) -> str | None: """ Calculate sha256 checksum of the specified file """ @@ -364,7 +364,7 @@ def validate_timestamp(timestamp): return True -def simvue_timestamp(date_time: typing.Optional[datetime.datetime] = None) -> str: +def simvue_timestamp(date_time: datetime.datetime | None = None) -> str: """Return the Simvue valid timestamp Parameters diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index c001c3a6..56e4793f 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -69,7 +69,7 @@ def test_log_metrics( setup_logging: "CountingLogHandler", mocker, request: pytest.FixtureRequest, - visibility: typing.Union[typing.Literal["public", "tenant"], list[str], None], + visibility: typing.Literal["public", "tenant"] | list[str] | None, ) -> None: METRICS = {"a": 10, "b": 1.2} @@ -482,7 +482,7 @@ def test_save_file_online( create_plain_run: typing.Tuple[sv_run.Run, dict], valid_mimetype: bool, preserve_path: bool, - name: typing.Optional[str], + name: str | None, allow_pickle: bool, empty_file: bool, category: typing.Literal["input", "output", "code"], @@ -542,7 +542,7 @@ def test_save_file_online( def test_save_file_offline( create_plain_run_offline: tuple[sv_run.Run, dict], preserve_path: bool, - name: typing.Optional[str], + name: str | None, 
category: typing.Literal["input", "output", "code"] ) -> None: simvue_run, _ = create_plain_run_offline From 5349e2f6bbb99ab79b553fb2f5b6008245272284 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 24 Jan 2025 10:37:02 +0000 Subject: [PATCH 100/163] Changed back to typing.Optional where string type --- simvue/serialization.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/simvue/serialization.py b/simvue/serialization.py index b974ff95..024c2efc 100644 --- a/simvue/serialization.py +++ b/simvue/serialization.py @@ -185,7 +185,7 @@ def _serialize_pickle(data: typing.Any) -> tuple[str, str] | None: def deserialize_data( data: "Buffer", mimetype: str, allow_pickle: bool -) -> "DeserializedContent" | None: +) -> typing.Optional["DeserializedContent"]: """ Determine which deserializer to use """ @@ -205,7 +205,7 @@ def deserialize_data( @check_extra("plot") -def _deserialize_plotly_figure(data: "Buffer") -> "Figure" | None: +def _deserialize_plotly_figure(data: "Buffer") -> typing.Optional["Figure"]: try: import plotly except ImportError: @@ -215,7 +215,7 @@ def _deserialize_plotly_figure(data: "Buffer") -> "Figure" | None: @check_extra("plot") -def _deserialize_matplotlib_figure(data: "Buffer") -> "Figure" | None: +def _deserialize_matplotlib_figure(data: "Buffer") -> typing.Optional["Figure"]: try: import plotly except ImportError: @@ -231,14 +231,14 @@ def _deserialize_numpy_array(data: "Buffer") -> typing.Any | None: return data -def _deserialize_dataframe(data: "Buffer") -> "DataFrame" | None: +def _deserialize_dataframe(data: "Buffer") -> typing.Optional["DataFrame"]: mfile = BytesIO(data) mfile.seek(0) return pandas.read_csv(mfile, index_col=0) @check_extra("torch") -def _deserialize_torch_tensor(data: "Buffer") -> "Tensor" | None: +def _deserialize_torch_tensor(data: "Buffer") -> typing.Optional["Tensor"]: try: import torch except ImportError: From 7128f77efb73f41505fcaef93531ff28589b237b Mon Sep 17 00:00:00 2001 From: Matt 
Field Date: Fri, 24 Jan 2025 11:46:16 +0000 Subject: [PATCH 101/163] Stop abort test crashing pytest --- tests/functional/test_run_class.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 56e4793f..ee4e12cc 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -55,7 +55,6 @@ def test_run_with_emissions() -> None: run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) time.sleep(5) _run = RunObject(identifier=run_created.id) - import pdb; pdb.set_trace() assert list(_run.metrics) @@ -732,6 +731,7 @@ def testing_exit(status: int) -> None: time.sleep(2) run.log_alert(alert_id, "critical") _alert = Alert(identifier=alert_id) + time.sleep(1) assert _alert.get_status(run.id) counter = 0 while run._status != "terminated" and counter < 15: From 49f9ff1bba7e24b356c2d06f092c170859383e2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 24 Jan 2025 13:10:51 +0000 Subject: [PATCH 102/163] Added upload submodule and started tests for offline --- simvue/api/objects/alert/base.py | 2 +- simvue/api/objects/alert/fetch.py | 2 -- simvue/api/objects/artifact.py | 10 +++++-- simvue/api/objects/base.py | 28 +++++++----------- simvue/config/user.py | 5 ++++ simvue/sender.py | 0 simvue/{sender/__init__.py => upload.py} | 16 +++++++--- tests/conftest.py | 17 +++++++++++ tests/functional/test_run_class.py | 4 +-- tests/unit/test_artifact.py | 37 ++++++++++++------------ tests/unit/test_event_alert.py | 16 +++++----- tests/unit/test_file_storage.py | 3 +- 12 files changed, 83 insertions(+), 57 deletions(-) create mode 100644 simvue/sender.py rename simvue/{sender/__init__.py => upload.py} (88%) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 1cbbe34d..fc357dce 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -28,7 +28,7 @@ def 
new(cls, **kwargs): def __init__(self, identifier: str | None = None, **kwargs) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" - super().__init__(identifier, **kwargs) + super().__init__(identifier=identifier, **kwargs) def compare(self, other: "AlertBase") -> bool: return type(self) is type(other) and self.name == other.name diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index f87905b1..9ee8d8e6 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -27,8 +27,6 @@ class Alert: @pydantic.validate_call() def __new__(cls, identifier: str, **kwargs) -> AlertType: """Retrieve an object representing an alert either locally or on the server by id""" - if identifier.startswith("offline_"): - raise ValueError("Cannot retrieve offline run from server") _alert_pre = AlertBase(identifier=identifier, **kwargs) if _alert_pre.source == "events": return EventsAlert(identifier=identifier, **kwargs) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 203841e9..b85d8c98 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -52,7 +52,7 @@ def __init__( # If the artifact is an online instance, need a place to store the response # from the initial creation self._init_data: dict[str, dict] = {} - self._staging |= {"runs": []} + self._staging |= {"runs": {}} @classmethod def new( @@ -213,7 +213,7 @@ def commit(self) -> None: def attach_to_run(self, run_id: str, category: Category) -> None: """Attach this artifact to a given run""" - self._staging["runs"].append({"id": run_id, "category": category}) + self._staging["runs"][run_id] = category if self._offline: super().commit() @@ -237,6 +237,12 @@ def attach_to_run(self, run_id: str, category: Category) -> None: response=_response, ) + def on_reconnect(self, id_mapping: dict[str, str]) -> None: + _offline_staging = dict(self._staging["runs"].items()) + 
self._staging["runs"] = {} + for id, category in _offline_staging.items(): + self.attach_to_run(run_id=id_mapping[id], category=category) + def _upload(self, file: io.BytesIO) -> None: if self._offline: super().commit() diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 9137d234..0a47abc4 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -178,7 +178,9 @@ def __init__( self._staging = self._get() # Recover any locally staged changes if not read-only - self._staging |= {} if _read_only else self._get_local_staged() + self._staging |= ( + {} if (_read_only and not self._offline) else self._get_local_staged() + ) self._staging |= kwargs @@ -190,7 +192,7 @@ def _get_local_staged(self, obj_label: str | None = None) -> dict[str, typing.An with self._local_staging_file.open() as in_f: _staged_data = json.load(in_f) - return _staged_data.get(obj_label or self._label, {}).get(self._identifier, {}) + return _staged_data def _stage_to_other(self, obj_label: str, key: str, value: typing.Any) -> None: """Stage a change to another object type""" @@ -365,8 +367,6 @@ def commit(self) -> None: self._logger.debug( f"Writing updates to staging file for {self._label} '{self.id}': {self._staging}" ) - _offline_dir: pathlib.Path = self._user_config.offline.cache - _offline_file = _offline_dir.joinpath("staging.json") self._cache() return @@ -421,7 +421,9 @@ def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: ) if isinstance(_json_response, list): - raise RuntimeError("Expected dictionary from JSON response but got type list") + raise RuntimeError( + "Expected dictionary from JSON response but got type list" + ) if _id := _json_response.get("id"): self._logger.debug("'%s' created successfully", _id) @@ -451,18 +453,7 @@ def delete( self, _linked_objects: list[str] | None = None, **kwargs ) -> dict[str, typing.Any]: if self._get_local_staged(): - with self._local_staging_file.open() as in_f: - _local_data = 
json.load(in_f) - - _local_data[self._label].pop(self._identifier, None) - - # If this object has information within other object types - # (e.g. runs can have metrics) ensure this is deleted too - for obj_type in _linked_objects or []: - _local_data[obj_type].pop(self._identifier, None) - - with self._local_staging_file.open("w") as out_f: - json.dump(_local_data, out_f, indent=2) + self._local_staging_file.unlink(missing_ok=True) if self._offline: return {"id": self._identifier} @@ -532,6 +523,9 @@ def to_dict(self) -> dict[str, typing.Any]: for key in self._properties } + def on_reconnect(self, id_mapping: dict[str, str]) -> None: + pass + @property def staged(self) -> dict[str, typing.Any] | None: """Return currently staged changes to this object""" diff --git a/simvue/config/user.py b/simvue/config/user.py index d53ce79d..364a1742 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -128,6 +128,11 @@ def _check_server( f"< {SIMVUE_SERVER_LOWER_CONSTRAINT}" ) + @pydantic.validate_call + def write(self, out_directory: pydantic.DirectoryPath) -> None: + with out_directory.joinpath(CONFIG_FILE_NAMES[0]).open("w") as out_f: + toml.dump(self.model_dump(), out_f) + @pydantic.model_validator(mode="after") @classmethod def check_valid_server(cls, values: "SimvueConfiguration") -> bool: diff --git a/simvue/sender.py b/simvue/sender.py new file mode 100644 index 00000000..e69de29b diff --git a/simvue/sender/__init__.py b/simvue/upload.py similarity index 88% rename from simvue/sender/__init__.py rename to simvue/upload.py index 65823b52..0ea66b42 100644 --- a/simvue/sender/__init__.py +++ b/simvue/upload.py @@ -6,6 +6,7 @@ import typing from simvue.api.objects.base import SimvueObject from simvue.utilities import prettify_pydantic + import simvue.api.objects UPLOAD_ORDER: tuple[str, ...] 
= ( @@ -26,12 +27,13 @@ def _check_local_staging( cache_dir: pathlib.Path, ) -> dict[str, dict[pathlib.Path, dict[str, typing.Any]]]: """Check local cache and assemble any objects for sending""" - _upload_data: dict[str, dict[pathlib.Path, dict[str, typing.Any]]] = {} - for obj_type in UPLOAD_ORDER: - _upload_data[obj_type] = { + _upload_data: dict[str, dict[pathlib.Path, dict[str, typing.Any]]] = { + obj_type: { _path: json.load(_path.open()) for _path in cache_dir.glob(f"{obj_type}/*.json") } + for obj_type in UPLOAD_ORDER + } return _upload_data @@ -58,10 +60,15 @@ def _assemble_objects( # Rather than a script with API calls each object will send itself @prettify_pydantic @pydantic.validate_call -def uploader(cache_dir: pydantic.DirectoryPath) -> None: +def uploader( + cache_dir: pydantic.DirectoryPath, _offline_ids: list[str] | None = None +) -> None: _locally_staged = _check_local_staging(cache_dir) _offline_to_online_id_mapping: dict[str, str] = {} for _file_path, obj in _assemble_objects(_locally_staged): + if _offline_ids and obj._identifier not in _offline_ids: + continue + if not (_current_id := obj._identifier): raise RuntimeError( f"Object of type '{obj.__class__.__name__}' has no identifier" @@ -80,3 +87,4 @@ def uploader(cache_dir: pydantic.DirectoryPath) -> None: _logger.info(f"Created {obj.__class__.__name__} '{_new_id}'") _file_path.unlink(missing_ok=True) _offline_to_online_id_mapping[_current_id] = _new_id + obj.on_reconnect(_offline_to_online_id_mapping) diff --git a/tests/conftest.py b/tests/conftest.py index d9bd11ac..0c39b5e6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,7 @@ from simvue.api.objects.artifact import Artifact import simvue.run as sv_run import simvue.api.objects as sv_api_obj +import simvue.config.user as sv_cfg import simvue.utilities MAX_BUFFER_SIZE: int = 10 @@ -206,3 +207,19 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur time.sleep(1.) 
return TEST_DATA + +@pytest.fixture +def offline_test() -> pathlib.Path: + _current_config: sv_cfg.SimvueConfiguration = sv_cfg.SimvueConfiguration.fetch() + with tempfile.TemporaryDirectory() as tempd: + _tempdir = pathlib.Path(tempd) + _cache_dir = _tempdir.joinpath(".simvue") + _cache_dir.mkdir(exist_ok=True) + _current_config.offline.cache = f"{_cache_dir}" + _current_config.write(pathlib.Path(tempd)) + _here = os.getcwd() + os.chdir(_tempdir) + assert sv_cfg.SimvueConfiguration.fetch().offline.cache == _cache_dir + yield _tempdir + os.chdir(_here) + diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 56e4793f..6a24a8c2 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -501,7 +501,7 @@ def test_save_file_online( simvue_run.save_file( out_name, category=category, - filetype=file_type, + mime_type=file_type, preserve_path=preserve_path, name=name, ) @@ -510,7 +510,7 @@ def test_save_file_online( simvue_run.save_file( out_name, category=category, - filetype=file_type, + mime_type=file_type, preserve_path=preserve_path, ) return diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index a2aa680b..94b26262 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -59,30 +59,29 @@ def test_artifact_creation_online() -> None: @pytest.mark.api @pytest.mark.offline -def test_artifact_creation_offline() -> None: +def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name, offline=True) _run = Run.new(folder=_folder_name, offline=True) - with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: - _path = pathlib.Path(temp_f.name) - with _path.open("w") as out_f: - out_f.write("Hello World!") - _artifact = Artifact.new_file( - name=f"test_artifact_{_uuid}", - file_path=_path, - storage_id=None, - file_type=None, - 
offline=True, - metadata=None - ) - _folder.commit() - _run.commit() - _artifact.commit() - time.sleep(1) - assert _artifact.name == f"test_artifact_{_uuid}" + _path = offline_test.joinpath("hello_world.txt") + + with _path.open("w") as out_f: + out_f.write("Hello World!") + + _artifact = Artifact.new_file( + name=f"test_artifact_{_uuid}", + file_path=_path, + storage_id=None, + mime_type=None, + offline=True, + metadata=None + ) + _folder.commit() + _run.commit() + time.sleep(1) + assert _artifact.name == f"test_artifact_{_uuid}" _run.delete() _folder.delete() - diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index c02b5fe8..17a94246 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -1,4 +1,5 @@ import time +import json import pytest import contextlib import uuid @@ -35,7 +36,8 @@ def test_event_alert_creation_offline() -> None: frequency=1, pattern="completed", notification="none", - offline=True + offline=True, + description=None ) _alert.commit() @@ -44,12 +46,9 @@ def test_event_alert_creation_offline() -> None: assert _alert.alert.pattern == "completed" assert _alert.name == f"events_alert_{_uuid}" assert _alert.notification == "none" - _alert.delete() - with _alert._local_staging_file.open() as in_f: - _local_data = json.load(in_f) + _alert.delete() - assert not _local_data.get(_alert._label, {}).get(_alert.id) @pytest.mark.api @@ -84,17 +83,16 @@ def test_event_alert_modification_offline() -> None: frequency=1, pattern="completed", notification="none", - offline=True + offline=True, + description=None ) _alert.commit() time.sleep(1) _new_alert = Alert(_alert.id) assert isinstance(_new_alert, EventsAlert) + _new_alert.read_only(False) _new_alert.description = "updated!" - with pytest.raises(AttributeError): - assert _new_alert.description - _new_alert.commit() assert _new_alert.description == "updated!" 
_new_alert.delete() diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index 8c0a718b..6af25a1e 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -4,6 +4,7 @@ import uuid from simvue.api.objects import FileStorage +from simvue.upload import uploader @pytest.mark.api @pytest.mark.online @@ -22,7 +23,7 @@ def test_create_file_storage_online() -> None: @pytest.mark.offline def test_create_file_storage_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False, offline=True) + _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False, offline=True, enabled=True) _storage.commit() assert _storage.name == _uuid _storage.delete() From 21983cbc1a9a93a155c2f611dac285940e5d8bde Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 24 Jan 2025 13:18:31 +0000 Subject: [PATCH 103/163] Updated monitor processes test with mark --- tests/functional/test_run_execute_process.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index 29fc42ec..4e14a393 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -9,6 +9,7 @@ from simvue import Run, Client @pytest.mark.executor +@pytest.mark.offline def test_monitor_processes(create_plain_run_offline: tuple[Run, dict]): _run, _ = create_plain_run_offline _run.add_process("process_1", "Hello world!", executable="echo", n=True) From 80e0c63be0ca732ba448a8c1a39d9c8bb5d68958 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 24 Jan 2025 13:22:06 +0000 Subject: [PATCH 104/163] Added uploader to offline test --- pyproject.toml | 1 + simvue/config/user.py | 2 +- simvue/upload.py | 3 ++- tests/conftest.py | 7 +------ tests/unit/test_artifact.py | 7 +++++++ 5 files changed, 12 
insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6837df44..c0cc7322 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,7 @@ testpaths = [ "tests" ] markers = [ + "codecarbon: tests for emission metrics", "client: tests of Simvue client", "converters: tests for Simvue object converters", "dispatch: test data dispatcher", diff --git a/simvue/config/user.py b/simvue/config/user.py index 364a1742..931d2fa5 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -197,7 +197,7 @@ def fetch( _config_dict["offline"]["cache"] = _default_dir # Ranking of configurations for token and URl is: - # Envionment Variables > Run Definition > Configuration File + # Environment Variables > Run Definition > Configuration File _server_url = os.environ.get( "SIMVUE_URL", server_url or _config_dict["server"].get("url") diff --git a/simvue/upload.py b/simvue/upload.py index 0ea66b42..371f8c84 100644 --- a/simvue/upload.py +++ b/simvue/upload.py @@ -62,7 +62,7 @@ def _assemble_objects( @pydantic.validate_call def uploader( cache_dir: pydantic.DirectoryPath, _offline_ids: list[str] | None = None -) -> None: +) -> typing.Generator[tuple[str, SimvueObject], None, None]: _locally_staged = _check_local_staging(cache_dir) _offline_to_online_id_mapping: dict[str, str] = {} for _file_path, obj in _assemble_objects(_locally_staged): @@ -88,3 +88,4 @@ def uploader( _file_path.unlink(missing_ok=True) _offline_to_online_id_mapping[_current_id] = _new_id obj.on_reconnect(_offline_to_online_id_mapping) + yield _current_id, obj diff --git a/tests/conftest.py b/tests/conftest.py index 0c39b5e6..c010281e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -210,16 +210,11 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur @pytest.fixture def offline_test() -> pathlib.Path: - _current_config: sv_cfg.SimvueConfiguration = sv_cfg.SimvueConfiguration.fetch() with tempfile.TemporaryDirectory() as tempd: _tempdir = 
pathlib.Path(tempd) _cache_dir = _tempdir.joinpath(".simvue") _cache_dir.mkdir(exist_ok=True) - _current_config.offline.cache = f"{_cache_dir}" - _current_config.write(pathlib.Path(tempd)) - _here = os.getcwd() - os.chdir(_tempdir) + os.environ["SIMVUE_OFFLINE_DIRECTORY"] = f"{_cache_dir}" assert sv_cfg.SimvueConfiguration.fetch().offline.cache == _cache_dir yield _tempdir - os.chdir(_here) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 94b26262..d14c906d 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -8,6 +8,7 @@ from simvue.api.objects import Artifact, Run from simvue.api.objects.folder import Folder +from simvue.upload import uploader @pytest.mark.api @pytest.mark.online @@ -82,6 +83,12 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: _run.commit() time.sleep(1) assert _artifact.name == f"test_artifact_{_uuid}" + _created_object_counter: int = 0 + for _offline, _obj in uploader(offline_test.joinpath(".simvue"), _offline_ids=[_folder.id, _run.id, _artifact.id]): + _created_object_counter += 1 + assert _obj.to_dict() + _obj.delete() + assert _created_object_counter == 3 _run.delete() _folder.delete() From 17d294caf1262d361427f1bc611f33ba89f862a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 24 Jan 2025 13:28:52 +0000 Subject: [PATCH 105/163] Fix selective uploading --- simvue/upload.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/simvue/upload.py b/simvue/upload.py index 371f8c84..4961ffb9 100644 --- a/simvue/upload.py +++ b/simvue/upload.py @@ -41,12 +41,16 @@ def _check_local_staging( # We have to link created IDs to other objects def _assemble_objects( locally_staged: dict[str, dict[pathlib.Path, typing.Any]], + offline_ids: list[str] | None = None, ) -> typing.Generator[tuple[pathlib.Path, SimvueObject], None, None]: for obj_type in UPLOAD_ORDER: _data: dict[pathlib.Path, dict[str, typing.Any]] = 
locally_staged.get( obj_type, {} ) for _file_path, _obj in _data.items(): + _identifier = _file_path.name.split(".")[0] + if offline_ids and _identifier not in offline_ids: + continue _exact_type: str = _obj.pop("obj_type") try: _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) @@ -65,10 +69,7 @@ def uploader( ) -> typing.Generator[tuple[str, SimvueObject], None, None]: _locally_staged = _check_local_staging(cache_dir) _offline_to_online_id_mapping: dict[str, str] = {} - for _file_path, obj in _assemble_objects(_locally_staged): - if _offline_ids and obj._identifier not in _offline_ids: - continue - + for _file_path, obj in _assemble_objects(_locally_staged, _offline_ids): if not (_current_id := obj._identifier): raise RuntimeError( f"Object of type '{obj.__class__.__name__}' has no identifier" From 2e9436af26dd0468035dc3d323ca6c048694a8db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 24 Jan 2025 16:45:37 +0000 Subject: [PATCH 106/163] Fix and simplify to_dict method --- simvue/api/objects/base.py | 9 +++++---- tests/unit/test_artifact.py | 4 ++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 0a47abc4..64816a0c 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -238,6 +238,9 @@ def _get_attribute( ) from e try: + self._logger.debug( + f"Retrieving attribute '{attribute}' from {self._label} '{self._identifier}'" + ) return self._get(url=url)[attribute] except KeyError as e: if default: @@ -478,6 +481,7 @@ def _get( if not self.url: raise RuntimeError(f"Identifier for instance of {self._label} Unknown") + _response = sv_get( url=f"{url or self.url}", headers=self._headers, params=kwargs ) @@ -518,10 +522,7 @@ def _cache(self) -> None: json.dump(_local_data, out_f, indent=2) def to_dict(self) -> dict[str, typing.Any]: - return { - key: value.__str__() if (value := getattr(self, key)) is not None else None - for 
key in self._properties - } + return self._get() | self._staging def on_reconnect(self, id_mapping: dict[str, str]) -> None: pass diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index d14c906d..1370da42 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -71,6 +71,8 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: with _path.open("w") as out_f: out_f.write("Hello World!") + _folder.commit() + _run.commit() _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", file_path=_path, @@ -79,8 +81,6 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: offline=True, metadata=None ) - _folder.commit() - _run.commit() time.sleep(1) assert _artifact.name == f"test_artifact_{_uuid}" _created_object_counter: int = 0 From b2862f390bd05ea835031d6cbc9bdf8844764aef Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 24 Jan 2025 18:17:08 +0000 Subject: [PATCH 107/163] Added loads of stuff for uploader --- simvue/api/objects/base.py | 1 - simvue/api/objects/events.py | 9 ++- simvue/api/objects/folder.py | 4 +- simvue/api/objects/metrics.py | 17 +++-- simvue/api/objects/run.py | 17 ++--- simvue/api/objects/storage/base.py | 2 +- simvue/api/objects/tag.py | 4 +- simvue/run.py | 7 +- simvue/upload.py | 104 +++++++++++++---------------- tests/unit/test_metrics.py | 2 +- 10 files changed, 85 insertions(+), 82 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 0a47abc4..81d0a839 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -513,7 +513,6 @@ def _cache(self) -> None: _local_data = json.load(in_f) _local_data |= self._staging - with self._local_staging_file.open("w", encoding="utf-8") as out_f: json.dump(_local_data, out_f, indent=2) diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py index d1d6fab7..20bfe81d 100644 --- a/simvue/api/objects/events.py +++ b/simvue/api/objects/events.py @@ -60,12 +60,13 
@@ def get( @classmethod @pydantic.validate_call - def new(cls, *, run_id: str, offline: bool = False, events: list[EventSet]): + def new(cls, *, run: str, offline: bool = False, events: list[EventSet], **kwargs): """Create a new Events entry on the Simvue server""" _events = Events( - run=run_id, + run=run, events=[event.model_dump() for event in events], _read_only=False, + **kwargs, ) _events.offline_mode(offline) return _events @@ -114,3 +115,7 @@ def delete( self, _linked_objects: list[str] | None = None, **kwargs ) -> dict[str, typing.Any]: raise NotImplementedError("Cannot delete event set") + + def on_reconnect(self, offline_to_online_id_mapping: dict[str, str]): + if online_run_id := offline_to_online_id_mapping.get(self._staging["run"]): + self._staging["run"] = online_run_id diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index cae9353d..c62c4b5f 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -55,10 +55,10 @@ def new( *, path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], offline: bool = False, - **_, + **kwargs, ): """Create a new Folder on the Simvue server with the given path""" - _folder = Folder(path=path, _read_only=False) + _folder = Folder(path=path, _read_only=False, **kwargs) _folder.offline_mode(offline) return _folder diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index b31bf667..3a68b856 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -36,15 +36,18 @@ def __init__( @classmethod @pydantic.validate_call - def new(cls, *, run_id: str, offline: bool = False, metrics: list[MetricSet]): + def new( + cls, *, run: str, offline: bool = False, metrics: list[MetricSet], **kwargs + ): """Create a new Events entry on the Simvue server""" - _events = Metrics( - run=run_id, + _metrics = Metrics( + run=run, metrics=[metric.model_dump() for metric in metrics], _read_only=False, + **kwargs, ) - _events.offline_mode(offline) - 
return _events + _metrics.offline_mode(offline) + return _metrics @classmethod @pydantic.validate_call @@ -106,3 +109,7 @@ def delete( self, _linked_objects: list[str] | None = None, **kwargs ) -> dict[str, typing.Any]: raise NotImplementedError("Cannot delete metric set") + + def on_reconnect(self, offline_to_online_id_mapping: dict[str, str]): + if online_run_id := offline_to_online_id_mapping.get(self._staging["run"]): + self._staging["run"] = online_run_id diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index a2cd68bb..316211a8 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -42,23 +42,24 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: self.visibility = Visibility(self) super().__init__(identifier, **kwargs) - self._staged_metrics: list[dict[str, str | dict | int]] = ( - self._get_local_staged("metrics").get(self._identifier) # type: ignore - if self._identifier - else [] - ) - @classmethod @pydantic.validate_call def new( cls, *, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], + system: dict[str, typing.Any] | None = None, + status: typing.Literal[ + "terminated", "created", "failed", "completed", "lost", "running" + ] = "created", offline: bool = False, - **_, + **kwargs, ) -> Self: """Create a new Folder on the Simvue server with the given path""" - _run = Run(folder=folder, system=None, status="created", _read_only=False) + + _run = Run( + folder=folder, system=system, status=status, _read_only=False, **kwargs + ) _run.offline_mode(offline) return _run diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 4ed3c59a..f3ca5e14 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -20,7 +20,7 @@ def __init__( super().__init__(identifier, _read_only=_read_only, **kwargs) @classmethod - def new(cls, **kwargs): + def new(cls, **_): pass @property diff --git a/simvue/api/objects/tag.py 
b/simvue/api/objects/tag.py index 0e1f6c79..6da0b9f7 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -19,10 +19,10 @@ class Tag(SimvueObject): @classmethod @pydantic.validate_call - def new(cls, *, name: str, offline: bool = False, **_): + def new(cls, *, name: str, offline: bool = False, **kwargs): """Create a new Tag on the Simvue server""" _data: dict[str, typing.Any] = {"name": name} - _tag = Tag(name=name, _read_only=False) + _tag = Tag(name=name, _read_only=False, **kwargs) _tag.offline_mode(offline) return _tag diff --git a/simvue/run.py b/simvue/run.py index ab0cbdf7..468584c0 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -435,7 +435,7 @@ def _dispatch_callback( _events.commit() else: _metrics = Metrics.new( - run_id=self.id, + run=self.id, offline=self._user_config.run.mode == "offline", metrics=buffer, ) @@ -1504,6 +1504,11 @@ def _tidy_run(self) -> None: self._dispatcher.purge() self._dispatcher.join() + if self._user_config.run.mode == "offline": + self._user_config.offline.cache.joinpath( + "runs", f"{self._id}.closed" + ).touch() + if _non_zero := self.executor.exit_status: _error_msgs: dict[str, str] | None = self.executor.get_error_summary() _error_msg = "\n".join( diff --git a/simvue/upload.py b/simvue/upload.py index 4961ffb9..9fcdd4fb 100644 --- a/simvue/upload.py +++ b/simvue/upload.py @@ -1,11 +1,9 @@ # Collator import json -import pathlib import pydantic import logging import typing from simvue.api.objects.base import SimvueObject -from simvue.utilities import prettify_pydantic import simvue.api.objects @@ -18,75 +16,63 @@ "alerts", "runs", "artifacts", + "metrics", + "events", ) _logger = logging.getLogger(__name__) -def _check_local_staging( - cache_dir: pathlib.Path, -) -> dict[str, dict[pathlib.Path, dict[str, typing.Any]]]: - """Check local cache and assemble any objects for sending""" - _upload_data: dict[str, dict[pathlib.Path, dict[str, typing.Any]]] = { - obj_type: { - _path: json.load(_path.open()) - for 
_path in cache_dir.glob(f"{obj_type}/*.json") - } - for obj_type in UPLOAD_ORDER - } - return _upload_data - +# Rather than a script with API calls each object will send itself +@pydantic.validate_call +def uploader( + cache_dir: pydantic.DirectoryPath, _offline_ids: list[str] | None = None +) -> typing.Generator[tuple[str, SimvueObject], None, None]: + _offline_to_online_id_mapping: dict[str, str] = {} + cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) + for file_path in cache_dir.glob("server_ids/*.txt"): + _offline_to_online_id_mapping[file_path.name.split(".")[0]] = ( + file_path.read_text() + ) -# Create instances from local cache -# We have to link created IDs to other objects -def _assemble_objects( - locally_staged: dict[str, dict[pathlib.Path, typing.Any]], - offline_ids: list[str] | None = None, -) -> typing.Generator[tuple[pathlib.Path, SimvueObject], None, None]: for obj_type in UPLOAD_ORDER: - _data: dict[pathlib.Path, dict[str, typing.Any]] = locally_staged.get( - obj_type, {} - ) - for _file_path, _obj in _data.items(): - _identifier = _file_path.name.split(".")[0] - if offline_ids and _identifier not in offline_ids: - continue - _exact_type: str = _obj.pop("obj_type") + for file_path in cache_dir.glob(f"{obj_type}/*.json"): + data = json.load(file_path.open()) + _exact_type: str = data.pop("obj_type") try: _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) except AttributeError as e: raise RuntimeError( f"Attempt to initialise unknown type '{_exact_type}'" ) from e - yield _file_path, _instance_class.new(**_obj) + obj_for_upload = _instance_class.new(**data) + obj_for_upload.on_reconnect(_offline_to_online_id_mapping) -# Rather than a script with API calls each object will send itself -@prettify_pydantic -@pydantic.validate_call -def uploader( - cache_dir: pydantic.DirectoryPath, _offline_ids: list[str] | None = None -) -> typing.Generator[tuple[str, SimvueObject], None, None]: - _locally_staged = 
_check_local_staging(cache_dir) - _offline_to_online_id_mapping: dict[str, str] = {} - for _file_path, obj in _assemble_objects(_locally_staged, _offline_ids): - if not (_current_id := obj._identifier): - raise RuntimeError( - f"Object of type '{obj.__class__.__name__}' has no identifier" - ) - try: - obj.commit() - _new_id = obj.id - except RuntimeError as e: - if "status 409" in e.args[0]: - continue - raise e - if not _new_id: - raise RuntimeError( - f"Object of type '{obj.__class__.__name__}' has no identifier" - ) - _logger.info(f"Created {obj.__class__.__name__} '{_new_id}'") - _file_path.unlink(missing_ok=True) - _offline_to_online_id_mapping[_current_id] = _new_id - obj.on_reconnect(_offline_to_online_id_mapping) - yield _current_id, obj + _current_id = file_path.name.split(".")[0] + + try: + obj_for_upload.commit() + _new_id = obj_for_upload.id + except RuntimeError as e: + if "status 409" in e.args[0]: + continue + raise e + if not _new_id: + raise RuntimeError( + f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" + ) + _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") + file_path.unlink(missing_ok=True) + _offline_to_online_id_mapping[_current_id] = _new_id + if obj_type == "runs": + cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text( + _new_id + ) + + if cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists(): + cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() + cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() + _logger.info( + f"Run {_current_id} closed - deleting cached copies..." 
+ ) diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index a9623410..40a19adc 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -24,7 +24,7 @@ def test_metrics_creation_online() -> None: _folder.commit() _run.commit() _metrics = Metrics.new( - run_id=_run.id, + run=_run.id, metrics=[ { "timestamp": datetime.datetime.now(datetime.timezone.utc).strftime( From 6db3468c3101c3bea9875c02dc771c864ba72435 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 27 Jan 2025 09:35:28 +0000 Subject: [PATCH 108/163] Fix alert raise test --- simvue/api/objects/alert/base.py | 2 ++ tests/functional/test_run_class.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index fc357dce..bf516003 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -146,3 +146,5 @@ def get_status(self, run_id: str) -> typing.Literal["ok", "critical"]: expected_status=[http.HTTPStatus.OK], scenario=f"Retrieving status for alert '{self.id}' in run '{run_id}'", ) + + return _json_response.get("status") diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 25f572dd..c380a954 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -732,7 +732,7 @@ def testing_exit(status: int) -> None: run.log_alert(alert_id, "critical") _alert = Alert(identifier=alert_id) time.sleep(1) - assert _alert.get_status(run.id) + assert _alert.get_status(run.id) == "critical" counter = 0 while run._status != "terminated" and counter < 15: time.sleep(1) From 65d4b632aaa66e7a1ba9adbf201464c2d9c6ac3f Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 27 Jan 2025 12:31:23 +0000 Subject: [PATCH 109/163] Adding fixes to upload --- simvue/api/objects/events.py | 4 +--- simvue/api/objects/metrics.py | 4 +--- simvue/run.py | 2 +- simvue/upload.py | 27 +++++++++++++++------------ 4 
files changed, 18 insertions(+), 19 deletions(-) diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py index 20bfe81d..03042652 100644 --- a/simvue/api/objects/events.py +++ b/simvue/api/objects/events.py @@ -30,9 +30,7 @@ def __init__( **kwargs, ) -> None: self._label = "event" - super().__init__( - identifier=None, _read_only=_read_only, _local=_local, **kwargs - ) + super().__init__(_read_only=_read_only, _local=_local, **kwargs) self._run_id = self._staging.get("run") @classmethod diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index 3a68b856..ef0edd51 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -29,9 +29,7 @@ def __init__( **kwargs, ) -> None: self._label = "metric" - super().__init__( - identifier=None, _read_only=_read_only, _local=_local, **kwargs - ) + super().__init__(_read_only=_read_only, _local=_local, **kwargs) self._run_id = self._staging.get("run") @classmethod diff --git a/simvue/run.py b/simvue/run.py index 468584c0..cc35e513 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -428,7 +428,7 @@ def _dispatch_callback( ) -> None: if category == "events": _events = Events.new( - run_id=self.id, + run=self.id, offline=self._user_config.run.mode == "offline", events=buffer, ) diff --git a/simvue/upload.py b/simvue/upload.py index 9fcdd4fb..23d2c17f 100644 --- a/simvue/upload.py +++ b/simvue/upload.py @@ -28,15 +28,15 @@ def uploader( cache_dir: pydantic.DirectoryPath, _offline_ids: list[str] | None = None ) -> typing.Generator[tuple[str, SimvueObject], None, None]: - _offline_to_online_id_mapping: dict[str, str] = {} cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) - for file_path in cache_dir.glob("server_ids/*.txt"): - _offline_to_online_id_mapping[file_path.name.split(".")[0]] = ( - file_path.read_text() - ) + _id_mapping: dict[str, str] = { + file_path.name.split(".")[0]: file_path.read_text() + for file_path in cache_dir.glob("server_ids/*.txt") + 
} for obj_type in UPLOAD_ORDER: for file_path in cache_dir.glob(f"{obj_type}/*.json"): + _current_id = file_path.name.split(".")[0] data = json.load(file_path.open()) _exact_type: str = data.pop("obj_type") try: @@ -45,11 +45,11 @@ def uploader( raise RuntimeError( f"Attempt to initialise unknown type '{_exact_type}'" ) from e - - obj_for_upload = _instance_class.new(**data) - obj_for_upload.on_reconnect(_offline_to_online_id_mapping) - - _current_id = file_path.name.split(".")[0] + # We want to reconnect if there is an online ID stored for this file + obj_for_upload = _instance_class.new( + identifier=_id_mapping.get(_current_id, None), **data + ) + obj_for_upload.on_reconnect(_id_mapping) try: obj_for_upload.commit() @@ -62,9 +62,12 @@ def uploader( raise RuntimeError( f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" ) - _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") + if _id_mapping.get(_current_id, None): + _logger.info(f"Updated {obj_for_upload.__class__.__name__} '{_new_id}'") + else: + _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") file_path.unlink(missing_ok=True) - _offline_to_online_id_mapping[_current_id] = _new_id + _id_mapping[_current_id] = _new_id if obj_type == "runs": cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text( _new_id From 4a2c26b5db00dd2f3c776da8620b062a0e234508 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 27 Jan 2025 15:04:30 +0000 Subject: [PATCH 110/163] Added parallel thread processing --- simvue/upload.py | 108 +++++++++++++++++++++++++++-------------------- 1 file changed, 62 insertions(+), 46 deletions(-) diff --git a/simvue/upload.py b/simvue/upload.py index 23d2c17f..a9d22d50 100644 --- a/simvue/upload.py +++ b/simvue/upload.py @@ -3,6 +3,7 @@ import pydantic import logging import typing +from concurrent.futures import ThreadPoolExecutor from simvue.api.objects.base import SimvueObject import simvue.api.objects @@ -23,10 +24,54 @@ 
_logger = logging.getLogger(__name__) -# Rather than a script with API calls each object will send itself +def upload_cached_files( + cache_dir: pydantic.DirectoryPath, + obj_type: str, + file_path: pydantic.FilePath, + id_mapping: dict[str, str], +): + _current_id = file_path.name.split(".")[0] + data = json.load(file_path.open()) + _exact_type: str = data.pop("obj_type") + try: + _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) + except AttributeError as e: + raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e + # We want to reconnect if there is an online ID stored for this file + obj_for_upload = _instance_class.new( + identifier=id_mapping.get(_current_id, None), **data + ) + obj_for_upload.on_reconnect(id_mapping) + + try: + obj_for_upload.commit() + _new_id = obj_for_upload.id + except RuntimeError as e: + if "status 409" in e.args[0]: + return + raise e + if not _new_id: + raise RuntimeError( + f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" + ) + if id_mapping.get(_current_id, None): + _logger.info(f"Updated {obj_for_upload.__class__.__name__} '{_new_id}'") + else: + _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") + file_path.unlink(missing_ok=True) + id_mapping[_current_id] = _new_id + if obj_type == "runs": + cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) + + if cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists(): + cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() + cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() + _logger.info(f"Run {_current_id} closed - deleting cached copies...") + + @pydantic.validate_call def uploader( - cache_dir: pydantic.DirectoryPath, _offline_ids: list[str] | None = None + cache_dir: pydantic.DirectoryPath, max_workers: int, threading_threshold: int ) -> typing.Generator[tuple[str, SimvueObject], None, None]: 
cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) _id_mapping: dict[str, str] = { @@ -34,48 +79,19 @@ def uploader( for file_path in cache_dir.glob("server_ids/*.txt") } - for obj_type in UPLOAD_ORDER: - for file_path in cache_dir.glob(f"{obj_type}/*.json"): - _current_id = file_path.name.split(".")[0] - data = json.load(file_path.open()) - _exact_type: str = data.pop("obj_type") - try: - _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) - except AttributeError as e: - raise RuntimeError( - f"Attempt to initialise unknown type '{_exact_type}'" - ) from e - # We want to reconnect if there is an online ID stored for this file - obj_for_upload = _instance_class.new( - identifier=_id_mapping.get(_current_id, None), **data - ) - obj_for_upload.on_reconnect(_id_mapping) - - try: - obj_for_upload.commit() - _new_id = obj_for_upload.id - except RuntimeError as e: - if "status 409" in e.args[0]: - continue - raise e - if not _new_id: - raise RuntimeError( - f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" - ) - if _id_mapping.get(_current_id, None): - _logger.info(f"Updated {obj_for_upload.__class__.__name__} '{_new_id}'") - else: - _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") - file_path.unlink(missing_ok=True) - _id_mapping[_current_id] = _new_id - if obj_type == "runs": - cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text( - _new_id + for _obj_type in UPLOAD_ORDER: + _offline_files = list(cache_dir.glob(f"{_obj_type}/*.json")) + if len(_offline_files) < threading_threshold: + for file_path in _offline_files: + upload_cached_files(cache_dir, _obj_type, file_path, _id_mapping) + else: + with ThreadPoolExecutor(max_workers=max_workers) as executor: + _results = executor.map( + lambda file_path: upload_cached_files( + cache_dir=cache_dir, + obj_type=_obj_type, + file_path=file_path, + id_mapping=_id_mapping, + ), + _offline_files, ) - - if 
cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists(): - cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() - cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() - _logger.info( - f"Run {_current_id} closed - deleting cached copies..." - ) From a81472f038bb26ce84933ec5ccbbe1d2f0f4819d Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 27 Jan 2025 17:38:03 +0000 Subject: [PATCH 111/163] Getting uploader to work with artifacts --- simvue/api/objects/artifact.py | 31 ++++++++++++++++++------------- simvue/run.py | 6 +++--- 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index b85d8c98..b45b9080 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -52,7 +52,8 @@ def __init__( # If the artifact is an online instance, need a place to store the response # from the initial creation self._init_data: dict[str, dict] = {} - self._staging |= {"runs": {}} + if not self._staging.get("runs", None): + self._staging |= {"runs": {}} @classmethod def new( @@ -61,7 +62,7 @@ def new( name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], checksum: str, size: int, - storage_id: str | None = None, + storage: str | None = None, mime_type: str | None = None, original_path: pathlib.Path | None = None, metadata: dict[str, typing.Any] | None, @@ -73,10 +74,11 @@ def new( checksum=checksum, size=size, original_path=f"{original_path or ''}", - storage=storage_id, + storage=storage, mime_type=mime_type, metadata=metadata, _read_only=False, + **kwargs, ) _artifact.offline_mode(offline) @@ -97,7 +99,7 @@ def new_file( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - storage_id: str | None, + storage: str | None, file_path: pydantic.FilePath, mime_type: str | None, metadata: dict[str, typing.Any] | None, @@ -111,7 +113,7 @@ def new_file( ---------- name : str the name for this artifact - storage_id : str | 
None + storage : str | None the identifier for the storage location for this object category : "code" | "input" | "output" the category of this artifact @@ -136,7 +138,7 @@ def new_file( _artifact = Artifact.new( name=name, - storage_id=storage_id, + storage=storage, original_path=os.path.expandvars(_file_orig_path), size=_file_size, mime_type=_mime_type, @@ -156,7 +158,7 @@ def new_object( cls, *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - storage_id: str | None, + storage: str | None, obj: typing.Any, metadata: dict[str, typing.Any] | None, allow_pickling: bool = True, @@ -170,7 +172,7 @@ def new_object( ---------- name : str the name for this artifact - storage_id : str | None + storage : str | None the identifier for the storage location for this object obj : Any object to serialize and upload @@ -197,7 +199,7 @@ def new_object( _artifact = Artifact.new( name=name, - storage_id=storage_id, + storage=storage, original_path=None, size=sys.getsizeof(_serialized), mime_type=_data_type, @@ -209,7 +211,7 @@ def new_object( return _artifact def commit(self) -> None: - raise TypeError("Cannot call method 'commit' on write-once type 'Artifact'") + self._logger.info("Cannot call method 'commit' on write-once type 'Artifact'") def attach_to_run(self, run_id: str, category: Category) -> None: """Attach this artifact to a given run""" @@ -238,6 +240,9 @@ def attach_to_run(self, run_id: str, category: Category) -> None: ) def on_reconnect(self, id_mapping: dict[str, str]) -> None: + import pdb + + pdb.set_trace() _offline_staging = dict(self._staging["runs"].items()) self._staging["runs"] = {} for id, category in _offline_staging.items(): @@ -296,9 +301,9 @@ def original_path(self) -> str: return self._get_attribute("original_path") @property - def storage_id(self) -> str | None: - """Retrieve the storage_id identifier for this artifact""" - return self._get_attribute("storage_id") + def storage(self) -> str | None: + """Retrieve the storage 
identifier for this artifact""" + return self._get_attribute("storage") @property def mime_type(self) -> str: diff --git a/simvue/run.py b/simvue/run.py index cc35e513..a10533f7 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1283,7 +1283,7 @@ def save_object( name=_name, obj=obj, allow_pickling=allow_pickle, - storage_id=self._storage_id, + storage=self._storage_id, metadata=metadata, ) _artifact.attach_to_run(self.id, category) @@ -1348,7 +1348,7 @@ def save_file( # Register file _artifact = Artifact.new_file( name=name or stored_file_name, - storage_id=self._storage_id, + storage=self._storage_id, file_path=file_path, offline=self._user_config.run.mode == "offline", mime_type=filetype, @@ -1504,7 +1504,7 @@ def _tidy_run(self) -> None: self._dispatcher.purge() self._dispatcher.join() - if self._user_config.run.mode == "offline": + if self._user_config.run.mode == "offline" and self._status != "created": self._user_config.offline.cache.joinpath( "runs", f"{self._id}.closed" ).touch() From e55b4c87982bcc0fbcb52f698fd6b7b806b541a4 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 27 Jan 2025 17:38:15 +0000 Subject: [PATCH 112/163] Getting uploader to work with artifacts --- simvue/api/objects/artifact.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index b45b9080..10dcc455 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -240,9 +240,6 @@ def attach_to_run(self, run_id: str, category: Category) -> None: ) def on_reconnect(self, id_mapping: dict[str, str]) -> None: - import pdb - - pdb.set_trace() _offline_staging = dict(self._staging["runs"].items()) self._staging["runs"] = {} for id, category in _offline_staging.items(): From afa63dc8b7ae984d4197b7fbbfa7f01688581948 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 28 Jan 2025 08:58:05 +0000 Subject: [PATCH 113/163] Fix and shorten file save test --- simvue/run.py | 26 
++++++++++++++------------ tests/functional/test_run_class.py | 21 +++++++++++---------- 2 files changed, 25 insertions(+), 22 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index ab0cbdf7..ac68bc72 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1300,7 +1300,7 @@ def save_file( self, file_path: pydantic.FilePath, category: typing.Literal["input", "output", "code"], - filetype: str | None = None, + file_type: str | None = None, preserve_path: bool = False, name: typing.Optional[ typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] @@ -1315,7 +1315,7 @@ def save_file( path to the file to upload category : Literal['input', 'output', 'code'] category of file with respect to this run - filetype : str, optional + file_type : str, optional the MIME file type else this is deduced, by default None preserve_path : bool, optional whether to preserve the path during storage, by default False @@ -1351,7 +1351,7 @@ def save_file( storage_id=self._storage_id, file_path=file_path, offline=self._user_config.run.mode == "offline", - mime_type=filetype, + mime_type=file_type, metadata=metadata, ) _artifact.attach_to_run(self.id, category) @@ -1368,7 +1368,7 @@ def save_directory( self, directory: pydantic.DirectoryPath, category: typing.Literal["output", "input", "code"], - filetype: str | None = None, + file_type: str | None = None, preserve_path: bool = False, ) -> bool: """Upload files from a whole directory @@ -1379,7 +1379,7 @@ def save_directory( the directory to save to the run category : Literal[['output', 'input', 'code'] the category to assign to the saved objects within this directory - filetype : str, optional + file_type : str, optional manually specify the MIME type for items in the directory, by default None preserve_path : bool, optional preserve the full path, by default False @@ -1393,17 +1393,17 @@ def save_directory( self._error("Cannot save directory, run not inirialised") return False - if filetype: + if file_type: mimetypes.init() 
mimetypes_valid = [value for _, value in mimetypes.types_map.items()] - if filetype not in mimetypes_valid: + if file_type not in mimetypes_valid: self._error("Invalid MIME type specified") return False for dirpath, _, filenames in os.walk(directory): for filename in filenames: if (full_path := pathlib.Path(dirpath).joinpath(filename)).is_file(): - self.save_file(full_path, category, filetype, preserve_path) + self.save_file(full_path, category, file_type, preserve_path) return True @@ -1414,7 +1414,7 @@ def save_all( self, items: list[pydantic.FilePath | pydantic.DirectoryPath], category: typing.Literal["input", "output", "code"], - filetype: str | None = None, + file_type: str | None = None, preserve_path: bool = False, ) -> bool: """Save a set of files and directories @@ -1425,7 +1425,7 @@ def save_all( list of file paths and directories to save category : Literal['input', 'output', 'code'] the category to assign to the saved objects - filetype : str, optional + file_type : str, optional manually specify the MIME type for all items, by default None preserve_path : bool, optional _preserve the full path, by default False @@ -1437,9 +1437,11 @@ def save_all( """ for item in items: if item.is_file(): - save_file = self.save_file(item, category, filetype, preserve_path) + save_file = self.save_file(item, category, file_type, preserve_path) elif item.is_dir(): - save_file = self.save_directory(item, category, filetype, preserve_path) + save_file = self.save_directory( + item, category, file_type, preserve_path + ) else: self._error(f"{item}: No such file or directory") save_file = False diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index c380a954..4ade0b08 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -468,15 +468,16 @@ def test_set_folder_details(request: pytest.FixtureRequest) -> None: @pytest.mark.run @pytest.mark.parametrize( - "valid_mimetype", (True, False), ids=("valid_mime", 
"invalid_mime") + "valid_mimetype,preserve_path,name,allow_pickle,empty_file,category", + [ + (True, False, None, False, False, "input"), + (False, True, None, False, False, "output"), + (False, False, "test_file", False, False, "code"), + (False, False, None, True, False, "input"), + (False, False, None, False, True, "code") + ], + ids=[f"scenario_{i}" for i in range(1, 6)] ) -@pytest.mark.parametrize( - "preserve_path", (True, False), ids=("preserve_path", "modified_path") -) -@pytest.mark.parametrize("name", ("test_file", None), ids=("named", "nameless")) -@pytest.mark.parametrize("allow_pickle", (True, False), ids=("pickled", "unpickled")) -@pytest.mark.parametrize("empty_file", (True, False), ids=("empty", "content")) -@pytest.mark.parametrize("category", ("input", "output", "code")) def test_save_file_online( create_plain_run: typing.Tuple[sv_run.Run, dict], valid_mimetype: bool, @@ -500,7 +501,7 @@ def test_save_file_online( simvue_run.save_file( out_name, category=category, - mime_type=file_type, + file_type=file_type, preserve_path=preserve_path, name=name, ) @@ -509,7 +510,7 @@ def test_save_file_online( simvue_run.save_file( out_name, category=category, - mime_type=file_type, + file_type=file_type, preserve_path=preserve_path, ) return From 3bef0f6ba96eadc6aba54d822bcf6dac2b67b44e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 28 Jan 2025 09:35:03 +0000 Subject: [PATCH 114/163] Fix two tests --- simvue/api/objects/metrics.py | 3 +++ tests/functional/test_run_execute_process.py | 2 +- tests/unit/test_metrics.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index b31bf667..ca7519da 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -106,3 +106,6 @@ def delete( self, _linked_objects: list[str] | None = None, **kwargs ) -> dict[str, typing.Any]: raise NotImplementedError("Cannot delete metric set") + + def 
to_dict(self) -> dict[str, typing.Any]: + return self._staging diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index 4e14a393..00752472 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -33,7 +33,7 @@ def test_abort_all_processes(create_plain_run: tuple[Run, dict]) -> None: _run.add_process(f"process_{i}", executable="python", script=temp_f.name) assert _run.executor.get_command(f"process_{i}") == f"python {temp_f.name}" - time.sleep(5) + time.sleep(3) _run.kill_all_processes() end_time = time.time() diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index a9623410..791ebdea 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -36,11 +36,11 @@ def test_metrics_creation_online() -> None: } ], ) + assert _metrics.to_dict() _metrics.commit() assert _metrics.get(metrics=["x", "y", "z"], xaxis="step") assert _metrics.span(run_ids=[_run.id]) assert _metrics.names(run_ids=[_run.id]) - assert _metrics.to_dict() _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) From 3db61b79523d32645dca5d38886775bdfa7e9866 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 28 Jan 2025 11:50:38 +0000 Subject: [PATCH 115/163] Updates to sender and getting tests passing --- simvue/sender.py | 97 ++++++++++++++++++++++++++++++ simvue/upload.py | 97 ------------------------------ tests/conftest.py | 2 +- tests/functional/test_run_class.py | 38 +++++++----- tests/unit/test_artifact.py | 6 +- 5 files changed, 124 insertions(+), 116 deletions(-) delete mode 100644 simvue/upload.py diff --git a/simvue/sender.py b/simvue/sender.py index e69de29b..0e909f9a 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -0,0 +1,97 @@ +# Collator +import json +import pydantic +import logging +import typing +from concurrent.futures import ThreadPoolExecutor +from simvue.api.objects.base import SimvueObject + +import 
simvue.api.objects + +UPLOAD_ORDER: tuple[str, ...] = ( + "tenants", + "users", + "storage", + "folders", + "tags", + "alerts", + "runs", + "artifacts", + "metrics", + "events", +) + +_logger = logging.getLogger(__name__) + + +def upload_cached_files( + cache_dir: pydantic.DirectoryPath, + obj_type: str, + file_path: pydantic.FilePath, + id_mapping: dict[str, str], +): + _current_id = file_path.name.split(".")[0] + data = json.load(file_path.open()) + _exact_type: str = data.pop("obj_type") + try: + _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) + except AttributeError as e: + raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e + # We want to reconnect if there is an online ID stored for this file + obj_for_upload = _instance_class.new( + identifier=id_mapping.get(_current_id, None), **data + ) + obj_for_upload.on_reconnect(id_mapping) + + try: + obj_for_upload.commit() + _new_id = obj_for_upload.id + except RuntimeError as e: + if "status 409" in e.args[0]: + return + raise e + if not _new_id: + raise RuntimeError( + f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" + ) + if id_mapping.get(_current_id, None): + _logger.info(f"Updated {obj_for_upload.__class__.__name__} '{_new_id}'") + else: + _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") + file_path.unlink(missing_ok=True) + id_mapping[_current_id] = _new_id + if obj_type == "runs": + cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) + + if cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists(): + cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() + cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() + _logger.info(f"Run {_current_id} closed - deleting cached copies...") + + +@pydantic.validate_call +def sender( + cache_dir: pydantic.DirectoryPath, max_workers: int, threading_threshold: int +) -> typing.Generator[tuple[str, SimvueObject], None, 
None]: + cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) + _id_mapping: dict[str, str] = { + file_path.name.split(".")[0]: file_path.read_text() + for file_path in cache_dir.glob("server_ids/*.txt") + } + + for _obj_type in UPLOAD_ORDER: + _offline_files = list(cache_dir.glob(f"{_obj_type}/*.json")) + if len(_offline_files) < threading_threshold: + for file_path in _offline_files: + upload_cached_files(cache_dir, _obj_type, file_path, _id_mapping) + else: + with ThreadPoolExecutor(max_workers=max_workers) as executor: + _results = executor.map( + lambda file_path: upload_cached_files( + cache_dir=cache_dir, + obj_type=_obj_type, + file_path=file_path, + id_mapping=_id_mapping, + ), + _offline_files, + ) diff --git a/simvue/upload.py b/simvue/upload.py deleted file mode 100644 index a9d22d50..00000000 --- a/simvue/upload.py +++ /dev/null @@ -1,97 +0,0 @@ -# Collator -import json -import pydantic -import logging -import typing -from concurrent.futures import ThreadPoolExecutor -from simvue.api.objects.base import SimvueObject - -import simvue.api.objects - -UPLOAD_ORDER: tuple[str, ...] 
= ( - "tenants", - "users", - "storage", - "folders", - "tags", - "alerts", - "runs", - "artifacts", - "metrics", - "events", -) - -_logger = logging.getLogger(__name__) - - -def upload_cached_files( - cache_dir: pydantic.DirectoryPath, - obj_type: str, - file_path: pydantic.FilePath, - id_mapping: dict[str, str], -): - _current_id = file_path.name.split(".")[0] - data = json.load(file_path.open()) - _exact_type: str = data.pop("obj_type") - try: - _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) - except AttributeError as e: - raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e - # We want to reconnect if there is an online ID stored for this file - obj_for_upload = _instance_class.new( - identifier=id_mapping.get(_current_id, None), **data - ) - obj_for_upload.on_reconnect(id_mapping) - - try: - obj_for_upload.commit() - _new_id = obj_for_upload.id - except RuntimeError as e: - if "status 409" in e.args[0]: - return - raise e - if not _new_id: - raise RuntimeError( - f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" - ) - if id_mapping.get(_current_id, None): - _logger.info(f"Updated {obj_for_upload.__class__.__name__} '{_new_id}'") - else: - _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") - file_path.unlink(missing_ok=True) - id_mapping[_current_id] = _new_id - if obj_type == "runs": - cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) - - if cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists(): - cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() - cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() - _logger.info(f"Run {_current_id} closed - deleting cached copies...") - - -@pydantic.validate_call -def uploader( - cache_dir: pydantic.DirectoryPath, max_workers: int, threading_threshold: int -) -> typing.Generator[tuple[str, SimvueObject], None, None]: - 
cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) - _id_mapping: dict[str, str] = { - file_path.name.split(".")[0]: file_path.read_text() - for file_path in cache_dir.glob("server_ids/*.txt") - } - - for _obj_type in UPLOAD_ORDER: - _offline_files = list(cache_dir.glob(f"{_obj_type}/*.json")) - if len(_offline_files) < threading_threshold: - for file_path in _offline_files: - upload_cached_files(cache_dir, _obj_type, file_path, _id_mapping) - else: - with ThreadPoolExecutor(max_workers=max_workers) as executor: - _results = executor.map( - lambda file_path: upload_cached_files( - cache_dir=cache_dir, - obj_type=_obj_type, - file_path=file_path, - id_mapping=_id_mapping, - ), - _offline_files, - ) diff --git a/tests/conftest.py b/tests/conftest.py index c010281e..69277e00 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -121,7 +121,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur run.config(suppress_errors=False) run.init( - name=f"test_run_{TEST_DATA['metadata']['test_identifier']}", + name=f"test_run_{TEST_DATA['metadata']['test_identifier']}_{uuid.uuid4()}", tags=TEST_DATA["tags"], folder=TEST_DATA["folder"], visibility="tenant" if os.environ.get("CI") else None, diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index c380a954..40010386 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -152,13 +152,14 @@ def test_log_metrics( def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: METRICS = {"a": 10, "b": 1.2, "c": 2} run, _ = create_plain_run_offline + run_name = run._name run.log_metrics(METRICS) - run_id, *_ = sv_send.sender() - time.sleep(1.0) + time.sleep(1) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) run.close() client = sv_cl.Client() _data = client.get_metric_values( - run_ids=[run_id], + run_ids=[client.get_run_id_from_name(run_name)], 
metric_names=list(METRICS.keys()), xaxis="step", aggregate=False, @@ -190,12 +191,13 @@ def test_log_events_online(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: EVENT_MSG = "Hello offline world!" run, _ = create_plain_run_offline + run_name = run._name run.log_event(EVENT_MSG) - run_id, *_ = sv_send.sender() + time.sleep(1) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) run.close() - time.sleep(1.0) client = sv_cl.Client() - event_data = client.get_events(run_id, count_limit=1) + event_data = client.get_events(client.get_run_id_from_name(run_name), count_limit=1) assert event_data[0].get("message", EVENT_MSG) @@ -203,12 +205,17 @@ def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) - @pytest.mark.offline def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: run, run_data = create_plain_run_offline - run_id, *_ = sv_send.sender() - run.close() time.sleep(1.0) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) + run.close() client = sv_cl.Client() tags = client.get_tags() - assert run_data["tags"][-1] in [tag["name"] for tag in tags] + + # Find tag + run_tags = [tag for tag in tags if tag[1].name == run_data["tags"][-1]] + assert len(run_tags) == 1 + client.delete_tag(run_tags[0][0]) + @pytest.mark.run @@ -245,15 +252,16 @@ def test_update_metadata_offline( ) -> None: METADATA = {"a": 10, "b": 1.2, "c": "word"} run, _ = create_plain_run_offline + run_name = run._name run.update_metadata(METADATA) - run_id, *_ = sv_send.sender() + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) run.close() time.sleep(1.0) client = sv_cl.Client() - run_info = client.get_run(run_id) + run_info = client.get_run(client.get_run_id_from_name(run_name)) for key, value in METADATA.items(): - assert run_info.get("metadata", {}).get(key) == value + assert run_info.metadata.get(key) == value 
@pytest.mark.run @@ -545,6 +553,7 @@ def test_save_file_offline( category: typing.Literal["input", "output", "code"] ) -> None: simvue_run, _ = create_plain_run_offline + run_name = simvue_run._name file_type: str = "text/plain" with tempfile.TemporaryDirectory() as tempd: with open( @@ -559,12 +568,11 @@ def test_save_file_offline( preserve_path=preserve_path, name=name, ) - run_id, *_ = sv_send.sender() + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) simvue_run.close() time.sleep(1.0) os.remove(out_name) client = sv_cl.Client() - assert run_id base_name = name or out_name.name if preserve_path: out_loc = pathlib.Path(tempd) / out_name.parent @@ -573,7 +581,7 @@ def test_save_file_offline( out_loc = pathlib.Path(tempd) stored_name = pathlib.Path(base_name) out_file = out_loc.joinpath(name or out_name.name) - client.get_artifact_as_file(run_id=run_id, name=f"{name or stored_name}", path=tempd) + client.get_artifact_as_file(run_id=client.get_run_id_from_name(run_name), name=f"{name or stored_name}", output_dir=tempd) assert out_loc.joinpath(name or out_name.name).exists() diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 1370da42..0bb0ff1e 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -29,7 +29,7 @@ def test_artifact_creation_online() -> None: _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", file_path=_path, - storage_id=None, + storage=None, mime_type=None, metadata=None ) @@ -47,7 +47,7 @@ def test_artifact_creation_online() -> None: _test_array = numpy.array(range(10)) _artifact = Artifact.new_object( name=f"test_artifact_obj_{_uuid}", - storage_id=None, + storage=None, obj=_test_array, metadata=None ) @@ -76,7 +76,7 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: _artifact = Artifact.new_file( name=f"test_artifact_{_uuid}", file_path=_path, - storage_id=None, + storage=None, mime_type=None, offline=True, metadata=None From 
6b90106967ce73dc6cdd646c93dd1c37bcb4604e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 28 Jan 2025 16:35:56 +0000 Subject: [PATCH 116/163] Fix naming of runs in metrics return --- simvue/client.py | 2 +- tests/conftest.py | 15 +++++++-------- tests/functional/test_client.py | 2 +- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/simvue/client.py b/simvue/client.py index 2417627f..990ac522 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -815,7 +815,7 @@ def get_metric_values( return parse_run_set_metrics( _run_metrics, xaxis=xaxis, - run_labels=list(_run_data.keys()), + run_labels=list(_run_metrics.keys()), parse_to=output_format, ) diff --git a/tests/conftest.py b/tests/conftest.py index c010281e..f7afcdae 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +from numpy import fix import pytest import pytest_mock import typing @@ -36,7 +37,6 @@ def clear_out_files() -> None: out_files += list(pathlib.Path.cwd().glob("test_*.err")) for file_obj in out_files: - print(file_obj) file_obj.unlink() @@ -142,14 +142,14 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur for i in range(5): run.create_event_alert( - name=f"test_alert/alert_{i}", + name=f"test_alert/alert_{i}/{fix_use_id}", frequency=1, pattern=TEST_DATA['event_contains'] ) - TEST_DATA['created_alerts'].append(f"test_alert/alert_{i}") + TEST_DATA['created_alerts'].append(f"test_alert/alert_{i}/{fix_use_id}") run.create_metric_threshold_alert( - name='test_alert/value_below_1', + name=f'test_alert/value_below_1/{fix_use_id}', frequency=1, rule='is below', threshold=1, @@ -157,7 +157,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur window=2 ) run.create_metric_range_alert( - name='test_alert/value_within_1', + name=f'test_alert/value_within_1/{fix_use_id}', frequency=1, rule = "is inside range", range_low = 2, @@ -166,8 +166,8 @@ def setup_test_run(run: sv_run.Run, 
create_objects: bool, request: pytest.Fixtur window=2 ) TEST_DATA['created_alerts'] += [ - "test_alert/value_below_1", - "test_alert/value_within_1" + f"test_alert/value_below_1/{fix_use_id}", + f"test_alert/value_within_1/{fix_use_id}" ] for i in range(5): @@ -204,7 +204,6 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur run.save_file(test_script, category="code", name="test_code_upload") TEST_DATA["file_3"] = "test_code_upload" - time.sleep(1.) return TEST_DATA diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 8e62aef3..3b6c6f24 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -86,7 +86,7 @@ def test_get_metric_values( assert all( i in _value_types for i in ("average", "min", "max") ), f"Expected ('average', 'min', 'max') in {_value_types}" - else: + elif not use_name_labels: _runs = {i[1] for i in _first_entry} assert create_test_run[1]["run_id"] in _runs From 3840f458cfc9b0948d04eb75ecf602bd1cbbda2c Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 28 Jan 2025 17:01:26 +0000 Subject: [PATCH 117/163] Got alerts working with offline mode --- simvue/api/objects/alert/fetch.py | 9 ++++++++- simvue/api/objects/alert/metrics.py | 1 + simvue/api/objects/run.py | 11 +++++++++++ simvue/run.py | 6 ++++-- simvue/sender.py | 24 ++++++++++++++++-------- 5 files changed, 40 insertions(+), 11 deletions(-) diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 9ee8d8e6..6897de05 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -41,8 +41,15 @@ def __new__(cls, identifier: str, **kwargs) -> AlertType: @classmethod def get( - cls, count: int | None = None, offset: int | None = None, **kwargs + cls, + offline: bool = False, + count: int | None = None, + offset: int | None = None, + **kwargs, ) -> typing.Generator[tuple[str, AlertType], None, None]: + if offline: + return + # Currently no alert 
filters kwargs.pop("filters", None) diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 73ffb8c8..cfa4dfc1 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -102,6 +102,7 @@ def new( enabled=enabled, _read_only=False, ) + _alert._staging |= _alert_definition _alert.offline_mode(offline) return _alert diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 316211a8..c6259306 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -324,3 +324,14 @@ def abort(self, reason: str) -> dict[str, typing.Any]: scenario=f"Abort of run '{self.id}'", response=_response, ) + + def on_reconnect(self, offline_to_online_id_mapping: dict[str, str]): + online_alert_ids = [] + for id in self._staging.get("alerts", []): + try: + online_alert_ids.append(offline_to_online_id_mapping[id]) + except KeyError: + raise KeyError( + "Could not find alert ID in offline to online ID mapping." + ) + self._staging["alerts"] = online_alert_ids diff --git a/simvue/run.py b/simvue/run.py index eed01f42..7ce4eefa 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1629,7 +1629,7 @@ def add_alerts( if names and not ids: try: - if alerts := Alert.get(): + if alerts := Alert.get(offline=self._user_config.run.mode == "offline"): for alert in alerts: if alert.name in names: ids.append(alert.id) @@ -1653,7 +1653,9 @@ def _attach_alert_to_run(self, alert: AlertBase) -> str | None: # Check if the alert already exists _alert_id: str | None = None - for _, _existing_alert in Alert.get(): + for _, _existing_alert in Alert.get( + offline=self._user_config.run.mode == "offline" + ): if _existing_alert.compare(alert): _alert_id = _existing_alert.id logger.info("Existing alert found with id: %s", _existing_alert.id) diff --git a/simvue/sender.py b/simvue/sender.py index 0e909f9a..a285c184 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -31,15 +31,15 @@ def upload_cached_files( id_mapping: 
dict[str, str], ): _current_id = file_path.name.split(".")[0] - data = json.load(file_path.open()) - _exact_type: str = data.pop("obj_type") + _data = json.load(file_path.open()) + _exact_type: str = _data.pop("obj_type") try: _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) except AttributeError as e: raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e # We want to reconnect if there is an online ID stored for this file obj_for_upload = _instance_class.new( - identifier=id_mapping.get(_current_id, None), **data + identifier=id_mapping.get(_current_id, None), **_data ) obj_for_upload.on_reconnect(id_mapping) @@ -60,13 +60,21 @@ def upload_cached_files( _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") file_path.unlink(missing_ok=True) id_mapping[_current_id] = _new_id - if obj_type == "runs": + + if obj_type in ["alerts", "runs"]: cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) - if cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists(): - cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() - cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() - _logger.info(f"Run {_current_id} closed - deleting cached copies...") + if ( + obj_type == "runs" + and cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists() + ): + # Get list of alerts created by this run - their IDs can be deleted + for id in _data.get("alerts", []): + cache_dir.joinpath("server_ids", f"{id}.txt").unlink() + + cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() + cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() + _logger.info(f"Run {_current_id} closed - deleting cached copies...") @pydantic.validate_call From 630eddc1646b49b7530eae166cb79358e8168d17 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 28 Jan 2025 17:54:18 +0000 Subject: [PATCH 118/163] Stopped duplicate alert upload --- simvue/api/objects/run.py | 9 
++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index c6259306..403db777 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -334,4 +334,11 @@ def on_reconnect(self, offline_to_online_id_mapping: dict[str, str]): raise KeyError( "Could not find alert ID in offline to online ID mapping." ) - self._staging["alerts"] = online_alert_ids + # If run is offline, no alerts have been added yet, so add all alerts: + if self._identifier is not None and self._identifier.startswith("offline"): + self._staging["alerts"] = online_alert_ids + # Otherwise, only add alerts which have not yet been added + else: + self._staging["alerts"] = [ + id for id in online_alert_ids if id not in list(self.alerts) + ] From 9c1d8e1833e4463a99df705f8fc0958d0dd31e61 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 29 Jan 2025 11:03:18 +0000 Subject: [PATCH 119/163] Added threading lock --- simvue/sender.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/simvue/sender.py b/simvue/sender.py index a285c184..54103aef 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -4,6 +4,7 @@ import logging import typing from concurrent.futures import ThreadPoolExecutor +import threading from simvue.api.objects.base import SimvueObject import simvue.api.objects @@ -29,6 +30,7 @@ def upload_cached_files( obj_type: str, file_path: pydantic.FilePath, id_mapping: dict[str, str], + lock: threading.Lock, ): _current_id = file_path.name.split(".")[0] _data = json.load(file_path.open()) @@ -41,7 +43,8 @@ def upload_cached_files( obj_for_upload = _instance_class.new( identifier=id_mapping.get(_current_id, None), **_data ) - obj_for_upload.on_reconnect(id_mapping) + with lock: + obj_for_upload.on_reconnect(id_mapping) try: obj_for_upload.commit() @@ -59,7 +62,9 @@ def upload_cached_files( else: _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") 
file_path.unlink(missing_ok=True) - id_mapping[_current_id] = _new_id + + with lock: + id_mapping[_current_id] = _new_id if obj_type in ["alerts", "runs"]: cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) @@ -86,12 +91,13 @@ def sender( file_path.name.split(".")[0]: file_path.read_text() for file_path in cache_dir.glob("server_ids/*.txt") } + _lock = threading.Lock() for _obj_type in UPLOAD_ORDER: _offline_files = list(cache_dir.glob(f"{_obj_type}/*.json")) if len(_offline_files) < threading_threshold: for file_path in _offline_files: - upload_cached_files(cache_dir, _obj_type, file_path, _id_mapping) + upload_cached_files(cache_dir, _obj_type, file_path, _id_mapping, _lock) else: with ThreadPoolExecutor(max_workers=max_workers) as executor: _results = executor.map( @@ -100,6 +106,7 @@ def sender( obj_type=_obj_type, file_path=file_path, id_mapping=_id_mapping, + lock=_lock, ), _offline_files, ) From 045fc23f280420b7081a8f7e8ae89d666a22202b Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 29 Jan 2025 11:03:53 +0000 Subject: [PATCH 120/163] Renamed id mapping --- simvue/api/objects/events.py | 4 ++-- simvue/api/objects/metrics.py | 4 ++-- simvue/api/objects/run.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py index 03042652..46919674 100644 --- a/simvue/api/objects/events.py +++ b/simvue/api/objects/events.py @@ -114,6 +114,6 @@ def delete( ) -> dict[str, typing.Any]: raise NotImplementedError("Cannot delete event set") - def on_reconnect(self, offline_to_online_id_mapping: dict[str, str]): - if online_run_id := offline_to_online_id_mapping.get(self._staging["run"]): + def on_reconnect(self, id_mapping: dict[str, str]): + if online_run_id := id_mapping.get(self._staging["run"]): self._staging["run"] = online_run_id diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index 4efdeeec..47d39aae 100644 --- 
a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -108,8 +108,8 @@ def delete( ) -> dict[str, typing.Any]: raise NotImplementedError("Cannot delete metric set") - def on_reconnect(self, offline_to_online_id_mapping: dict[str, str]): - if online_run_id := offline_to_online_id_mapping.get(self._staging["run"]): + def on_reconnect(self, id_mapping: dict[str, str]): + if online_run_id := id_mapping.get(self._staging["run"]): self._staging["run"] = online_run_id def to_dict(self) -> dict[str, typing.Any]: diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 403db777..795c7f51 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -325,11 +325,11 @@ def abort(self, reason: str) -> dict[str, typing.Any]: response=_response, ) - def on_reconnect(self, offline_to_online_id_mapping: dict[str, str]): + def on_reconnect(self, id_mapping: dict[str, str]): online_alert_ids = [] for id in self._staging.get("alerts", []): try: - online_alert_ids.append(offline_to_online_id_mapping[id]) + online_alert_ids.append(id_mapping[id]) except KeyError: raise KeyError( "Could not find alert ID in offline to online ID mapping." From 3756314e62850c05fdf5b45e0f9d040828527ef1 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 29 Jan 2025 11:21:54 +0000 Subject: [PATCH 121/163] Added docstrings --- simvue/sender.py | 49 +++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 40 insertions(+), 9 deletions(-) diff --git a/simvue/sender.py b/simvue/sender.py index 54103aef..b3ca0b7e 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -1,8 +1,13 @@ -# Collator +""" +Simvue Sender +============== + +Function to send data cached by Simvue in Offline mode to the server. 
+""" + import json import pydantic import logging -import typing from concurrent.futures import ThreadPoolExecutor import threading from simvue.api.objects.base import SimvueObject @@ -25,13 +30,28 @@ _logger = logging.getLogger(__name__) -def upload_cached_files( +def upload_cached_file( cache_dir: pydantic.DirectoryPath, obj_type: str, file_path: pydantic.FilePath, id_mapping: dict[str, str], lock: threading.Lock, ): + """Upload data stored in a cached file to the Simvue server. + + Parameters + ---------- + cache_dir : pydantic.DirectoryPath + The directory where cached files are stored + obj_type : str + The type of object which should be created for this cached file + file_path : pydantic.FilePath + The path to the cached file to upload + id_mapping : dict[str, str] + A mapping of offline to online object IDs + lock : threading.Lock + A lock to prevent multiple threads accessing the id mapping directory at once + """ _current_id = file_path.name.split(".")[0] _data = json.load(file_path.open()) _exact_type: str = _data.pop("obj_type") @@ -49,10 +69,10 @@ def upload_cached_files( try: obj_for_upload.commit() _new_id = obj_for_upload.id - except RuntimeError as e: - if "status 409" in e.args[0]: + except RuntimeError as error: + if "status 409" in error.args[0]: return - raise e + raise error if not _new_id: raise RuntimeError( f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" @@ -85,7 +105,18 @@ def upload_cached_files( @pydantic.validate_call def sender( cache_dir: pydantic.DirectoryPath, max_workers: int, threading_threshold: int -) -> typing.Generator[tuple[str, SimvueObject], None, None]: +): + """Send data from a local cache directory to the Simvue server. 
+ + Parameters + ---------- + cache_dir : pydantic.DirectoryPath + The directory where cached files are stored + max_workers : int + The maximum number of threads to use + threading_threshold : int + The number of cached files above which threading will be used + """ cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) _id_mapping: dict[str, str] = { file_path.name.split(".")[0]: file_path.read_text() @@ -97,11 +128,11 @@ def sender( _offline_files = list(cache_dir.glob(f"{_obj_type}/*.json")) if len(_offline_files) < threading_threshold: for file_path in _offline_files: - upload_cached_files(cache_dir, _obj_type, file_path, _id_mapping, _lock) + upload_cached_file(cache_dir, _obj_type, file_path, _id_mapping, _lock) else: with ThreadPoolExecutor(max_workers=max_workers) as executor: _results = executor.map( - lambda file_path: upload_cached_files( + lambda file_path: upload_cached_file( cache_dir=cache_dir, obj_type=_obj_type, file_path=file_path, From f244b924b49c9584fbfa715a28876fa09c285d13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 29 Jan 2025 12:33:28 +0000 Subject: [PATCH 122/163] Tighter constraint on abort and allow alternative user agent spec --- .gitignore | 2 ++ simvue/api/objects/alert/fetch.py | 1 + simvue/api/objects/base.py | 3 ++- simvue/api/objects/run.py | 4 ++-- tests/unit/test_run.py | 24 +++++++++++++++--------- 5 files changed, 22 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index 038b42b7..ffa94acc 100644 --- a/.gitignore +++ b/.gitignore @@ -147,3 +147,5 @@ offline/ # Vagrant Vagrantfile + +.sourcery* diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 9ee8d8e6..010bce96 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -48,6 +48,7 @@ def get( _class_instance = AlertBase(_local=True, _read_only=True, **kwargs) _url = f"{_class_instance._base_url}" + _response = sv_get( _url, 
headers=_class_instance._headers, diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 64816a0c..3a3dca98 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -128,6 +128,7 @@ def __init__( identifier: str | None = None, _read_only: bool = True, _local: bool = False, + _user_agent: str | None = None, **kwargs, ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") @@ -164,7 +165,7 @@ def __init__( self._headers: dict[str, str] = { "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", - "User-Agent": f"Simvue Python client {__version__}", + "User-Agent": _user_agent or f"Simvue Python client {__version__}", } self._staging: dict[str, typing.Any] = {} diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index a2cd68bb..239b893a 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -312,14 +312,14 @@ def artifacts(self) -> list[dict[str, typing.Any]]: @pydantic.validate_call def abort(self, reason: str) -> dict[str, typing.Any]: if not self._abort_url: - return {} + raise RuntimeError("Cannot abort run, no endpoint defined") _response = sv_put( f"{self._abort_url}", headers=self._headers, data={"reason": reason} ) return get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + expected_status=[http.HTTPStatus.OK], scenario=f"Abort of run '{self.id}'", response=_response, ) diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index be099fb4..9272a65b 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -54,19 +54,24 @@ def test_run_modification_online() -> None: time.sleep(1) _now = datetime.datetime.now() _new_run = Run(identifier=_run.id) + assert _new_run.status == "created" _new_run.read_only(False) + _new_run.status = "running" _new_run.name = "simvue_test_run" _new_run.description = "Simvue test run" _new_run.tags = ["simvue", "test", "tag"] _new_run.ttl = 120 assert 
_new_run.ttl != 120 _new_run.commit() - print(_new_run.staged) time.sleep(1) - assert _new_run.ttl == 120 - assert _new_run.description == "Simvue test run" - assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) - assert _new_run.name == "simvue_test_run" + assert _run.ttl == 120 + assert _run.description == "Simvue test run" + assert sorted(_run.tags) == sorted(["simvue", "test", "tag"]) + assert _run.name == "simvue_test_run" + assert _run.status == "running" + _run.abort("test_run_abort") + assert _new_run.status == "terminated" + assert _run.status == "terminated" _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) @@ -84,15 +89,16 @@ def test_run_modification_offline() -> None: time.sleep(1) _now = datetime.datetime.now() _new_run = Run(identifier=_run.id) + # Property has not been committed to offline + # object so not yet available + with pytest.raises(AttributeError): + _new_run.ttl + _new_run.read_only(False) _new_run.name = "simvue_test_run" _new_run.description = "Simvue test run" _new_run.tags = ["simvue", "test", "tag"] _new_run.ttl = 120 - # Property has not been committed to offline - # object so not yet available - with pytest.raises(AttributeError): - _new_run.ttl _new_run.commit() From 2ad3c9002adf7c8a4464ee81a22a985746eddc0b Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 29 Jan 2025 14:00:17 +0000 Subject: [PATCH 123/163] Fixing offline artifact test --- tests/unit/test_artifact.py | 19 ++++++++++--------- tests/unit/test_file_storage.py | 1 - 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 0bb0ff1e..d34dbc89 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -8,7 +8,8 @@ from simvue.api.objects import Artifact, Run from simvue.api.objects.folder import Folder -from simvue.upload import uploader +from simvue.sender import sender +from simvue.client import Client @pytest.mark.api 
@pytest.mark.online @@ -64,7 +65,7 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name, offline=True) - _run = Run.new(folder=_folder_name, offline=True) + _run = Run.new(name=f"test_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) _path = offline_test.joinpath("hello_world.txt") @@ -81,14 +82,14 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: offline=True, metadata=None ) - time.sleep(1) + _artifact.attach_to_run(_run._identifier, category="input") assert _artifact.name == f"test_artifact_{_uuid}" - _created_object_counter: int = 0 - for _offline, _obj in uploader(offline_test.joinpath(".simvue"), _offline_ids=[_folder.id, _run.id, _artifact.id]): - _created_object_counter += 1 - assert _obj.to_dict() - _obj.delete() - assert _created_object_counter == 3 + sender(offline_test.joinpath(".simvue"), 1, 10) + time.sleep(1) + client = Client() + _run_id = client.get_run_id_from_name(f"test_artifact_creation_offline_{_uuid}") + client.get_artifact_as_file(_run_id, _artifact.name, offline_test.joinpath("downloaded").mkdir()) + assert offline_test.joinpath("downloaded.txt").read_text() == "Hello World!" 
_run.delete() _folder.delete() diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index 6af25a1e..fa217216 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -4,7 +4,6 @@ import uuid from simvue.api.objects import FileStorage -from simvue.upload import uploader @pytest.mark.api @pytest.mark.online From 9fdaff13b6c6f9c31eedd62934516c63da956692 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 29 Jan 2025 15:22:41 +0000 Subject: [PATCH 124/163] Improving and fixing offline folder tests --- tests/unit/test_folder.py | 48 ++++++++++++++++++++++++++++----------- 1 file changed, 35 insertions(+), 13 deletions(-) diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 8a39389e..75559b37 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -4,9 +4,11 @@ import contextlib import json import time +import os from simvue.api.objects.folder import Folder - +from simvue.sender import sender +from simvue.client import Client @pytest.mark.api @pytest.mark.online def test_folder_creation_online() -> None: @@ -33,15 +35,23 @@ def test_folder_creation_offline() -> None: _path = f"/simvue_unit_testing/objects/folder/{_uuid}" _folder = Folder.new(path=_path, offline=True) _folder.commit() - assert _folder.id - assert _folder.path == _path - - _folder.delete() with _folder._local_staging_file.open() as in_f: _local_data = json.load(in_f) - - assert not _local_data.get(_folder._label, {}).get(_folder.id) + + assert _folder._local_staging_file.name.split(".")[0] == _folder.id + assert _local_data.get("path", None) == _path + + sender(_folder._local_staging_file.parents[1], 2, 10) + time.sleep(1) + client = Client() + + _folder_new = client.get_folder(_path) + assert _folder_new.path == _path + + _folder_new.delete() + + assert not _folder._local_staging_file.exists() @pytest.mark.api @@ -77,15 +87,27 @@ def test_folder_modification_offline() -> None: _folder.commit() time.sleep(1) 
_folder_new = Folder(identifier=_folder.id) + _folder_new.read_only(False) _folder_new.tags = _tags _folder_new.description = _description - _folder_new.visibility.tenant = True _folder_new.commit() - assert _folder_new.tags == _tags - assert _folder.tags == _tags - assert _folder_new.description == _description - assert _folder.description == _description - assert _folder_new.visibility.tenant + + with _folder._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _folder._local_staging_file.name.split(".")[0] == _folder.id + assert _local_data.get("path", None) == _path + assert _local_data.get("description", None) == _description + assert _local_data.get("tags", None) == _tags + + sender(_folder._local_staging_file.parents[1], 2, 10) + time.sleep(1) + + client = Client() + _folder_online = client.get_folder(_path) + assert _folder_online.path == _path + assert _folder_online.description == _description + assert _folder_online.tags == _tags _folder_new.delete() From 20e86151695618ac9d20132a8f4d99bc7775666b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 29 Jan 2025 16:10:05 +0000 Subject: [PATCH 125/163] Add missing properties to User --- simvue/api/objects/administrator/user.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index 277016a1..e90c2192 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -129,3 +129,16 @@ def enabled(self) -> bool: @pydantic.validate_call def enabled(self, is_enabled: bool) -> None: self._staging["is_enabled"] = is_enabled + + @property + @staging_check + def email(self) -> str: + if self.id and self.id.startswith("offline_"): + return self._get_attribute("user")["email"] + return self._get_attribute("email") + + @email.setter + @write_only + @pydantic.validate_call + def email(self, email: str) -> None: + self._staging["email"] 
= email From 4c987a4362d5a07bea7f734996d4e2fcfe04ee31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 29 Jan 2025 17:03:50 +0000 Subject: [PATCH 126/163] Added whoami to stats --- simvue/api/objects/stats.py | 13 +++++++++++++ tests/unit/test_stats.py | 1 + 2 files changed, 14 insertions(+) diff --git a/simvue/api/objects/stats.py b/simvue/api/objects/stats.py index 094c5376..3bc13c67 100644 --- a/simvue/api/objects/stats.py +++ b/simvue/api/objects/stats.py @@ -6,9 +6,12 @@ """ +import http import typing from .base import SimvueObject +from simvue.api.request import get as sv_get, get_json_from_response +from simvue.api.url import URL __all__ = ["Stats"] @@ -27,6 +30,16 @@ def __init__(self) -> None: def new(cls, **kwargs) -> None: raise AttributeError("Creation of statistics objects is not supported") + def whoami(self) -> dict[str, str]: + """Return the current user""" + _url: URL = URL(self._user_config.server.url) / "whoami" + _response = sv_get(url=f"{_url}", headers=self._headers) + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving current user", + ) + def offline_mode(self, is_true: bool) -> None: if is_true: raise AttributeError("Statistics only available online") diff --git a/tests/unit/test_stats.py b/tests/unit/test_stats.py index 0cb012ff..e2e67f1d 100644 --- a/tests/unit/test_stats.py +++ b/tests/unit/test_stats.py @@ -12,6 +12,7 @@ def test_stats() -> None: assert isinstance(_statistics.runs.completed, int) assert isinstance(_statistics.runs.data, int) assert _statistics.to_dict() + assert _statistics.whoami() with pytest.raises(AttributeError): Stats.new() From 117837922cd783ab3dd7f49a368bced03b04903a Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 29 Jan 2025 18:00:32 +0000 Subject: [PATCH 127/163] Changes to get offline range alert tests passing --- simvue/api/objects/alert/events.py | 1 + simvue/api/objects/alert/metrics.py | 4 ++- 
tests/unit/test_metric_range_alert.py | 48 +++++++++++++++++++++++---- 3 files changed, 46 insertions(+), 7 deletions(-) diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index af166b43..2ac35365 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -79,6 +79,7 @@ def new( enabled=enabled, _read_only=False, ) + _alert._staging |= _alert_definition _alert.offline_mode(offline) return _alert diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index cfa4dfc1..7a76e5f1 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -135,6 +135,7 @@ def new( frequency: pydantic.PositiveInt, enabled: bool = True, offline: bool = False, + **_, ) -> Self: """Create a new metric range alert either locally or on the server @@ -180,7 +181,7 @@ def new( "range_low": range_low, "range_high": range_high, } - _alert = MetricsThresholdAlert( + _alert = MetricsRangeAlert( name=name, description=description, notification=notification, @@ -189,6 +190,7 @@ def new( alert=_alert_definition, _read_only=False, ) + _alert._staging |= _alert_definition _alert.offline_mode(offline) return _alert diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index be9ce9e7..c8007df3 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -5,6 +5,8 @@ import uuid from simvue.api.objects import MetricsRangeAlert, Alert +from simvue.client import Client +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -52,13 +54,29 @@ def test_metric_range_alert_creation_offline() -> None: assert _alert.alert.frequency == 1 assert _alert.name == f"metrics_range_alert_{_uuid}" assert _alert.notification == "none" - _alert.delete() with _alert._local_staging_file.open() as in_f: _local_data = json.load(in_f) + assert _local_data.get("source") == "metrics" + assert 
_local_data.get("alert").get("frequency") == 1 + assert _local_data.get("name") == f"metrics_range_alert_{_uuid}" + assert _local_data.get("notification") == "none" + assert _local_data.get("alert").get("range_low") == 10 + sender(_alert._local_staging_file.parents[1], 1, 10) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) - assert not _local_data.get(_alert._label, {}).get(_alert.id) - + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_range_alert_{_uuid}" + assert _online_alert.alert.range_low == 10 + + _alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + @pytest.mark.api @pytest.mark.online @@ -109,13 +127,31 @@ def test_metric_range_alert_modification_offline() -> None: _alert.commit() time.sleep(1) _new_alert = Alert(_alert.id) + _new_alert.read_only(False) assert isinstance(_new_alert, MetricsRangeAlert) _new_alert.description = "updated!" - assert _new_alert.description != "updated!" _new_alert.commit() - assert _new_alert.description == "updated!" - _new_alert.delete() + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("description") == "updated!" 
+ + sender(_alert._local_staging_file.parents[1], 1, 10) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_range_alert_{_uuid}" + assert _online_alert.alert.range_low == 10 + assert _online_alert.description == "updated!" + + _alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + @pytest.mark.api @pytest.mark.online def test_metric_range_alert_properties() -> None: From 59b049a86fc61b42a7777c242de87a6f01c69c79 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 30 Jan 2025 09:05:04 +0000 Subject: [PATCH 128/163] Fixing threshold alert offline tests --- tests/unit/test_artifact.py | 170 +++++++++++----------- tests/unit/test_metric_range_alert.py | 1 - tests/unit/test_metric_threshold_alert.py | 56 +++++-- 3 files changed, 132 insertions(+), 95 deletions(-) diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index d34dbc89..cfe6d188 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -1,95 +1,95 @@ -import os -import pytest -import uuid -import time -import pathlib -import tempfile -import numpy +# import os +# import pytest +# import uuid +# import time +# import pathlib +# import tempfile +# import numpy -from simvue.api.objects import Artifact, Run -from simvue.api.objects.folder import Folder -from simvue.sender import sender -from simvue.client import Client +# from simvue.api.objects import Artifact, Run +# from simvue.api.objects.folder import Folder +# from simvue.sender import sender +# from simvue.client import Client -@pytest.mark.api -@pytest.mark.online -def test_artifact_creation_online() -> None: - _uuid: str 
= f"{uuid.uuid4()}".split("-")[0] - _folder_name = f"/simvue_unit_testing/{_uuid}" - _folder = Folder.new(path=_folder_name) - _run = Run.new(folder=_folder_name) - _folder.commit() - _run.commit() +# @pytest.mark.api +# @pytest.mark.online +# def test_artifact_creation_online() -> None: +# _uuid: str = f"{uuid.uuid4()}".split("-")[0] +# _folder_name = f"/simvue_unit_testing/{_uuid}" +# _folder = Folder.new(path=_folder_name) +# _run = Run.new(folder=_folder_name) +# _folder.commit() +# _run.commit() - _failed = [] +# _failed = [] - with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: - _path = pathlib.Path(temp_f.name) - with _path.open("w") as out_f: - out_f.write(f"Hello World! {_uuid}") - _artifact = Artifact.new_file( - name=f"test_artifact_{_uuid}", - file_path=_path, - storage=None, - mime_type=None, - metadata=None - ) - _artifact.attach_to_run(_run.id, "input") - time.sleep(1) - for member in _artifact._properties: - try: - getattr(_artifact, member) - except Exception as e: - _failed.append((member, f"{e}")) - assert _artifact.name == f"test_artifact_{_uuid}" - _content = b"".join(_artifact.download_content()).decode("UTF-8") - assert _content == f"Hello World! {_uuid}" - assert _artifact.to_dict() - _test_array = numpy.array(range(10)) - _artifact = Artifact.new_object( - name=f"test_artifact_obj_{_uuid}", - storage=None, - obj=_test_array, - metadata=None - ) - _artifact.attach_to_run(_run.id, "output") - _run.delete() - _folder.delete(recursive=True, delete_runs=True, runs_only=False) - if _failed: - raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) +# with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: +# _path = pathlib.Path(temp_f.name) +# with _path.open("w") as out_f: +# out_f.write(f"Hello World! 
{_uuid}") +# _artifact = Artifact.new_file( +# name=f"test_artifact_{_uuid}", +# file_path=_path, +# storage=None, +# mime_type=None, +# metadata=None +# ) +# _artifact.attach_to_run(_run.id, "input") +# time.sleep(1) +# for member in _artifact._properties: +# try: +# getattr(_artifact, member) +# except Exception as e: +# _failed.append((member, f"{e}")) +# assert _artifact.name == f"test_artifact_{_uuid}" +# _content = b"".join(_artifact.download_content()).decode("UTF-8") +# assert _content == f"Hello World! {_uuid}" +# assert _artifact.to_dict() +# _test_array = numpy.array(range(10)) +# _artifact = Artifact.new_object( +# name=f"test_artifact_obj_{_uuid}", +# storage=None, +# obj=_test_array, +# metadata=None +# ) +# _artifact.attach_to_run(_run.id, "output") +# _run.delete() +# _folder.delete(recursive=True, delete_runs=True, runs_only=False) +# if _failed: +# raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) -@pytest.mark.api -@pytest.mark.offline -def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: - _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _folder_name = f"/simvue_unit_testing/{_uuid}" - _folder = Folder.new(path=_folder_name, offline=True) - _run = Run.new(name=f"test_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) +# @pytest.mark.api +# @pytest.mark.offline +# def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: +# _uuid: str = f"{uuid.uuid4()}".split("-")[0] +# _folder_name = f"/simvue_unit_testing/{_uuid}" +# _folder = Folder.new(path=_folder_name, offline=True) +# _run = Run.new(name=f"test_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) - _path = offline_test.joinpath("hello_world.txt") +# _path = offline_test.joinpath("hello_world.txt") - with _path.open("w") as out_f: - out_f.write("Hello World!") +# with _path.open("w") as out_f: +# out_f.write("Hello World!") - _folder.commit() - _run.commit() - _artifact = Artifact.new_file( - 
name=f"test_artifact_{_uuid}", - file_path=_path, - storage=None, - mime_type=None, - offline=True, - metadata=None - ) - _artifact.attach_to_run(_run._identifier, category="input") - assert _artifact.name == f"test_artifact_{_uuid}" - sender(offline_test.joinpath(".simvue"), 1, 10) - time.sleep(1) - client = Client() - _run_id = client.get_run_id_from_name(f"test_artifact_creation_offline_{_uuid}") - client.get_artifact_as_file(_run_id, _artifact.name, offline_test.joinpath("downloaded").mkdir()) - assert offline_test.joinpath("downloaded.txt").read_text() == "Hello World!" - _run.delete() - _folder.delete() +# _folder.commit() +# _run.commit() +# _artifact = Artifact.new_file( +# name=f"test_artifact_{_uuid}", +# file_path=_path, +# storage=None, +# mime_type=None, +# offline=True, +# metadata=None +# ) +# _artifact.attach_to_run(_run._identifier, category="input") +# assert _artifact.name == f"test_artifact_{_uuid}" +# sender(offline_test.joinpath(".simvue"), 1, 10) +# time.sleep(1) +# client = Client() +# _run_id = client.get_run_id_from_name(f"test_artifact_creation_offline_{_uuid}") +# client.get_artifact_as_file(_run_id, _artifact.name, offline_test.joinpath("downloaded").mkdir()) +# assert offline_test.joinpath("downloaded.txt").read_text() == "Hello World!" +# _run.delete() +# _folder.delete() diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index c8007df3..f183e737 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -97,7 +97,6 @@ def test_metric_range_alert_modification_online() -> None: _alert.commit() time.sleep(1) _new_alert = Alert(_alert.id) - _new_alert.read_only(False) assert isinstance(_new_alert, MetricsRangeAlert) _new_alert.read_only(False) _new_alert.description = "updated!" 
diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index e3009165..26b3f9af 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -5,7 +5,7 @@ import uuid from simvue.api.objects import MetricsThresholdAlert, Alert - +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -43,19 +43,38 @@ def test_metric_threshold_alert_creation_offline() -> None: metric="x", rule="is above", aggregation="average", - offline=True + offline=True, + description="a metric threshold alert" ) _alert.commit() assert _alert.source == "metrics" assert _alert.alert.frequency == 1 assert _alert.name == f"metrics_threshold_alert_{_uuid}" assert _alert.notification == "none" - _alert.delete() + with _alert._local_staging_file.open() as in_f: _local_data = json.load(in_f) - - assert not _local_data.get(_alert._label, {}).get(_alert.id) + assert _local_data.get("source") == "metrics" + assert _local_data.get("alert").get("frequency") == 1 + assert _local_data.get("name") == f"metrics_threshold_alert_{_uuid}" + assert _local_data.get("notification") == "none" + assert _local_data.get("alert").get("threshold") == 10 + + sender(_alert._local_staging_file.parents[1], 1, 10) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_threshold_alert_{_uuid}" + assert _online_alert.alert.threshold == 10 + + _alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() @pytest.mark.api @@ -98,17 +117,36 @@ def test_metric_threshold_alert_modification_offline() -> None: metric="x", rule="is above", 
aggregation="average", - offline=True + offline=True, + description="a metric threshold alert" ) _alert.commit() time.sleep(1) _new_alert = Alert(_alert.id) + _new_alert.read_only(False) assert isinstance(_new_alert, MetricsThresholdAlert) _new_alert.description = "updated!" - assert _new_alert.description != "updated!" _new_alert.commit() - assert _new_alert.description == "updated!" - _new_alert.delete() + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("description") == "updated!" + + sender(_alert._local_staging_file.parents[1], 1, 10) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_threshold_alert_{_uuid}" + assert _online_alert.alert.threshold == 10 + assert _online_alert.description == "updated!" + + _alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() @pytest.mark.api @pytest.mark.online From 77ecbf0898cf206130884320791db9181ba3aa28 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 30 Jan 2025 09:09:19 +0000 Subject: [PATCH 129/163] Migrated pyproject.toml file --- poetry.lock | 264 ++++++++++++++++++++++++++++++++++++++++++------- pyproject.toml | 65 ++++++------ 2 files changed, 262 insertions(+), 67 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4193f5cd..70b24c21 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -6,6 +6,8 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -17,6 +19,8 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -39,6 +43,8 @@ version = "1.3.0" description = "Better dates & times for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, @@ -58,6 +64,8 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -69,6 +77,8 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and platform_python_implementation != \"PyPy\" or python_version >= \"3.12\" and platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -148,6 +158,8 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -249,6 +261,8 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -263,6 +277,8 @@ version = "2.8.3" description = "" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "codecarbon-2.8.3-py3-none-any.whl", hash = "sha256:d3204852ad0c83d94d0f16b7d922e7f540c1e5f488d911f3e75408fe29f4ef4c"}, {file = "codecarbon-2.8.3.tar.gz", hash = "sha256:037dd5afa1c5f60154f893ecd1631e0c849786edcfc9ff34a7ef467707891269"}, @@ -292,10 +308,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "python_version <= \"3.11\" and platform_system == \"Windows\" or python_version >= \"3.12\" and platform_system == \"Windows\"", dev = "python_version <= \"3.11\" and sys_platform == \"win32\" or python_version >= \"3.12\" and sys_platform == \"win32\""} [[package]] name = "contourpy" @@ -303,6 +321,8 @@ version = "1.3.1" description = "Python library for calculating contours of 2D quadrilateral grids" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -376,6 +396,8 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -453,6 +475,8 @@ version = "44.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"}, @@ -504,6 +528,8 @@ version = "0.12.1" description = "Composable style cycles" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -513,27 +539,14 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] -[[package]] -name = "dill" -version = "0.3.9" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, - {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - [[package]] name = "dnspython" version = "2.7.0" description = "DNS toolkit" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, {file = "dnspython-2.7.0.tar.gz", hash = 
"sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, @@ -554,6 +567,8 @@ version = "2.2.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, @@ -569,6 +584,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -583,6 +600,8 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -597,6 +616,8 @@ version = "0.20.0" description = "Fief Client for Python" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fief_client-0.20.0-py3-none-any.whl", hash = "sha256:425f40cc7c45c651daec63da402e033c53d91dcaa3f9bf208873fd8692fc16dc"}, {file = "fief_client-0.20.0.tar.gz", hash = 
"sha256:dbfb906d03c4a5402ceac5c843aa4708535fb6f5d5c1c4e263ec06fbbbc434d7"}, @@ -618,6 +639,8 @@ version = "0.7.0" description = "A library for automatically generating command line interfaces." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fire-0.7.0.tar.gz", hash = "sha256:961550f07936eaf65ad1dc8360f2b2bf8408fad46abbfa4d2a3794f8d2a95cdf"}, ] @@ -631,6 +654,8 @@ version = "4.0.1" description = "Python module for interacting with nested dicts as a single level dict with delimited keys." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flatdict-4.0.1.tar.gz", hash = "sha256:cd32f08fd31ed21eb09ebc76f06b6bd12046a24f77beb1fd0281917e47f26742"}, ] @@ -641,6 +666,8 @@ version = "4.55.3" description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1dcc07934a2165ccdc3a5a608db56fb3c24b609658a5b340aee4ecf3ba679dc0"}, {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7d66c15ba875432a2d2fb419523f5d3d347f91f48f57b8b08a2dfc3c39b8a3f"}, @@ -714,6 +741,8 @@ version = "4.0.12" description = "Git Object Database" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, @@ -728,6 +757,8 @@ version = "3.1.44" description = "GitPython is a Python library used to interact with Git repositories" 
optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, @@ -746,6 +777,8 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -757,6 +790,8 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -778,6 +813,8 @@ version = "0.27.2" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -803,6 +840,8 @@ version = "10.0" description = "Human friendly output for text interfaces using Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -817,6 +856,8 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -831,6 +872,8 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -842,6 +885,8 @@ version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -859,6 +904,8 @@ version = "1.5.6" description = "Implementation of JOSE Web standards" optional = false python-versions = ">= 3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, @@ -874,6 +921,8 @@ version = "1.4.8" description = "A fast implementation of the Cassowary constraint solver" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -963,6 +1012,8 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -987,6 +1038,8 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1057,6 +1110,8 @@ version = "3.10.0" description = "Python plotting package" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "matplotlib-3.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2c5829a5a1dd5a71f0e31e6e8bb449bc0ee9dbfb05ad28fc0c6b55101b3a4be6"}, {file = "matplotlib-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2a43cbefe22d653ab34bb55d42384ed30f611bcbdea1f8d7f431011a2e1c62e"}, @@ -1114,6 +1169,8 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1125,6 +1182,8 @@ version = "1.1.0" 
description = "MessagePack serializer" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, @@ -1192,12 +1251,42 @@ files = [ {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, ] +[[package]] +name = "narwhals" +version = "1.24.1" +description = "Extremely lightweight compatibility layer between dataframe libraries" +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" +files = [ + {file = "narwhals-1.24.1-py3-none-any.whl", hash = "sha256:d8983fe14851c95d60576ddca37c094bd4ed24ab9ea98396844fb20ad9aaf184"}, + {file = "narwhals-1.24.1.tar.gz", hash = "sha256:b09b8253d945f23cdb683a84685abf3afb9f96114d89e9f35dc876e143f65007"}, +] + +[package.extras] +core = ["duckdb", "pandas", "polars", "pyarrow", "pyarrow-stubs"] +cudf = ["cudf (>=24.10.0)"] +dask = ["dask[dataframe] (>=2024.8)"] +dev = ["covdefaults", "hypothesis", "pre-commit", "pytest", "pytest-cov", "pytest-env", "pytest-randomly", "typing-extensions"] +docs = ["black", "duckdb", "jinja2", "markdown-exec[ansi]", "mkdocs", "mkdocs-autorefs", "mkdocs-material", "mkdocstrings[python]", "pandas", "polars (>=1.0.0)", "pyarrow"] +duckdb = ["duckdb (>=1.0)"] +extra = ["scikit-learn"] +ibis = ["ibis-framework (>=6.0.0)", "packaging", "pyarrow-hotfix", "rich"] +modin = ["modin"] +pandas = ["pandas (>=0.25.3)"] +polars = ["polars (>=0.20.3)"] +pyarrow = ["pyarrow (>=11.0.0)"] +pyspark = ["pyspark (>=3.5.0)"] + [[package]] name = "numpy" version = "2.2.2" description = 
"Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "numpy-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7079129b64cb78bdc8d611d1fd7e8002c0a2565da6a47c4df8062349fee90e3e"}, {file = "numpy-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec6c689c61df613b783aeb21f945c4cbe6c51c28cb70aae8430577ab39f163e"}, @@ -1258,13 +1347,15 @@ files = [ [[package]] name = "nvidia-ml-py" -version = "12.560.30" +version = "12.570.86" description = "Python Bindings for the NVIDIA Management Library" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "nvidia-ml-py-12.560.30.tar.gz", hash = "sha256:f0254dc7400647680a072ee02509bfd46102b60bdfeca321576d4d4817e7fe97"}, - {file = "nvidia_ml_py-12.560.30-py3-none-any.whl", hash = "sha256:fea371c94d63e38a611c17bbb85fe400e9c8ddb9e8684a9cd0e47786a4bc3c73"}, + {file = "nvidia_ml_py-12.570.86-py3-none-any.whl", hash = "sha256:58907de35a845abd13dcb227f18298f3b5dd94a72d04c9e594e77711e95c0b51"}, + {file = "nvidia_ml_py-12.570.86.tar.gz", hash = "sha256:0508d4a0c7b6d015cf574530b95a62ed4fc89da3b8b47e1aefe6777db170ec8b"}, ] [[package]] @@ -1273,10 +1364,12 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] +markers = {main = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "pandas" @@ -1284,6 +1377,8 @@ version = 
"2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -1370,6 +1465,8 @@ version = "11.1.0" description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -1454,18 +1551,23 @@ xmp = ["defusedxml"] [[package]] name = "plotly" -version = "5.24.1" +version = "6.0.0" description = "An open-source, interactive data visualization library for Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ - {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"}, - {file = "plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"}, + {file = "plotly-6.0.0-py3-none-any.whl", hash = "sha256:f708871c3a9349a68791ff943a5781b1ec04de7769ea69068adcd9202e57653a"}, + {file = "plotly-6.0.0.tar.gz", hash = "sha256:c4aad38b8c3d65e4a5e7dd308b084143b9025c2cc9d5317fc1f1d30958db87d3"}, ] [package.dependencies] +narwhals = ">=1.15.1" 
packaging = "*" -tenacity = ">=6.2.0" + +[package.extras] +express = ["numpy"] [[package]] name = "pluggy" @@ -1473,6 +1575,8 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1488,6 +1592,8 @@ version = "0.21.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, @@ -1502,6 +1608,8 @@ version = "3.0.50" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -1516,6 +1624,8 @@ version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, @@ -1546,6 +1656,8 @@ version = "9.0.0" description = "Get CPU info with pure Python" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, @@ -1557,6 +1669,8 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and platform_python_implementation != \"PyPy\" or python_version >= \"3.12\" and platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1564,13 +1678,15 @@ files = [ [[package]] name = "pydantic" -version = "2.10.5" +version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, - {file = "pydantic-2.10.5.tar.gz", hash = 
"sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] @@ -1588,6 +1704,8 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1700,6 +1818,8 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -1714,6 +1834,8 @@ version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -1731,6 +1853,8 @@ version = "12.0.0" description = "Python utilities for the NVIDIA Management Library" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pynvml-12.0.0-py3-none-any.whl", hash = "sha256:fdff84b62a27dbe98e08e1a647eb77342bef1aebe0878bcd15e99a83fcbecb9e"}, {file = "pynvml-12.0.0.tar.gz", hash = "sha256:299ce2451a6a17e6822d6faee750103e25b415f06f59abb8db65d30f794166f5"}, @@ -1748,6 +1872,8 @@ version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -1762,6 +1888,8 @@ version = "3.5.4" description = "A python implementation of 
GNU readline." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and sys_platform == \"win32\" or python_version >= \"3.12\" and sys_platform == \"win32\"" files = [ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, @@ -1776,6 +1904,8 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -1798,6 +1928,8 @@ version = "6.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, @@ -1816,6 +1948,8 @@ version = "0.6.0" description = "Manage dependencies of tests" optional = false python-versions = ">=3.4" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-dependency-0.6.0.tar.gz", hash = "sha256:934b0e6a39d95995062c193f7eaeed8a8ffa06ff1bcef4b62b0dc74a708bacc1"}, ] @@ -1830,6 +1964,8 @@ version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, @@ -1847,6 +1983,8 @@ version = "1.0.0" description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
optional = false python-versions = "*" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"}, {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"}, @@ -1866,6 +2004,8 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -1886,6 +2026,8 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1900,6 +2042,8 @@ version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, @@ -1911,6 +2055,8 @@ version = "2.1.0" 
description = "Python library to build pretty command line user prompts ⭐️" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec"}, {file = "questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587"}, @@ -1925,6 +2071,8 @@ version = "0.2.1" description = "Generate random adj-noun names like docker and github." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "randomname-0.2.1.tar.gz", hash = "sha256:b79b98302ba4479164b0a4f87995b7bebbd1d91012aeda483341e3e58ace520e"}, ] @@ -1938,6 +2086,8 @@ version = "3.11.0" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb8a54543d16ab1b69e2c5ed96cabbff16db044a50eddfc028000138ca9ddf33"}, {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:231c8b2efbd7f8d2ecd1ae900363ba168b8870644bb8f2b5aa96e4a7573bde19"}, @@ -2038,6 +2188,8 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2059,6 +2211,8 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -2078,6 +2232,8 @@ version = "0.9.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"}, {file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"}, @@ -2101,13 +2257,15 @@ files = [ [[package]] name = "semver" -version = "3.0.2" +version = "3.0.4" description = "Python helper for Semantic Versioning (https://semver.org)" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, - {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, + {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, + {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, ] [[package]] @@ -2116,6 +2274,8 @@ version = "75.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, @@ -2136,6 +2296,8 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = 
"shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -2147,6 +2309,8 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -2158,6 +2322,8 @@ version = "5.0.2" description = "A pure Python implementation of a sliding window memory map manager" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"}, {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, @@ -2169,6 +2335,8 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -2180,6 +2348,8 @@ version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = 
"tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -2194,6 +2364,8 @@ version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -2209,6 +2381,8 @@ version = "2.5.0" description = "ANSI color formatting for output in terminal" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8"}, {file = "termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f"}, @@ -2223,6 +2397,8 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -2234,6 +2410,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_full_version <= \"3.11.0a6\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -2275,6 +2453,8 @@ version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, @@ -2292,6 +2472,8 @@ version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, @@ -2303,6 +2485,8 @@ version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -2317,6 +2501,8 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" 
files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -2324,13 +2510,15 @@ files = [ [[package]] name = "tzdata" -version = "2024.2" +version = "2025.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] @@ -2339,6 +2527,8 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -2356,6 +2546,8 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -2367,6 +2559,8 @@ version = "3.0.1" description = "Yet Another Terminal Spinner" optional = false python-versions = ">=3.9,<4.0" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "yaspin-3.0.1-py3-none-any.whl", hash = "sha256:c4b5d2ca23ae664b87a5cd53401c5107cef12668a71d9ee5ea5536045f364121"}, {file = "yaspin-3.0.1.tar.gz", hash = "sha256:9c04aa69cce9be83e1ea3134a6712e749e6c0c9cd02599023713e6befd7bf369"}, @@ -2379,6 +2573,6 @@ termcolor = ">=2.3,<3.0" plot = ["matplotlib", "plotly"] [metadata] -lock-version = "2.0" -python-versions = "^3.10,<3.14" -content-hash = "27a074a6955be5e90ca72ccd5062dc2d595d3f976c49317c04b83c2ba8f2ccbc" +lock-version = "2.1" +python-versions = ">=3.10,<3.14" +content-hash = "28337268c96b0edb02ca14b017c0d72dc3f81ba63106f52defe59febbdb32128" diff --git a/pyproject.toml b/pyproject.toml index c0cc7322..a0269e07 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,13 @@ -[tool.poetry] +[project] name = "simvue" version = "1.1.4" description = "Simulation tracking and monitoring" -authors = ["Simvue 
Development Team "] +authors = [ + {name = "Simvue Development Team", "email" = "info@simvue.io"} +] license = "Apache v2" +requires-python = ">=3.10,<3.14" readme = "README.md" -homepage = "https://simvue.io" -repository = "https://github.com/simvue-io/python-api" -documentation = "https://docs.simvue.io" classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", @@ -33,36 +33,37 @@ keywords = [ "alerting", "metrics-gathering" ] +dependencies = [ + "requests (>=2.32.3,<3.0.0)", + "pydantic (>=2.10.6,<3.0.0)", + "tabulate (>=0.9.0,<0.10.0)", + "msgpack (>=1.1.0,<2.0.0)", + "pyjwt (>=2.10.1,<3.0.0)", + "pandas (>=2.2.3,<3.0.0)", + "toml (>=0.10.2,<0.11.0)", + "click (>=8.1.8,<9.0.0)", + "gitpython (>=3.1.44,<4.0.0)", + "humanfriendly (>=10.0,<11.0)", + "randomname (>=0.2.1,<0.3.0)", + "codecarbon (>=2.8.3,<3.0.0)", + "numpy (>=2.2.2,<3.0.0)", + "flatdict (>=4.0.1,<5.0.0)", + "semver (>=3.0.4,<4.0.0)", + "email-validator (>=2.2.0,<3.0.0)", + "psutil (>=6.1.1,<7.0.0)", + "tenacity (>=9.0.0,<10.0.0)", + "typing-extensions (>=4.12.2,<5.0.0) ; python_version < \"3.11\"", +] -[tool.poetry.dependencies] -python = "^3.10,<3.14" -dill = "^0.3.7" -requests = "^2.31.0" -msgpack = "^1.0.7" -PyJWT = "^2.8.0" -pydantic = "^2.5.3" -pandas = "^2.2.0" -plotly = {version = "^5.18.0", optional = true} -matplotlib = {version = "^3.8.2", optional = true} -typing_extensions = { version = "^4.11.0", python = "<3.11" } -toml = "^0.10.2" -click = "^8.1.7" -gitpython = "^3.1.43" -humanfriendly = "^10.0" -tabulate = "^0.9.0" -randomname = "^0.2.1" -codecarbon = "^2.7.1" -numpy = "^2.1.2" -flatdict = "^4.0.1" -semver = "^3.0.2" -email-validator = "^2.2.0" -psutil = "^6.1.1" -tenacity = "^9.0.0" +[project.urls] +homepage = "https://simvue.io" +repository = "https://github.com/simvue-io/python-api" +documentation = "https://docs.simvue.io" -[tool.poetry.extras] -plot = ["matplotlib", "plotly"] +[project.optional-dependencies] +plot = ["plotly 
(>=6.0.0,<7.0.0)", "matplotlib (>=3.10.0,<4.0.0)"] -[tool.poetry.scripts] +[project.scripts] simvue_sender = "simvue.bin.sender:run" [tool.poetry.group.dev.dependencies] From 2f2f80085b5e5e591ddbb7a427e57770b7e7b2ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 30 Jan 2025 09:23:44 +0000 Subject: [PATCH 130/163] Fixed abort test --- pyproject.toml | 5 ----- tests/unit/test_run.py | 3 +-- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a0269e07..5147e5c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,11 +81,6 @@ types-requests = "^2.32.0.20241016" requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" -[[tool.poetry.source]] -name = "PyPI" -priority = "primary" - - [tool.ruff] lint.extend-select = ["C901", "T201"] lint.mccabe.max-complexity = 11 diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 9272a65b..ee79a04c 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -70,8 +70,7 @@ def test_run_modification_online() -> None: assert _run.name == "simvue_test_run" assert _run.status == "running" _run.abort("test_run_abort") - assert _new_run.status == "terminated" - assert _run.status == "terminated" + assert _new_run.abort_trigger _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) From 2959efba77365ab79ca36a966b39933b32f957cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 30 Jan 2025 09:46:03 +0000 Subject: [PATCH 131/163] Fix metadata for Poetry/PyPA hybrids --- simvue/metadata.py | 2 +- simvue/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/simvue/metadata.py b/simvue/metadata.py index eeae8448..abba595f 100644 --- a/simvue/metadata.py +++ b/simvue/metadata.py @@ -83,7 +83,7 @@ def _python_env(repository: pathlib.Path) -> dict[str, typing.Any]: if (pyproject_file := pathlib.Path(repository).joinpath("pyproject.toml")).exists(): content = 
toml.load(pyproject_file) - if poetry_content := content.get("tool", {}).get("poetry"): + if (poetry_content := content.get("tool", {}).get("poetry", {})).get("name"): python_meta |= { "python.project.name": poetry_content["name"], "python.project.version": poetry_content["version"], diff --git a/simvue/version.py b/simvue/version.py index 37d9c031..6c2f0889 100644 --- a/simvue/version.py +++ b/simvue/version.py @@ -11,6 +11,6 @@ pathlib.Path(os.path.dirname(__file__)).parents[1], "pyproject.toml" ) if os.path.exists(_metadata): - __version__ = toml.load(_metadata)["tool"]["poetry"]["version"] + __version__ = toml.load(_metadata)["project"]["version"] else: __version__ = "" From aecb71e4163506c7c6ecbf4caf59598f10c28560 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 30 Jan 2025 14:46:41 +0000 Subject: [PATCH 132/163] Fixed offline run tests --- tests/unit/test_run.py | 72 +++++++++++++++++++++++++++++++++--------- 1 file changed, 57 insertions(+), 15 deletions(-) diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 9272a65b..4bb5e49c 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -4,8 +4,9 @@ import time import datetime import uuid - +from simvue.sender import sender from simvue.api.objects import Run, Folder +from simvue.client import Client @pytest.mark.api @pytest.mark.online @@ -25,21 +26,34 @@ def test_run_creation_online() -> None: @pytest.mark.offline def test_run_creation_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _run_name = f"simvue_offline_run_{_uuid}" _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name, offline=True) - _run = Run.new(folder=_folder_name, offline=True) + _run = Run.new(name=_run_name,folder=_folder_name, offline=True) _folder.commit() _run.commit() + assert _run.name == _run_name assert _run.folder == _folder_name - _run.delete() - _folder.delete(recursive=True, delete_runs=True, runs_only=False) with _run._local_staging_file.open() as in_f: 
_local_data = json.load(in_f) - - assert not _local_data.get(_run._label, {}).get(_run.id) - assert not _local_data.get(_folder._label, {}).get(_folder.id) - + assert _local_data.get("name") == f"simvue_offline_run_{_uuid}" + assert _local_data.get("folder") == _folder_name + + sender(_run._local_staging_file.parents[1], 1, 10) + time.sleep(1) + + # Get online ID and retrieve run + _online_id = _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_run = Run(_online_id) + + assert _online_run.name == _run_name + assert _online_run.folder == _folder_name + + _run.delete() + _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").unlink() + client = Client() + client.delete_folder(_folder_name, recursive=True, remove_runs=True) @pytest.mark.api @pytest.mark.online @@ -80,15 +94,16 @@ def test_run_modification_online() -> None: @pytest.mark.offline def test_run_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _run_name = f"simvue_offline_run_{_uuid}" _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name, offline=True) - _run = Run.new(folder=_folder_name, offline=True) + _run = Run.new(name=_run_name, folder=_folder_name, offline=True) _folder.commit() _run.commit() + assert _run.name == _run_name assert _run.folder == _folder_name time.sleep(1) - _now = datetime.datetime.now() - _new_run = Run(identifier=_run.id) + _new_run = Run(identifier=_run.id, offline=True) # Property has not been committed to offline # object so not yet available with pytest.raises(AttributeError): @@ -96,18 +111,45 @@ def test_run_modification_offline() -> None: _new_run.read_only(False) _new_run.name = "simvue_test_run" _new_run.description = "Simvue test run" - _new_run.tags = ["simvue", "test", "tag"] _new_run.ttl = 120 - _new_run.commit() assert _new_run.ttl == 120 assert _new_run.description 
== "Simvue test run" - assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) assert _new_run.name == "simvue_test_run" + + sender(_run._local_staging_file.parents[1], 1, 10) + time.sleep(1) + + # Get online ID and retrieve run + _online_id = _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_run = Run(_online_id) + + assert _online_run.ttl == 120 + assert _online_run.description == "Simvue test run" + assert _online_run.name == "simvue_test_run" + assert _online_run.folder == _folder_name + + # Now add a new set of tags in offline mode and send + _new_run.tags = ["simvue", "test", "tag"] + _new_run.commit() + + # Shouldn't yet be available in the online run since it hasnt been sent + _online_run.refresh() + assert _online_run.tags == [] + + sender(_new_run._local_staging_file.parents[1], 1, 10) + time.sleep(1) + + _online_run.refresh() + assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) + assert sorted(_online_run.tags) == sorted(["simvue", "test", "tag"]) + _run.delete() - _folder.delete() + _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").unlink() + client = Client() + client.delete_folder(_folder_name, recursive=True, remove_runs=True) @pytest.mark.api From 9f7f73419d6d76d9c7c591a5d1c04b2b90a397e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 30 Jan 2025 15:02:37 +0000 Subject: [PATCH 133/163] Move offline mode statement to init --- poetry.lock | 69 +++++++++++++++- pyproject.toml | 3 +- simvue/api/__init__.py | 8 ++ simvue/api/objects/__init__.py | 10 +++ simvue/api/objects/administrator/__init__.py | 9 +++ simvue/api/objects/administrator/tenant.py | 42 +++++++++- simvue/api/objects/administrator/user.py | 83 +++++++++++++++++++- simvue/api/objects/alert/__init__.py | 10 +++ simvue/api/objects/alert/base.py | 2 + simvue/api/objects/alert/events.py | 14 
+++- simvue/api/objects/alert/fetch.py | 15 ++++ simvue/api/objects/alert/metrics.py | 15 ++-- simvue/api/objects/alert/user.py | 6 +- simvue/api/objects/artifact.py | 2 +- simvue/api/objects/base.py | 11 +-- simvue/api/objects/events.py | 5 +- simvue/api/objects/folder.py | 4 +- simvue/api/objects/metrics.py | 5 +- simvue/api/objects/run.py | 36 ++++++++- simvue/api/objects/stats.py | 17 +++- simvue/api/objects/storage/__init__.py | 10 +++ simvue/api/objects/storage/base.py | 16 ++++ simvue/api/objects/storage/fetch.py | 16 ++++ simvue/api/objects/storage/file.py | 38 ++++++++- simvue/api/objects/storage/s3.py | 61 +++++++++++++- simvue/api/objects/tag.py | 42 +++++++++- simvue/api/url.py | 14 ++++ tests/unit/test_s3_storage.py | 3 +- 28 files changed, 503 insertions(+), 63 deletions(-) diff --git a/poetry.lock b/poetry.lock index 70b24c21..4728713d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,6 +58,27 @@ types-python-dateutil = ">=2.8.10" doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] +[[package]] +name = "attrs" +version = "25.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", 
"pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + [[package]] name = "certifi" version = "2024.12.14" @@ -261,7 +282,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, @@ -313,7 +334,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "python_version <= \"3.11\" and platform_system == \"Windows\" or python_version >= \"3.12\" and platform_system == \"Windows\"", dev = "python_version <= \"3.11\" and sys_platform == \"win32\" or python_version >= \"3.12\" and sys_platform == \"win32\""} +markers = {main = "python_version <= \"3.11\" and platform_system == \"Windows\" or python_version >= \"3.12\" and platform_system == \"Windows\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "contourpy" @@ -879,6 +900,33 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "interrogate" +version = "1.7.0" +description = "Interrogate a codebase for docstring coverage." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "interrogate-1.7.0-py3-none-any.whl", hash = "sha256:b13ff4dd8403369670e2efe684066de9fcb868ad9d7f2b4095d8112142dc9d12"}, + {file = "interrogate-1.7.0.tar.gz", hash = "sha256:a320d6ec644dfd887cc58247a345054fc4d9f981100c45184470068f4b3719b0"}, +] + +[package.dependencies] +attrs = "*" +click = ">=7.1" +colorama = "*" +py = "*" +tabulate = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["cairosvg", "coverage[toml]", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "sphinx", "sphinx-autobuild", "wheel"] +docs = ["sphinx", "sphinx-autobuild"] +png = ["cairosvg"] +tests = ["coverage[toml]", "pytest", "pytest-cov", "pytest-mock"] + [[package]] name = "jinja2" version = "3.1.5" @@ -1650,6 +1698,19 @@ files = [ dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + [[package]] name = "py-cpuinfo" version = "9.0.0" @@ -2348,7 +2409,7 @@ version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] markers = 
"python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, @@ -2575,4 +2636,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "28337268c96b0edb02ca14b017c0d72dc3f81ba63106f52defe59febbdb32128" +content-hash = "b87307deab6d125136242de2adc36049337970d6abea23392c9fdf57761230a6" diff --git a/pyproject.toml b/pyproject.toml index 5147e5c6..acb1b784 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "simvue" version = "1.1.4" description = "Simulation tracking and monitoring" authors = [ - {name = "Simvue Development Team", "email" = "info@simvue.io"} + {name = "Simvue Development Team", email = "info@simvue.io"} ] license = "Apache v2" requires-python = ">=3.10,<3.14" @@ -76,6 +76,7 @@ pytest-sugar = "^1.0.0" pytest-xdist = "^3.6.1" jinja2 = "^3.1.4" types-requests = "^2.32.0.20241016" +interrogate = "^1.7.0" [build-system] requires = ["poetry-core"] diff --git a/simvue/api/__init__.py b/simvue/api/__init__.py index e69de29b..f56eb001 100644 --- a/simvue/api/__init__.py +++ b/simvue/api/__init__.py @@ -0,0 +1,8 @@ +""" +Simvue API +========== + +Module contains methods for interacting with a Simvue server +including accessing/updating objects. + +""" diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 08c7b22b..7772bda3 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -1,3 +1,13 @@ +""" +Simvue API Objects +================== + +The following module defines objects which provide exact representations +of information accessible via the Simvue RestAPI, this provides a lower +level interface towards the development of additional tools/frameworks. 
+ +""" + from .alert import ( Alert as Alert, EventsAlert as EventsAlert, diff --git a/simvue/api/objects/administrator/__init__.py b/simvue/api/objects/administrator/__init__.py index 13867771..abb2e0db 100644 --- a/simvue/api/objects/administrator/__init__.py +++ b/simvue/api/objects/administrator/__init__.py @@ -1,2 +1,11 @@ +""" +Simvue Admin Objects +==================== + +These are Simvue objects only accessible to an administrator of +the server. + +""" + from .tenant import Tenant as Tenant from .user import User as User diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index cd5b05f3..b5439d1e 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -1,3 +1,12 @@ +""" +Simvue Tenants +============== + +Contains a class for remotely connecting to Simvue tenants, or defining +a new tenant given relevant arguments. + +""" + try: from typing import Self except ImportError: @@ -8,6 +17,8 @@ class Tenant(SimvueObject): + """Class for interacting with a tenant instance on the server.""" + @classmethod @pydantic.validate_call def new( @@ -20,17 +31,40 @@ def new( max_data_volume: int = 0, offline: bool = False, ) -> Self: - _tenant = Tenant( + """Create a new tenant on the Simvue server. + + Requires administrator privileges. + + Parameters + ---------- + name: str + the name for this tenant + enabled: bool, optional + whether to enable the tenant on creation, default is True + max_request_rate: int, optional + the maximum request rate allowed for this tenant, default is no limit. + max_runs: int, optional + the maximum number of runs allowed within this tenant, default is no limit. + max_data_volume: int, optional + the maximum volume of data allowed within this tenant, default is no limit. + offline: bool, optional + create in offline mode, default is False. 
+ + Returns + ------- + Tenant + a tenant instance with staged changes + + """ + return Tenant( name=name, is_enabled=enabled, max_request_rate=max_request_rate, max_runs=max_runs, max_data_volume=max_data_volume, - offline=offline, _read_only=False, + _offline=offline, ) - _tenant.offline_mode(offline) - return _tenant # type: ignore @property def name(self) -> str: diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index e90c2192..6ecc7b18 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -1,3 +1,12 @@ +""" +Simvue Users +============ + +Contains a class for remotely connecting to Simvue users, or defining +a new user given relevant arguments. + +""" + import pydantic try: @@ -8,6 +17,8 @@ class User(SimvueObject): + """Class for interacting with a user instance on the server.""" + @classmethod @pydantic.validate_call def new( @@ -24,6 +35,39 @@ def new( enabled: bool = True, offline: bool = False, ) -> Self: + """Create a new user on the Simvue server. + + Requires administrator privileges. + + Parameters + ---------- + username: str + the username for this user + fullname: str + the full name for this user + email: str + the email for this user + manager : bool + assign the manager role to this user + admin : bool + assign the administrator role to this user + readonly : bool + given only read access to this user + welcome : bool + display welcome message to user + tenant : str + the tenant under which to assign this user + enabled: bool, optional + whether to enable the user on creation, default is True + offline: bool, optional + create in offline mode, default is False. 
+ + Returns + ------- + User + a user instance with staged changes + + """ _user_info: dict[str, str | bool] = { "username": username, "fullname": fullname, @@ -34,14 +78,32 @@ def new( "is_admin": admin, "is_enabled": enabled, } - _user = User(user=_user_info, tenant=tenant, offline=offline, _read_only=False) - _user.offline_mode(offline) - return _user # type: ignore + return User( + user=_user_info, + tenant=tenant, + offline=offline, + _read_only=False, + _offline=offline, + ) @classmethod def get( cls, *, count: int | None = None, offset: int | None = None, **kwargs ) -> dict[str, "User"]: + """Retrieve users from the Simvue server. + + Parameters + ---------- + count : int, optional + limit the number of results, default is no limit. + offset : int, optional + start index for results, default is 0. + + Yields + ------ + User + user instance representing user on server + """ # Currently no user filters kwargs.pop("filters", None) return super().get(count=count, offset=offset, **kwargs) @@ -49,6 +111,7 @@ def get( @property @staging_check def username(self) -> str: + """Retrieve the username for the user""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["username"] return self._get_attribute("username") @@ -57,11 +120,13 @@ def username(self) -> str: @write_only @pydantic.validate_call def username(self, username: str) -> None: + """Set the username for the user""" self._staging["username"] = username @property @staging_check def fullname(self) -> str: + """Retrieve the full name for the user""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["fullname"] return self._get_attribute("fullname") @@ -70,11 +135,13 @@ def fullname(self) -> str: @write_only @pydantic.validate_call def fullname(self, fullname: str) -> None: + """Set the full name for the user""" self._staging["fullname"] = fullname @property @staging_check def manager(self) -> bool: + """Retrieve if the user has manager privileges""" 
if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_manager"] return self._get_attribute("is_manager") @@ -83,11 +150,13 @@ def manager(self) -> bool: @write_only @pydantic.validate_call def manager(self, is_manager: bool) -> None: + """Set if the user has manager privileges""" self._staging["is_manager"] = is_manager @property @staging_check def admin(self) -> bool: + """Retrieve if the user has admin privileges""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_admin"] return self._get_attribute("is_admin") @@ -96,10 +165,12 @@ def admin(self) -> bool: @write_only @pydantic.validate_call def admin(self, is_admin: bool) -> None: + """Set if the user has admin privileges""" self._staging["is_admin"] = is_admin @property def deleted(self) -> bool: + """Retrieve if the user is pending deletion""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_deleted"] return self._get_attribute("is_deleted") @@ -107,6 +178,7 @@ def deleted(self) -> bool: @property @staging_check def readonly(self) -> bool: + """Retrieve if the user has read-only access""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_readonly"] return self._get_attribute("is_readonly") @@ -115,11 +187,13 @@ def readonly(self) -> bool: @write_only @pydantic.validate_call def readonly(self, is_readonly: bool) -> None: + """Set if the user has read-only access""" self._staging["is_readonly"] = is_readonly @property @staging_check def enabled(self) -> bool: + """Retrieve if the user is enabled""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_enabled"] return self._get_attribute("is_enabled") @@ -128,11 +202,13 @@ def enabled(self) -> bool: @write_only @pydantic.validate_call def enabled(self, is_enabled: bool) -> None: + """Set if the user is enabled""" self._staging["is_enabled"] = is_enabled @property @staging_check def 
email(self) -> str: + """Retrieve the user email""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["email"] return self._get_attribute("email") @@ -141,4 +217,5 @@ def email(self) -> str: @write_only @pydantic.validate_call def email(self, email: str) -> None: + """Set the user email""" self._staging["email"] = email diff --git a/simvue/api/objects/alert/__init__.py b/simvue/api/objects/alert/__init__.py index fb1c7349..b9be7d66 100644 --- a/simvue/api/objects/alert/__init__.py +++ b/simvue/api/objects/alert/__init__.py @@ -1,3 +1,13 @@ +""" +Simvue Alerts +============= + +Creation and management of Alerts on the Simvue server, the +alerts are split into sub-categories to ensure correct arguments +are passed and relevant properties returned. + +""" + from .fetch import Alert from .metrics import MetricsThresholdAlert, MetricsRangeAlert from .events import EventsAlert diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index bf516003..0204f6d3 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -23,6 +23,7 @@ class AlertBase(SimvueObject): @classmethod def new(cls, **kwargs): + """Create a new alert""" pass def __init__(self, identifier: str | None = None, **kwargs) -> None: @@ -31,6 +32,7 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: super().__init__(identifier=identifier, **kwargs) def compare(self, other: "AlertBase") -> bool: + """Compare this alert to another""" return type(self) is type(other) and self.name == other.name @staging_check diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index af166b43..8a8f1e42 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -30,7 +30,8 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: def get( cls, count: int | None = None, offset: int | None = None ) -> dict[str, typing.Any]: - raise 
NotImplementedError("Retrieve of only event alerts is not yet supported") + """Retrieve only alerts of the event alert type""" + raise NotImplementedError("Retrieval of only event alerts is not yet supported") @classmethod @pydantic.validate_call @@ -67,10 +68,15 @@ def new( offline : bool, optional create alert locally, default is False + Returns + ------- + EventAlert + a new event alert with changes staged + """ _alert_definition = {"pattern": pattern, "frequency": frequency} - _alert = EventsAlert( + return EventsAlert( name=name, description=description, notification=notification, @@ -78,9 +84,8 @@ def new( alert=_alert_definition, enabled=enabled, _read_only=False, + _offline=offline, ) - _alert.offline_mode(offline) - return _alert class EventAlertDefinition: @@ -91,6 +96,7 @@ def __init__(self, alert: EventsAlert) -> None: self._sv_obj = alert def compare(self, other: "EventAlertDefinition") -> bool: + """Compare this definition with that of another EventAlert""" if not isinstance(other, EventAlertDefinition): return False diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 010bce96..f89f6743 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -43,6 +43,21 @@ def __new__(cls, identifier: str, **kwargs) -> AlertType: def get( cls, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, AlertType], None, None]: + """Fetch all alerts from the server for the current user. + + Parameters + ---------- + count : int, optional + limit the number of results, default of None returns all. + offset : int, optional + start index for returned results, default of None starts at 0. 
+ + Yields + ------ + tuple[str, AlertType] + identifier for an alert + the alert itself as a class instance + """ # Currently no alert filters kwargs.pop("filters", None) diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 73ffb8c8..d270a15c 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -35,6 +35,7 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: def get( cls, count: int | None = None, offset: int | None = None ) -> dict[str, typing.Any]: + """Retrieve only MetricsThresholdAlerts""" raise NotImplementedError("Retrieve of only metric alerts is not yet supported") @classmethod @@ -93,7 +94,7 @@ def new( "aggregation": aggregation, "threshold": threshold, } - _alert = MetricsThresholdAlert( + return MetricsThresholdAlert( name=name, description=description, notification=notification, @@ -101,9 +102,8 @@ def new( alert=_alert_definition, enabled=enabled, _read_only=False, + _offline=offline, ) - _alert.offline_mode(offline) - return _alert class MetricsRangeAlert(AlertBase): @@ -115,6 +115,7 @@ def __init__(self, identifier: str | None = None, **kwargs) -> None: super().__init__(identifier, **kwargs) def compare(self, other: "MetricsRangeAlert") -> bool: + """Compare two MetricRangeAlerts""" return self.alert.compare(other) if super().compare(other) else False @classmethod @@ -179,7 +180,7 @@ def new( "range_low": range_low, "range_high": range_high, } - _alert = MetricsThresholdAlert( + return MetricsRangeAlert( name=name, description=description, notification=notification, @@ -187,9 +188,8 @@ def new( enabled=enabled, alert=_alert_definition, _read_only=False, + _offline=offline, ) - _alert.offline_mode(offline) - return _alert class MetricsAlertDefinition: @@ -200,6 +200,7 @@ def __init__(self, alert: MetricsRangeAlert) -> None: self._sv_obj = alert def compare(self, other: "MetricsAlertDefinition") -> bool: + """Compare a MetricsAlertDefinition with 
another""" return all( [ self.aggregation == other.aggregation, @@ -256,6 +257,7 @@ class MetricThresholdAlertDefinition(MetricsAlertDefinition): """Alert definition for metric threshold alerts""" def compare(self, other: "MetricThresholdAlertDefinition") -> bool: + """Compare this MetricThresholdAlertDefinition with another""" if not isinstance(other, MetricThresholdAlertDefinition): return False @@ -273,6 +275,7 @@ class MetricRangeAlertDefinition(MetricsAlertDefinition): """Alert definition for metric range alerts""" def compare(self, other: "MetricRangeAlertDefinition") -> bool: + """Compare a MetricRangeAlertDefinition with another""" if not isinstance(other, MetricRangeAlertDefinition): return False diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 03faff8e..d91f11dd 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -53,21 +53,21 @@ def new( whether this alert should be created locally, default is False """ - _alert = UserAlert( + return UserAlert( name=name, description=description, notification=notification, source="user", enabled=enabled, _read_only=False, + _offline=offline, ) - _alert.offline_mode(offline) - return _alert @classmethod def get( cls, count: int | None = None, offset: int | None = None ) -> dict[str, typing.Any]: + """Return only UserAlerts""" raise NotImplementedError("Retrieve of only user alerts is not yet supported") @pydantic.validate_call diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index b85d8c98..cfa3ce23 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -77,8 +77,8 @@ def new( mime_type=mime_type, metadata=metadata, _read_only=False, + _offline=offline, ) - _artifact.offline_mode(offline) if offline: return _artifact diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 3a3dca98..c49f5c4d 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -129,6 
+129,7 @@ def __init__( _read_only: bool = True, _local: bool = False, _user_agent: str | None = None, + _offline: bool = False, **kwargs, ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") @@ -143,14 +144,14 @@ def __init__( for name, member in inspect.getmembers(self.__class__) if isinstance(member, property) ] - self._offline: bool = identifier is not None and identifier.startswith( - "offline_" + self._offline: bool = _offline or ( + identifier is not None and identifier.startswith("offline_") ) _config_args = { "server_url": kwargs.pop("server_url", None), "server_token": kwargs.pop("server_token", None), - "mode": kwargs.pop("mode", "online"), + "mode": "offline" if self._offline else "online", } self._user_config = SimvueConfiguration.fetch(**_config_args) @@ -271,9 +272,6 @@ def _clear_staging(self) -> None: with self._local_staging_file.open("w") as out_f: json.dump(_staged_data, out_f, indent=2) - def offline_mode(self, is_true: bool) -> None: - self._offline = is_true - def _get_visibility(self) -> dict[str, bool | list[str]]: try: return self._get_attribute("visibility") @@ -281,7 +279,6 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: return {} @classmethod - @abc.abstractmethod def new(cls, **_) -> Self: pass diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py index d1d6fab7..aa02a140 100644 --- a/simvue/api/objects/events.py +++ b/simvue/api/objects/events.py @@ -62,13 +62,12 @@ def get( @pydantic.validate_call def new(cls, *, run_id: str, offline: bool = False, events: list[EventSet]): """Create a new Events entry on the Simvue server""" - _events = Events( + return Events( run=run_id, events=[event.model_dump() for event in events], _read_only=False, + _offline=offline, ) - _events.offline_mode(offline) - return _events def _post(self, **kwargs) -> dict[str, typing.Any]: return super()._post(is_json=False, **kwargs) diff --git a/simvue/api/objects/folder.py 
b/simvue/api/objects/folder.py index cae9353d..03ea3296 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -58,9 +58,7 @@ def new( **_, ): """Create a new Folder on the Simvue server with the given path""" - _folder = Folder(path=path, _read_only=False) - _folder.offline_mode(offline) - return _folder + return Folder(path=path, _read_only=False, _offline=offline) @property @staging_check diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index ca7519da..f419c434 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -38,13 +38,12 @@ def __init__( @pydantic.validate_call def new(cls, *, run_id: str, offline: bool = False, metrics: list[MetricSet]): """Create a new Events entry on the Simvue server""" - _events = Metrics( + return Metrics( run=run_id, metrics=[metric.model_dump() for metric in metrics], _read_only=False, + _offline=offline, ) - _events.offline_mode(offline) - return _events @classmethod @pydantic.validate_call diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 239b893a..83b18f71 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -1,3 +1,12 @@ +""" +Simvue Runs +=========== + +Contains a class for remotely connecting to Simvue runs, or defining +a new run given relevant arguments. + +""" + import http import typing import pydantic @@ -25,6 +34,8 @@ class Run(SimvueObject): + """Class for interacting with/creating runs on the server.""" + def __init__(self, identifier: str | None = None, **kwargs) -> None: """Initialise a Run @@ -57,10 +68,27 @@ def new( offline: bool = False, **_, ) -> Self: - """Create a new Folder on the Simvue server with the given path""" - _run = Run(folder=folder, system=None, status="created", _read_only=False) - _run.offline_mode(offline) - return _run + """Create a new Run on the Simvue server. 
+ + Parameters + ---------- + folder : str + folder to contain this run + offline : bool, optional + create the run in offline mode, default False. + + Returns + ------- + Run + run object with staged changes + """ + return Run( + folder=folder, + system=None, + status="created", + _read_only=False, + _offline=offline, + ) @property @staging_check diff --git a/simvue/api/objects/stats.py b/simvue/api/objects/stats.py index 3bc13c67..85819a95 100644 --- a/simvue/api/objects/stats.py +++ b/simvue/api/objects/stats.py @@ -17,6 +17,8 @@ class Stats(SimvueObject): + """Class for accessing Server statistics.""" + def __init__(self) -> None: self.runs = RunStatistics(self) self._label = "stat" @@ -28,6 +30,7 @@ def __init__(self) -> None: @classmethod def new(cls, **kwargs) -> None: + """Creation of multiple stats objects is not logical here""" raise AttributeError("Creation of statistics objects is not supported") def whoami(self) -> dict[str, str]: @@ -40,47 +43,53 @@ def whoami(self) -> dict[str, str]: scenario="Retrieving current user", ) - def offline_mode(self, is_true: bool) -> None: - if is_true: - raise AttributeError("Statistics only available online") - def _get_run_stats(self) -> dict[str, int]: + """Retrieve the run statistics""" return self._get_attribute("runs") def _get_local_staged(self) -> dict[str, typing.Any]: + """No staging for stats so returns empty dict""" return {} def _get_visibility(self) -> dict[str, bool | list[str]]: + """Visibility does not apply here""" return {} def to_dict(self) -> dict[str, typing.Any]: + """Returns dictionary form of statistics""" return {"runs": self._get_run_stats()} class RunStatistics: + """Interface to the run section of statistics output""" + def __init__(self, sv_obj: Stats) -> None: self._sv_obj = sv_obj @property def created(self) -> int: + """Number of created runs""" if (_created := self._sv_obj._get_run_stats().get("created")) is None: raise RuntimeError("Expected key 'created' in run statistics retrieval") 
return _created @property def running(self) -> int: + """Number of running runs""" if (_running := self._sv_obj._get_run_stats().get("running")) is None: raise RuntimeError("Expected key 'running' in run statistics retrieval") return _running @property def completed(self) -> int: + """Number of completed runs""" if (_completed := self._sv_obj._get_run_stats().get("running")) is None: raise RuntimeError("Expected key 'completed' in run statistics retrieval") return _completed @property def data(self) -> int: + """Data count""" if (_data := self._sv_obj._get_run_stats().get("running")) is None: raise RuntimeError("Expected key 'data' in run statistics retrieval") return _data diff --git a/simvue/api/objects/storage/__init__.py b/simvue/api/objects/storage/__init__.py index a89d5690..cfb43777 100644 --- a/simvue/api/objects/storage/__init__.py +++ b/simvue/api/objects/storage/__init__.py @@ -1,3 +1,13 @@ +""" +Simvue Storage +============== + +Contains classes for interacting with Simvue storage objects, +the storage types are split into classes to ensure correct +inputs are provided and the relevant properties are made available. + +""" + from .file import FileStorage as FileStorage from .s3 import S3Storage as S3Storage from .fetch import Storage as Storage diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index 4ed3c59a..ce71c8f0 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -1,13 +1,27 @@ +""" +Simvue Storage Base +=================== + +Contains general definitions for Simvue Storage objects. +""" + import typing import pydantic import datetime +import abc from simvue.api.objects.base import SimvueObject, staging_check, write_only from simvue.models import NAME_REGEX, DATETIME_FORMAT class StorageBase(SimvueObject): + """Storage object base class from which all storage types inherit. + + This represents a single storage backend used to store uploaded artifacts. 
+ + """ + def __init__( self, identifier: str | None = None, @@ -20,7 +34,9 @@ def __init__( super().__init__(identifier, _read_only=_read_only, **kwargs) @classmethod + @abc.abstractmethod def new(cls, **kwargs): + """Create a new instance of a storage type""" pass @property diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 40fe6ae1..e1ea9b01 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -36,6 +36,22 @@ def __new__(cls, identifier: str | None = None, **kwargs): def get( cls, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, FileStorage | S3Storage], None, None]: + """Returns storage systems accessible to the current user. + + Parameters + ---------- + count : int, optional + limit the number of results, default of None returns all. + offset : int, optional + start index for returned results, default of None starts at 0. + + Yields + ------ + tuple[str, FileStorage | S3Storage] + identifier for a storage + the storage itself as a class instance + """ + # Currently no storage filters kwargs.pop("filters", None) diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index 4981132d..365262d8 100644 --- a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -1,3 +1,11 @@ +""" +Simvue File Storage +=================== + +Class for interacting with a file based storage on the server. + +""" + import typing try: @@ -11,6 +19,8 @@ class FileStorage(StorageBase): + """Class for defining/accessing a File based storage system on the server.""" + @classmethod @pydantic.validate_call def new( @@ -23,8 +33,29 @@ def new( default: bool, offline: bool = False, ) -> Self: - """Create a new file storage object""" - _storage = FileStorage( + """Create a new file storage object. 
+ + Parameters + ---------- + name : str + name to allocate to this storage system + disable_check : bool + whether to disable checks for this system + tenant_usable : bool + whether this system is usable by the current tenant + enabled : bool + whether to enable this system + default : bool + if this storage system should become the new default + offline : bool, optional + if this instance should be created in offline mode, default False + + Returns + ------- + FileStorage + instance of storage system with staged changes + """ + return FileStorage( name=name, backend="File", disable_check=disable_check, @@ -32,6 +63,5 @@ def new( is_default=default, is_enabled=enabled, _read_only=False, + _offline=offline, ) - _storage.offline_mode(offline) - return _storage diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index 435b5dfc..eb9bb0a6 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -1,3 +1,11 @@ +""" +Simvue S3 Storage +================= + +Class for interacting with an S3 based storage on the server. + +""" + import typing try: @@ -13,7 +21,10 @@ class S3Storage(StorageBase): + """Class for defining/accessing an S3 based storage system on the server.""" + def __init__(self, identifier: str | None = None, **kwargs) -> None: + """Initialise an S3Storage instance attaching a configuration""" self.config = Config(self) super().__init__(identifier, **kwargs) @@ -31,9 +42,42 @@ def new( bucket: str, tenant_usable: bool, default: bool, + enabled: bool, offline: bool = False, ) -> Self: - """Create a new S3 storage object""" + """Create a new S3 storage object. 
+ + Parameters + ---------- + name : str + name to allocate to this storage system + disable_check : bool + whether to disable checks for this system + endpoint_url : str + endpoint defining the S3 upload URL + region_name : str + the region name associated with this storage system + access_key_id : str + the access key identifier for the storage + secret_access_key : str + the secret access key, stored as a secret string + bucket : str + the bucket associated with this storage system + tenant_usable : bool + whether this system is usable by the current tenant + enabled : bool + whether to enable this system + default : bool + if this storage system should become the new default + offline : bool, optional + if this instance should be created in offline mode, default False + + Returns + ------- + S3Storage + instance of storage system with staged changes + + """ _config: dict[str, str] = { "endpoint_url": endpoint_url.__str__(), "region_name": region_name, @@ -41,17 +85,17 @@ "secret_access_key": secret_access_key.get_secret_value(), "bucket": bucket, } - _storage = S3Storage( + return S3Storage( name=name, backend="S3", config=_config, disable_check=disable_check, tenant_useable=tenant_usable, default=default, + enabled=enabled, _read_only=False, + _offline=offline, ) - _storage.offline_mode(offline) - return _storage @staging_check def get_config(self) -> dict[str, typing.Any]: @@ -63,12 +107,16 @@ class Config: + """S3 Configuration interface""" + def __init__(self, storage: S3Storage) -> None: + """Initialise a new configuration using an S3Storage object""" self._sv_obj = storage @property @staging_check def endpoint_url(self) -> str: + """Retrieve the endpoint URL for this storage""" try: return self._sv_obj.get_config()["endpoint_url"] except KeyError as e: @@ -80,12 +128,14 @@ @write_only @pydantic.validate_call def endpoint_url(self, endpoint_url: pydantic.HttpUrl) -> 
None: + """Modify the endpoint URL for this storage""" _config = self._sv_obj.get_config() | {"endpoint_url": endpoint_url.__str__()} self._sv_obj._staging["config"] = _config @property @staging_check def region_name(self) -> str: + """Retrieve the region name for this storage""" try: return self._sv_obj.get_config()["region_name"] except KeyError as e: @@ -97,12 +147,14 @@ def region_name(self) -> str: @write_only @pydantic.validate_call def region_name(self, region_name: str) -> None: + """Modify the region name for this storage""" _config = self._sv_obj.get_config() | {"region_name": region_name} self._sv_obj._staging["config"] = _config @property @staging_check def bucket(self) -> str: + """Retrieve the bucket label for this storage""" try: return self._sv_obj.get_config()["bucket"] except KeyError as e: @@ -114,6 +166,7 @@ def bucket(self) -> str: @write_only @pydantic.validate_call def bucket(self, bucket: str) -> None: + """Modify the bucket label for this storage""" if self._sv_obj.type == "file": raise ValueError( f"Cannot set attribute 'bucket' for storage type '{self._sv_obj.type}'" diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 0e1f6c79..c97df7ab 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -17,46 +17,65 @@ class Tag(SimvueObject): + """Class for creation/interaction with tag object on server""" + @classmethod @pydantic.validate_call def new(cls, *, name: str, offline: bool = False, **_): - """Create a new Tag on the Simvue server""" + """Create a new Tag on the Simvue server. + + Parameters + ---------- + name : str + name for the tag + offline : bool, optional + create this tag in offline mode, default False. 
+ + Returns + ------- + Tag + tag object with staged attributes + """ _data: dict[str, typing.Any] = {"name": name} - _tag = Tag(name=name, _read_only=False) - _tag.offline_mode(offline) - return _tag + return Tag(name=name, _read_only=False, _offline=offline) @property @staging_check def name(self) -> str: + """Retrieve the tag name""" return self._get_attribute("name") @name.setter @write_only @pydantic.validate_call def name(self, name: str) -> None: + """Set the tag name""" self._staging["name"] = name @property @staging_check def color(self) -> pydantic.color.RGBA: + """Retrieve the tag colour""" return pydantic.color.parse_str(self._get_attribute("colour")) @color.setter @write_only @pydantic.validate_call def color(self, color: pydantic.color.Color) -> None: + """Set the tag colour""" self._staging["colour"] = color.as_hex() @property @staging_check def description(self) -> str: + """Get description for this tag""" return self._get_attribute("description") @description.setter @write_only @pydantic.validate_call def description(self, description: str) -> None: + """Set the description for this tag""" self._staging["description"] = description @property @@ -71,6 +90,21 @@ def created(self) -> datetime.datetime | None: def get( cls, *, count: int | None = None, offset: int | None = None, **kwargs ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: + """Get tags from the server. + + Parameters + ---------- + count : int, optional + limit the number of objects returned, default no limit. + offset : int, optional + start index for results, default is 0. + + Yields + ------ + tuple[str, Tag] + id of tag + Tag object representing object on server + """ # There are currently no tag filters kwargs.pop("filters", None) diff --git a/simvue/api/url.py b/simvue/api/url.py index 43a83ef6..a22480a4 100644 --- a/simvue/api/url.py +++ b/simvue/api/url.py @@ -1,3 +1,11 @@ +""" +URL Library +=========== + +Module contains classes for easier handling of URLs. 
+ +""" + try: from typing import Self except ImportError: @@ -9,8 +17,11 @@ class URL: + """URL class for ease of construction and use of server endpoints.""" + @pydantic.validate_call def __init__(self, url: str) -> None: + """Initialise a url from string form""" url = url[:-1] if url.endswith("/") else url _url = urllib.parse.urlparse(url) @@ -21,12 +32,14 @@ def __init__(self, url: str) -> None: self._fragment: str = _url.fragment def __truediv__(self, other: str) -> Self: + """Define URL extension through use of '/'""" _new = copy.deepcopy(self) _new /= other return _new @pydantic.validate_call def __itruediv__(self, other: str) -> Self: + """Define URL extension through use of '/'""" other = other[1:] if other.startswith("/") else other other = other[:-1] if other.endswith("/") else other @@ -54,6 +67,7 @@ def port(self) -> int | None: return self._port def __str__(self) -> str: + """Construct string form of the URL""" _out_str: str = "" if self.scheme: _out_str += f"{self.scheme}://" diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 743259ae..ca686a91 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -19,7 +19,8 @@ def test_create_s3_online() -> None: region_name="fictionsville", access_key_id="dummy_key", secret_access_key="not_a_key", - bucket="dummy_bucket" + bucket="dummy_bucket", + enabled=True ) _storage.commit() assert _storage.to_dict() From ba6677759a2704a599872a0f852a5b02d28a2404 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 30 Jan 2025 15:03:40 +0000 Subject: [PATCH 134/163] Added refresh method --- simvue/api/objects/base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 9871924e..b5d74e02 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -507,6 +507,10 @@ def _get( ) return _json_response + def refresh(self) -> None: + if self._read_only: + self._staging = self._get() + def 
_cache(self) -> None: if not (_dir := self._local_staging_file.parent).exists(): _dir.mkdir(parents=True) From 67bd63c82929185d719eecf701a9bbb76129993b Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 30 Jan 2025 18:46:59 +0000 Subject: [PATCH 135/163] Tons of test fixes --- simvue/api/objects/alert/fetch.py | 8 +++ simvue/api/objects/tag.py | 8 +-- simvue/sender.py | 23 ++++++-- tests/unit/test_folder.py | 20 ++++--- tests/unit/test_metric_range_alert.py | 34 +++++++---- tests/unit/test_metric_threshold_alert.py | 38 +++++++----- tests/unit/test_run.py | 6 +- tests/unit/test_s3_storage.py | 38 +++++++++--- tests/unit/test_tag.py | 72 ++++++++++++++++++----- tests/unit/test_user_alert.py | 69 +++++++++++++++++----- 10 files changed, 232 insertions(+), 84 deletions(-) diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index d6f54b16..b02fb614 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -28,6 +28,14 @@ class Alert: def __new__(cls, identifier: str, **kwargs) -> AlertType: """Retrieve an object representing an alert either locally or on the server by id""" _alert_pre = AlertBase(identifier=identifier, **kwargs) + if ( + identifier is not None + and identifier.startswith("offline_") + and not _alert_pre._staging.get("source", None) + ): + raise RuntimeError( + "Cannot determine Alert type - this is likely because you are attempting to reconnect to an offline alert which has already been sent to the server. To fix this, use the exact Alert type instead (eg MetricThresholdAlert, MetricRangeAlert etc)." 
+ ) if _alert_pre.source == "events": return EventsAlert(identifier=identifier, **kwargs) elif _alert_pre.source == "metrics" and _alert_pre.get_alert().get("threshold"): diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py index 6da0b9f7..86408c4d 100644 --- a/simvue/api/objects/tag.py +++ b/simvue/api/objects/tag.py @@ -39,14 +39,14 @@ def name(self, name: str) -> None: @property @staging_check - def color(self) -> pydantic.color.RGBA: + def colour(self) -> pydantic.color.RGBA: return pydantic.color.parse_str(self._get_attribute("colour")) - @color.setter + @colour.setter @write_only @pydantic.validate_call - def color(self, color: pydantic.color.Color) -> None: - self._staging["colour"] = color.as_hex() + def colour(self, colour: pydantic.color.Color) -> None: + self._staging["colour"] = colour.as_hex() @property @staging_check diff --git a/simvue/sender.py b/simvue/sender.py index b3ca0b7e..1c422455 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -60,9 +60,13 @@ def upload_cached_file( except AttributeError as e: raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e # We want to reconnect if there is an online ID stored for this file - obj_for_upload = _instance_class.new( - identifier=id_mapping.get(_current_id, None), **_data - ) + if _online_id := id_mapping.get(_current_id, None): + obj_for_upload = _instance_class( + identifier=_online_id, _read_only=False, **_data + ) + else: + obj_for_upload = _instance_class.new(**_data) + with lock: obj_for_upload.on_reconnect(id_mapping) @@ -86,7 +90,7 @@ def upload_cached_file( with lock: id_mapping[_current_id] = _new_id - if obj_type in ["alerts", "runs"]: + if obj_type in ["alerts", "runs", "folders", "tags"]: cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) if ( @@ -104,7 +108,10 @@ def upload_cached_file( @pydantic.validate_call def sender( - cache_dir: pydantic.DirectoryPath, max_workers: int, threading_threshold: int + cache_dir: 
pydantic.DirectoryPath, + max_workers: int, + threading_threshold: int, + objects_to_upload: list[str] = UPLOAD_ORDER, ): """Send data from a local cache directory to the Simvue server. @@ -116,6 +123,8 @@ def sender( The maximum number of threads to use threading_threshold : int The number of cached files above which threading will be used + objects_to_upload : list[str] + Types of objects to upload, by default uploads all types of objects present in cache """ cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) _id_mapping: dict[str, str] = { @@ -123,8 +132,9 @@ def sender( for file_path in cache_dir.glob("server_ids/*.txt") } _lock = threading.Lock() + _upload_order = [item for item in UPLOAD_ORDER if item in objects_to_upload] - for _obj_type in UPLOAD_ORDER: + for _obj_type in _upload_order: _offline_files = list(cache_dir.glob(f"{_obj_type}/*.json")) if len(_offline_files) < threading_threshold: for file_path in _offline_files: @@ -141,3 +151,4 @@ def sender( ), _offline_files, ) + return _id_mapping diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py index 75559b37..a029563c 100644 --- a/tests/unit/test_folder.py +++ b/tests/unit/test_folder.py @@ -42,7 +42,7 @@ def test_folder_creation_offline() -> None: assert _folder._local_staging_file.name.split(".")[0] == _folder.id assert _local_data.get("path", None) == _path - sender(_folder._local_staging_file.parents[1], 2, 10) + sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"]) time.sleep(1) client = Client() @@ -85,7 +85,14 @@ def test_folder_modification_offline() -> None: _tags = ["testing", "api"] _folder = Folder.new(path=_path, offline=True) _folder.commit() + + sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"]) time.sleep(1) + + client = Client() + _folder_online = client.get_folder(_path) + assert _folder_online.path == _path + _folder_new = Folder(identifier=_folder.id) _folder_new.read_only(False) _folder_new.tags = _tags @@ -94,21 +101,20 
@@ def test_folder_modification_offline() -> None: with _folder._local_staging_file.open() as in_f: _local_data = json.load(in_f) - assert _folder._local_staging_file.name.split(".")[0] == _folder.id - assert _local_data.get("path", None) == _path assert _local_data.get("description", None) == _description assert _local_data.get("tags", None) == _tags - sender(_folder._local_staging_file.parents[1], 2, 10) + sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"]) time.sleep(1) - client = Client() - _folder_online = client.get_folder(_path) + _folder_online.refresh() assert _folder_online.path == _path assert _folder_online.description == _description assert _folder_online.tags == _tags - _folder_new.delete() + + _folder_online.read_only(False) + _folder_online.delete() @pytest.mark.api diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py index f183e737..4079acb7 100644 --- a/tests/unit/test_metric_range_alert.py +++ b/tests/unit/test_metric_range_alert.py @@ -62,7 +62,7 @@ def test_metric_range_alert_creation_offline() -> None: assert _local_data.get("name") == f"metrics_range_alert_{_uuid}" assert _local_data.get("notification") == "none" assert _local_data.get("alert").get("range_low") == 10 - sender(_alert._local_staging_file.parents[1], 1, 10) + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) time.sleep(1) # Get online ID and retrieve alert @@ -124,31 +124,39 @@ def test_metric_range_alert_modification_offline() -> None: offline=True ) _alert.commit() + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) time.sleep(1) - _new_alert = Alert(_alert.id) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == 
f"metrics_range_alert_{_uuid}" + assert _online_alert.alert.range_low == 10 + + _new_alert = MetricsRangeAlert(_alert.id) _new_alert.read_only(False) - assert isinstance(_new_alert, MetricsRangeAlert) _new_alert.description = "updated!" _new_alert.commit() + # Since changes havent been sent, check online run not updated + _online_alert.refresh() + assert _online_alert.description != "updated!" + with _alert._local_staging_file.open() as in_f: _local_data = json.load(in_f) assert _local_data.get("description") == "updated!" - sender(_alert._local_staging_file.parents[1], 1, 10) + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) time.sleep(1) - # Get online ID and retrieve alert - _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_alert = Alert(_online_id) - - assert _online_alert.source == "metrics" - assert _online_alert.alert.frequency == 1 - assert _online_alert.name == f"metrics_range_alert_{_uuid}" - assert _online_alert.alert.range_low == 10 + _online_alert.refresh() assert _online_alert.description == "updated!" 
- _alert.delete() + _online_alert.read_only(False) + _online_alert.delete() _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() @pytest.mark.api diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index 26b3f9af..9b5c2318 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -61,7 +61,7 @@ def test_metric_threshold_alert_creation_offline() -> None: assert _local_data.get("notification") == "none" assert _local_data.get("alert").get("threshold") == 10 - sender(_alert._local_staging_file.parents[1], 1, 10) + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) time.sleep(1) # Get online ID and retrieve alert @@ -121,31 +121,41 @@ def test_metric_threshold_alert_modification_offline() -> None: description="a metric threshold alert" ) _alert.commit() - time.sleep(1) - _new_alert = Alert(_alert.id) + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = MetricsThresholdAlert(_online_id) + + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_threshold_alert_{_uuid}" + assert _online_alert.alert.threshold == 10 + + _new_alert = MetricsThresholdAlert(_alert.id) _new_alert.read_only(False) assert isinstance(_new_alert, MetricsThresholdAlert) _new_alert.description = "updated!" _new_alert.commit() + # Since changes havent been sent, check online run not updated + _online_alert.refresh() + assert _online_alert.description != "updated!" + with _alert._local_staging_file.open() as in_f: _local_data = json.load(in_f) assert _local_data.get("description") == "updated!" 
- sender(_alert._local_staging_file.parents[1], 1, 10) - time.sleep(1) - - # Get online ID and retrieve alert - _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() - _online_alert = Alert(_online_id) + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) - assert _online_alert.source == "metrics" - assert _online_alert.alert.frequency == 1 - assert _online_alert.name == f"metrics_threshold_alert_{_uuid}" - assert _online_alert.alert.threshold == 10 + _online_alert.refresh() assert _online_alert.description == "updated!" - _alert.delete() + _online_alert.read_only(False) + _online_alert.delete() _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() @pytest.mark.api diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 4bb5e49c..86054ed9 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -40,7 +40,7 @@ def test_run_creation_offline() -> None: assert _local_data.get("name") == f"simvue_offline_run_{_uuid}" assert _local_data.get("folder") == _folder_name - sender(_run._local_staging_file.parents[1], 1, 10) + sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"]) time.sleep(1) # Get online ID and retrieve run @@ -119,7 +119,7 @@ def test_run_modification_offline() -> None: assert _new_run.description == "Simvue test run" assert _new_run.name == "simvue_test_run" - sender(_run._local_staging_file.parents[1], 1, 10) + sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"]) time.sleep(1) # Get online ID and retrieve run @@ -139,7 +139,7 @@ def test_run_modification_offline() -> None: _online_run.refresh() assert _online_run.tags == [] - sender(_new_run._local_staging_file.parents[1], 1, 10) + sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"]) time.sleep(1) _online_run.refresh() diff --git 
a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 743259ae..6ab40d85 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -5,6 +5,7 @@ from simvue.api.objects import S3Storage from simvue.api.objects.storage.fetch import Storage +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -49,11 +50,32 @@ def test_create_s3_offline() -> None: offline=True ) _storage.commit() - assert _storage.name == _uuid - assert _storage.config.endpoint_url == "https://not-a-real-url.io" - assert _storage.config.region_name == "fictionsville" - assert _storage.config.bucket == "dummy_bucket" - assert not _storage.status - assert not _storage.user - assert not _storage.usage - _storage.delete() + with _storage._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == _uuid + assert _local_data.get("config").get("endpoint_url") == "https://not-a-real-url.io/" + assert _local_data.get("config").get("region_name") == "fictionsville" + assert _local_data.get("config").get("bucket") == "dummy_bucket" + assert not _local_data.get("status", None) + assert not _local_data.get("user", None) + assert not _local_data.get("usage", None) + + _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10) + _online_id = _id_mapping(_storage.id) + time.sleep(1) + + _online_storage = S3Storage(_online_id) + + assert _online_storage.name == _uuid + assert _online_storage.config.endpoint_url == "https://not-a-real-url.io/" + assert _local_data.config.region_name == "fictionsville" + assert _local_data.config.bucket == "dummy_bucket" + + _online_storage.read_only(False) + _online_storage.delete() + + + + + + \ No newline at end of file diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py index 06a3c9aa..1ddac7b1 100644 --- a/tests/unit/test_tag.py +++ b/tests/unit/test_tag.py @@ -3,7 +3,9 @@ import pytest import uuid import json +import pydantic.color from simvue.api.objects.tag 
import Tag +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -12,7 +14,7 @@ def test_tag_creation_online() -> None: _tag = Tag.new(name=f"test_tag_{_uuid}") _tag.commit() assert _tag.name == f"test_tag_{_uuid}" - assert _tag.color + assert _tag.colour assert not _tag.description _tag.delete() @@ -26,14 +28,25 @@ def test_tag_creation_offline() -> None: assert _tag.name == f"test_tag_{_uuid}" with pytest.raises(AttributeError): - _tag.color - - _tag.delete() + _tag.colour with _tag._local_staging_file.open() as in_f: _local_data = json.load(in_f) - - assert not _local_data.get(_tag._label, {}).get(_tag.id) + + assert _local_data.get("name") == f"test_tag_{_uuid}" + + _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"]) + time.sleep(1) + + _online_id = _id_mapping.get(_tag.id) + + _online_tag = Tag(_online_id) + assert _online_tag.name == f"test_tag_{_uuid}" + _online_tag.read_only(False) + _online_tag.delete() + + + @pytest.mark.api @pytest.mark.online @@ -45,11 +58,11 @@ def test_tag_modification_online() -> None: _new_tag = Tag(_tag.id) _new_tag.read_only(False) _new_tag.name = _tag.name.replace("test", "test_modified") - _new_tag.color = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) + _new_tag.colour = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) _new_tag.description = "modified test tag" _new_tag.commit() assert _new_tag.name == f"test_modified_tag_{_uuid}" - assert _new_tag.color.r == 250 / 255 + assert _new_tag.colour.r == 250 / 255 assert _new_tag.description == "modified test tag" @@ -59,16 +72,47 @@ def test_tag_modification_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _tag = Tag.new(name=f"test_tag_{_uuid}", offline=True) _tag.commit() - time.sleep(1) + + with _tag._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("name") == f"test_tag_{_uuid}" + + _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"]) + _online_id = 
_id_mapping.get(_tag.id) + _online_tag = Tag(_online_id) + + assert _online_tag.name == f"test_tag_{_uuid}" + _new_tag = Tag(_tag.id) + _new_tag.read_only(False) _new_tag.name = _tag.name.replace("test", "test_modified") - _new_tag.color = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) + _new_tag.colour = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) _new_tag.description = "modified test tag" _new_tag.commit() - assert _new_tag.name == f"test_modified_tag_{_uuid}" - assert _new_tag.color.r == 250 / 255 - assert _new_tag.description == "modified test tag" - _tag.delete() + + # Check since not yet sent, online not changed + _online_tag.refresh() + assert _online_tag.name == f"test_tag_{_uuid}" + + with _tag._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == f"test_modified_tag_{_uuid}" + assert pydantic.color.parse_str(_local_data.get("colour")).r == 250 / 255 + assert _local_data.get("description") == "modified test tag" + + sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"]) + time.sleep(1) + + # Check online version is updated + _online_tag.refresh() + assert _online_tag.name == f"test_modified_tag_{_uuid}" + assert _online_tag.colour.r == 250 / 255 + assert _online_tag.description == "modified test tag" + + _online_tag.read_only(False) + _online_tag.delete() + @pytest.mark.api @pytest.mark.online diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index f45a5b4e..0f658ef0 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -3,7 +3,7 @@ import contextlib import pytest import uuid - +from simvue.sender import sender from simvue.api.objects import Alert, UserAlert, Run from simvue.api.objects.folder import Folder @@ -31,20 +31,36 @@ def test_user_alert_creation_offline() -> None: _alert = UserAlert.new( name=f"users_alert_{_uuid}", notification="none", - offline=True + offline=True, + description = "test user alert" ) _alert.commit() assert _alert.source 
== "user" assert _alert.name == f"users_alert_{_uuid}" assert _alert.notification == "none" - _alert.delete() with _alert._local_staging_file.open() as in_f: _local_data = json.load(in_f) - assert not _local_data.get(_alert._label, {}).get(_alert.id) - + assert _local_data.get("source") == "user" + assert _local_data.get("name") == f"users_alert_{_uuid}" + assert _local_data.get("notification") == "none" + _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "user" + assert _online_alert.name == f"users_alert_{_uuid}" + assert _online_alert.notification == "none" + + _online_alert.read_only(False) + _online_alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + + @pytest.mark.api @pytest.mark.online def test_user_alert_modification_online() -> None: @@ -73,20 +89,43 @@ def test_user_alert_modification_offline() -> None: _alert = UserAlert.new( name=f"users_alert_{_uuid}", notification="none", - offline=True + offline=True, + description = "test user alert" ) _alert.commit() - time.sleep(1) - _new_alert = Alert(_alert.id) - assert isinstance(_new_alert, UserAlert) + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = UserAlert(_online_id) + + assert _online_alert.source == "user" + assert _online_alert.name == f"users_alert_{_uuid}" + assert _online_alert.notification == "none" + + _new_alert = UserAlert(_alert.id) + _new_alert.read_only(False) _new_alert.description = "updated!" 
- - with pytest.raises(AttributeError): - assert _new_alert.description - _new_alert.commit() - assert _new_alert.description == "updated!" - _new_alert.delete() + + # Since changes havent been sent, check online run not updated + _online_alert.refresh() + assert _online_alert.description != "updated!" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("description") == "updated!" + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + _online_alert.refresh() + assert _online_alert.description == "updated!" + + _online_alert.read_only(False) + _online_alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() @pytest.mark.api @pytest.mark.online From 9758009934b25cfa06c1555ef5725f09d93eb1a6 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 31 Jan 2025 16:37:55 +0000 Subject: [PATCH 136/163] Fix S3 test --- simvue/api/objects/base.py | 1 - simvue/api/objects/storage/base.py | 8 +- simvue/api/objects/storage/file.py | 4 +- simvue/api/objects/storage/s3.py | 6 +- tests/unit/test_artifact.py | 170 ++++++++++++++--------------- tests/unit/test_file_storage.py | 4 +- tests/unit/test_s3_storage.py | 14 +-- 7 files changed, 104 insertions(+), 103 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index b5d74e02..1bf36f5b 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -405,7 +405,6 @@ def url(self) -> URL | None: def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: if not is_json: kwargs = msgpack.packb(kwargs, use_bin_type=True) - _response = sv_post( url=f"{self._base_url}", headers=self._headers | {"Content-Type": "application/msgpack"}, diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index f3ca5e14..c9d4793d 100644 --- a/simvue/api/objects/storage/base.py +++ 
b/simvue/api/objects/storage/base.py @@ -58,16 +58,16 @@ def default(self, is_default: bool) -> None: @property @staging_check - def tenant_usable(self) -> bool: + def tenant_useable(self) -> bool: """Retrieve if this is usable by the current user tenant""" return self._get_attribute("is_tenant_useable") - @tenant_usable.setter + @tenant_useable.setter @write_only @pydantic.validate_call - def tenant_usable(self, is_tenant_usable: bool) -> None: + def tenant_useable(self, is_tenant_useable: bool) -> None: """Set this storage to be usable by the current user tenant""" - self._staging["is_tenant_useable"] = is_tenant_usable + self._staging["is_tenant_useable"] = is_tenant_useable @property @staging_check diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index 4981132d..57ceb415 100644 --- a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -18,7 +18,7 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], disable_check: bool, - tenant_usable: bool, + tenant_useable: bool, enabled: bool, default: bool, offline: bool = False, @@ -28,7 +28,7 @@ def new( name=name, backend="File", disable_check=disable_check, - is_tenant_useable=tenant_usable, + is_tenant_useable=tenant_useable, is_default=default, is_enabled=enabled, _read_only=False, diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index 435b5dfc..81eb5503 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -29,9 +29,10 @@ def new( access_key_id: str, secret_access_key: pydantic.SecretStr, bucket: str, - tenant_usable: bool, + tenant_useable: bool, default: bool, offline: bool = False, + **__, ) -> Self: """Create a new S3 storage object""" _config: dict[str, str] = { @@ -46,10 +47,11 @@ def new( backend="S3", config=_config, disable_check=disable_check, - tenant_useable=tenant_usable, + tenant_useable=tenant_useable, default=default, _read_only=False, ) + 
_storage._staging |= _config _storage.offline_mode(offline) return _storage diff --git a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index cfe6d188..d34dbc89 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -1,95 +1,95 @@ -# import os -# import pytest -# import uuid -# import time -# import pathlib -# import tempfile -# import numpy +import os +import pytest +import uuid +import time +import pathlib +import tempfile +import numpy -# from simvue.api.objects import Artifact, Run -# from simvue.api.objects.folder import Folder -# from simvue.sender import sender -# from simvue.client import Client +from simvue.api.objects import Artifact, Run +from simvue.api.objects.folder import Folder +from simvue.sender import sender +from simvue.client import Client -# @pytest.mark.api -# @pytest.mark.online -# def test_artifact_creation_online() -> None: -# _uuid: str = f"{uuid.uuid4()}".split("-")[0] -# _folder_name = f"/simvue_unit_testing/{_uuid}" -# _folder = Folder.new(path=_folder_name) -# _run = Run.new(folder=_folder_name) -# _folder.commit() -# _run.commit() +@pytest.mark.api +@pytest.mark.online +def test_artifact_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() -# _failed = [] + _failed = [] -# with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: -# _path = pathlib.Path(temp_f.name) -# with _path.open("w") as out_f: -# out_f.write(f"Hello World! 
{_uuid}") -# _artifact = Artifact.new_file( -# name=f"test_artifact_{_uuid}", -# file_path=_path, -# storage=None, -# mime_type=None, -# metadata=None -# ) -# _artifact.attach_to_run(_run.id, "input") -# time.sleep(1) -# for member in _artifact._properties: -# try: -# getattr(_artifact, member) -# except Exception as e: -# _failed.append((member, f"{e}")) -# assert _artifact.name == f"test_artifact_{_uuid}" -# _content = b"".join(_artifact.download_content()).decode("UTF-8") -# assert _content == f"Hello World! {_uuid}" -# assert _artifact.to_dict() -# _test_array = numpy.array(range(10)) -# _artifact = Artifact.new_object( -# name=f"test_artifact_obj_{_uuid}", -# storage=None, -# obj=_test_array, -# metadata=None -# ) -# _artifact.attach_to_run(_run.id, "output") -# _run.delete() -# _folder.delete(recursive=True, delete_runs=True, runs_only=False) -# if _failed: -# raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) + with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: + _path = pathlib.Path(temp_f.name) + with _path.open("w") as out_f: + out_f.write(f"Hello World! {_uuid}") + _artifact = Artifact.new_file( + name=f"test_artifact_{_uuid}", + file_path=_path, + storage=None, + mime_type=None, + metadata=None + ) + _artifact.attach_to_run(_run.id, "input") + time.sleep(1) + for member in _artifact._properties: + try: + getattr(_artifact, member) + except Exception as e: + _failed.append((member, f"{e}")) + assert _artifact.name == f"test_artifact_{_uuid}" + _content = b"".join(_artifact.download_content()).decode("UTF-8") + assert _content == f"Hello World! 
{_uuid}" + assert _artifact.to_dict() + _test_array = numpy.array(range(10)) + _artifact = Artifact.new_object( + name=f"test_artifact_obj_{_uuid}", + storage=None, + obj=_test_array, + metadata=None + ) + _artifact.attach_to_run(_run.id, "output") + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + if _failed: + raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) -# @pytest.mark.api -# @pytest.mark.offline -# def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: -# _uuid: str = f"{uuid.uuid4()}".split("-")[0] -# _folder_name = f"/simvue_unit_testing/{_uuid}" -# _folder = Folder.new(path=_folder_name, offline=True) -# _run = Run.new(name=f"test_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) +@pytest.mark.api +@pytest.mark.offline +def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(name=f"test_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) -# _path = offline_test.joinpath("hello_world.txt") + _path = offline_test.joinpath("hello_world.txt") -# with _path.open("w") as out_f: -# out_f.write("Hello World!") + with _path.open("w") as out_f: + out_f.write("Hello World!") -# _folder.commit() -# _run.commit() -# _artifact = Artifact.new_file( -# name=f"test_artifact_{_uuid}", -# file_path=_path, -# storage=None, -# mime_type=None, -# offline=True, -# metadata=None -# ) -# _artifact.attach_to_run(_run._identifier, category="input") -# assert _artifact.name == f"test_artifact_{_uuid}" -# sender(offline_test.joinpath(".simvue"), 1, 10) -# time.sleep(1) -# client = Client() -# _run_id = client.get_run_id_from_name(f"test_artifact_creation_offline_{_uuid}") -# client.get_artifact_as_file(_run_id, _artifact.name, offline_test.joinpath("downloaded").mkdir()) -# assert 
offline_test.joinpath("downloaded.txt").read_text() == "Hello World!" -# _run.delete() -# _folder.delete() + _folder.commit() + _run.commit() + _artifact = Artifact.new_file( + name=f"test_artifact_{_uuid}", + file_path=_path, + storage=None, + mime_type=None, + offline=True, + metadata=None + ) + _artifact.attach_to_run(_run._identifier, category="input") + assert _artifact.name == f"test_artifact_{_uuid}" + sender(offline_test.joinpath(".simvue"), 1, 10) + time.sleep(1) + client = Client() + _run_id = client.get_run_id_from_name(f"test_artifact_creation_offline_{_uuid}") + client.get_artifact_as_file(_run_id, _artifact.name, offline_test.joinpath("downloaded").mkdir()) + assert offline_test.joinpath("downloaded.txt").read_text() == "Hello World!" + _run.delete() + _folder.delete() diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index fa217216..23e7966a 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -10,7 +10,7 @@ def test_create_file_storage_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = FileStorage.new( - name=_uuid, disable_check=False, tenant_usable=False, default=False, enabled=True) + name=_uuid, disable_check=False, tenant_useable=False, default=False, enabled=True) _storage.commit() assert _storage.enabled assert _storage.name == _uuid @@ -22,7 +22,7 @@ def test_create_file_storage_online() -> None: @pytest.mark.offline def test_create_file_storage_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_usable=False, default=False, offline=True, enabled=True) + _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_useable=False, default=False, offline=True, enabled=True) _storage.commit() assert _storage.name == _uuid _storage.delete() diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 6ab40d85..a4b441fb 100644 --- 
a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -15,7 +15,7 @@ def test_create_s3_online() -> None: name=_uuid, endpoint_url="https://not-a-real-url.io", disable_check=True, - tenant_usable=False, + tenant_useable=False, default=False, region_name="fictionsville", access_key_id="dummy_key", @@ -40,13 +40,13 @@ def test_create_s3_offline() -> None: _storage = S3Storage.new( name=_uuid, endpoint_url="https://not-a-real-url.io", - disable_check=False, + disable_check=True, region_name="fictionsville", access_key_id="dummy_key", secret_access_key="not_a_key", bucket="dummy_bucket", default=False, - tenant_usable=False, + tenant_useable=False, offline=True ) _storage.commit() @@ -60,16 +60,16 @@ def test_create_s3_offline() -> None: assert not _local_data.get("user", None) assert not _local_data.get("usage", None) - _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10) - _online_id = _id_mapping(_storage.id) + _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"]) + _online_id = _id_mapping[_storage.id] time.sleep(1) _online_storage = S3Storage(_online_id) assert _online_storage.name == _uuid assert _online_storage.config.endpoint_url == "https://not-a-real-url.io/" - assert _local_data.config.region_name == "fictionsville" - assert _local_data.config.bucket == "dummy_bucket" + assert _online_storage.config.region_name == "fictionsville" + assert _online_storage.config.bucket == "dummy_bucket" _online_storage.read_only(False) _online_storage.delete() From 3579395cf2b817286d20d954b9166cdb8b2277a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 31 Jan 2025 16:49:56 +0000 Subject: [PATCH 137/163] Fix missing return statement for folder --- simvue/api/objects/folder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 03ea3296..e3d2ae2c 100644 --- a/simvue/api/objects/folder.py +++ 
b/simvue/api/objects/folder.py @@ -58,7 +58,7 @@ def new( **_, ): """Create a new Folder on the Simvue server with the given path""" - Folder(path=path, _read_only=False, _offline=offline) + return Folder(path=path, _read_only=False, _offline=offline) @property @staging_check From f569ddb41e306b06916b889f688a08fe32027fb6 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 31 Jan 2025 17:17:12 +0000 Subject: [PATCH 138/163] Deleted stuff from bin sender and set defaults --- simvue/bin/sender.py | 32 +------------------------------- simvue/sender.py | 8 +++++--- tests/unit/test_s3_storage.py | 3 ++- 3 files changed, 8 insertions(+), 35 deletions(-) diff --git a/simvue/bin/sender.py b/simvue/bin/sender.py index 67bc1078..6aac6be1 100644 --- a/simvue/bin/sender.py +++ b/simvue/bin/sender.py @@ -1,35 +1,5 @@ """Send runs to server""" -import getpass -import logging -import os -import sys -import tempfile - from simvue.sender import sender -from simvue.utilities import create_file, remove_file - -logger = logging.getLogger() -logger.setLevel(logging.DEBUG) - -handler = logging.StreamHandler(sys.stdout) -handler.setLevel(logging.INFO) -formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") -handler.setFormatter(formatter) -logger.addHandler(handler) - - -def run() -> None: - lockfile = os.path.join(tempfile.gettempdir(), f"simvue-{getpass.getuser()}.lock") - - if os.path.isfile(lockfile): - logger.error("Cannot initiate run, locked by other process.") - sys.exit(1) - - create_file(lockfile) - try: - sender() - except Exception as err: - logger.critical("Exception running sender: %s", str(err)) - remove_file(lockfile) +sender() diff --git a/simvue/sender.py b/simvue/sender.py index 1c422455..77123e23 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -11,6 +11,7 @@ from concurrent.futures import ThreadPoolExecutor import threading from simvue.api.objects.base import SimvueObject +from simvue.config.user import SimvueConfiguration import 
simvue.api.objects @@ -108,9 +109,9 @@ def upload_cached_file( @pydantic.validate_call def sender( - cache_dir: pydantic.DirectoryPath, - max_workers: int, - threading_threshold: int, + cache_dir: pydantic.DirectoryPath | None = None, + max_workers: int = 5, + threading_threshold: int = 10, objects_to_upload: list[str] = UPLOAD_ORDER, ): """Send data from a local cache directory to the Simvue server. @@ -126,6 +127,7 @@ def sender( objects_to_upload : list[str] Types of objects to upload, by default uploads all types of objects present in cache """ + cache_dir = cache_dir or SimvueConfiguration.fetch().offline.cache cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) _id_mapping: dict[str, str] = { file_path.name.split(".")[0]: file_path.read_text() diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 91cb0a6b..503bb22e 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -21,7 +21,7 @@ def test_create_s3_online() -> None: access_key_id="dummy_key", secret_access_key="not_a_key", bucket="dummy_bucket", - enabled=True + enabled=False ) _storage.commit() assert _storage.to_dict() @@ -48,6 +48,7 @@ def test_create_s3_offline() -> None: bucket="dummy_bucket", default=False, tenant_useable=False, + enabled=False, offline=True ) _storage.commit() From ec78959e589358c83494c91bbb8d9fde02e966f7 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 31 Jan 2025 18:49:59 +0000 Subject: [PATCH 139/163] Lots of renaming to allow offline to work --- simvue/api/objects/__init__.py | 2 + simvue/api/objects/administrator/__init__.py | 5 +++ simvue/api/objects/administrator/tenant.py | 14 +++---- simvue/api/objects/administrator/user.py | 41 +++++++++++--------- simvue/api/objects/storage/base.py | 18 ++++----- simvue/api/objects/storage/file.py | 15 +++---- tests/unit/test_file_storage.py | 29 ++++++++++++-- tests/unit/test_metric_threshold_alert.py | 3 +- tests/unit/test_tenant.py | 21 ++++++++-- 
tests/unit/test_user.py | 41 ++++++++++++++------ 10 files changed, 127 insertions(+), 62 deletions(-) diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 7772bda3..49b3f1ed 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -8,6 +8,7 @@ """ +from .administrator import Tenant as Tenant, User as User from .alert import ( Alert as Alert, EventsAlert as EventsAlert, @@ -20,6 +21,7 @@ FileStorage as FileStorage, Storage as Storage, ) + from .stats import Stats as Stats from .artifact import Artifact as Artifact from .run import Run as Run diff --git a/simvue/api/objects/administrator/__init__.py b/simvue/api/objects/administrator/__init__.py index abb2e0db..315fc0c2 100644 --- a/simvue/api/objects/administrator/__init__.py +++ b/simvue/api/objects/administrator/__init__.py @@ -9,3 +9,8 @@ from .tenant import Tenant as Tenant from .user import User as User + +__all__ = [ + "Tenant", + "User", +] diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py index b5439d1e..88dd3a14 100644 --- a/simvue/api/objects/administrator/tenant.py +++ b/simvue/api/objects/administrator/tenant.py @@ -25,7 +25,7 @@ def new( cls, *, name: str, - enabled: bool = True, + is_enabled: bool = True, max_request_rate: int = 0, max_runs: int = 0, max_data_volume: int = 0, @@ -39,7 +39,7 @@ def new( ---------- name: str the name for this tenant - enabled: bool, optional + is_enabled: bool, optional whether to enable the tenant on creation, default is True max_request_rate: int, optional the maximum request rate allowed for this tenant, default is no limit. 
@@ -58,7 +58,7 @@ def new( """ return Tenant( name=name, - is_enabled=enabled, + is_enabled=is_enabled, max_request_rate=max_request_rate, max_runs=max_runs, max_data_volume=max_data_volume, @@ -80,16 +80,16 @@ def name(self, name: str) -> None: @property @staging_check - def enabled(self) -> bool: + def is_enabled(self) -> bool: """Retrieve if tenant is enabled""" return self._get_attribute("is_enabled") - @enabled.setter + @is_enabled.setter @write_only @pydantic.validate_call - def enabled(self, enabled: bool) -> None: + def is_enabled(self, is_enabled: bool) -> None: """Enable/disable tenant""" - self._staging["is_enabled"] = enabled + self._staging["is_enabled"] = is_enabled @property @staging_check diff --git a/simvue/api/objects/administrator/user.py b/simvue/api/objects/administrator/user.py index 6ecc7b18..34136561 100644 --- a/simvue/api/objects/administrator/user.py +++ b/simvue/api/objects/administrator/user.py @@ -27,13 +27,14 @@ def new( username: str, fullname: str, email: pydantic.EmailStr, - manager: bool, - admin: bool, - readonly: bool, + is_manager: bool, + is_admin: bool, + is_readonly: bool, welcome: bool, tenant: str, enabled: bool = True, offline: bool = False, + **_, ) -> Self: """Create a new user on the Simvue server. 
@@ -47,11 +48,11 @@ def new( the full name for this user email: str the email for this user - manager : bool + is_manager : bool assign the manager role to this user - admin : bool + is_admin : bool assign the administrator role to this user - readonly : bool + is_readonly : bool given only read access to this user welcome : bool display welcome message to user @@ -72,19 +73,21 @@ def new( "username": username, "fullname": fullname, "email": email, - "is_manager": manager, - "is_readonly": readonly, + "is_manager": is_manager, + "is_readonly": is_readonly, "welcome": welcome, - "is_admin": admin, + "is_admin": is_admin, "is_enabled": enabled, } - return User( + _user = User( user=_user_info, tenant=tenant, offline=offline, _read_only=False, _offline=offline, ) + _user._staging |= _user_info + return _user @classmethod def get( @@ -140,31 +143,31 @@ def fullname(self, fullname: str) -> None: @property @staging_check - def manager(self) -> bool: + def is_manager(self) -> bool: """Retrieve if the user has manager privileges""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_manager"] return self._get_attribute("is_manager") - @manager.setter + @is_manager.setter @write_only @pydantic.validate_call - def manager(self, is_manager: bool) -> None: + def is_manager(self, is_manager: bool) -> None: """Set if the user has manager privileges""" self._staging["is_manager"] = is_manager @property @staging_check - def admin(self) -> bool: + def is_admin(self) -> bool: """Retrieve if the user has admin privileges""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_admin"] return self._get_attribute("is_admin") - @admin.setter + @is_admin.setter @write_only @pydantic.validate_call - def admin(self, is_admin: bool) -> None: + def is_admin(self, is_admin: bool) -> None: """Set if the user has admin privileges""" self._staging["is_admin"] = is_admin @@ -177,16 +180,16 @@ def deleted(self) -> bool: @property 
@staging_check - def readonly(self) -> bool: + def is_readonly(self) -> bool: """Retrieve if the user has read-only access""" if self.id and self.id.startswith("offline_"): return self._get_attribute("user")["is_readonly"] return self._get_attribute("is_readonly") - @readonly.setter + @is_readonly.setter @write_only @pydantic.validate_call - def readonly(self, is_readonly: bool) -> None: + def is_readonly(self, is_readonly: bool) -> None: """Set if the user has read-only access""" self._staging["is_readonly"] = is_readonly diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index e33960ad..1cf56d9e 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -61,40 +61,40 @@ def backend(self) -> str: @property @staging_check - def default(self) -> bool: + def is_default(self) -> bool: """Retrieve if this is the default storage for the user""" return self._get_attribute("is_default") - @default.setter + @is_default.setter @write_only @pydantic.validate_call - def default(self, is_default: bool) -> None: + def is_default(self, is_default: bool) -> None: """Set this storage to be the default""" self._staging["is_default"] = is_default @property @staging_check - def tenant_useable(self) -> bool: + def is_tenant_useable(self) -> bool: """Retrieve if this is usable by the current user tenant""" return self._get_attribute("is_tenant_useable") - @tenant_useable.setter + @is_tenant_useable.setter @write_only @pydantic.validate_call - def tenant_useable(self, is_tenant_useable: bool) -> None: + def is_tenant_useable(self, is_tenant_useable: bool) -> None: """Set this storage to be usable by the current user tenant""" self._staging["is_tenant_useable"] = is_tenant_useable @property @staging_check - def enabled(self) -> bool: + def is_enabled(self) -> bool: """Retrieve if this is enabled""" return self._get_attribute("is_enabled") - @enabled.setter + @is_enabled.setter @write_only @pydantic.validate_call - def 
enabled(self, is_enabled: bool) -> None: + def is_enabled(self, is_enabled: bool) -> None: """Set this storage to be usable by the current user tenant""" self._staging["is_enabled"] = is_enabled diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py index 9fc85a85..821b9099 100644 --- a/simvue/api/objects/storage/file.py +++ b/simvue/api/objects/storage/file.py @@ -28,10 +28,11 @@ def new( *, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], disable_check: bool, - tenant_useable: bool, - enabled: bool, - default: bool, + is_tenant_useable: bool, + is_enabled: bool, + is_default: bool, offline: bool = False, + **_, ) -> Self: """Create a new file storage object. @@ -43,7 +44,7 @@ def new( whether to disable checks for this system tenant_usable : bool whether this system is usable by the current tenant - enabled : bool + is_enabled : bool whether to enable this system default : bool if this storage system should become the new default @@ -59,9 +60,9 @@ def new( name=name, backend="File", disable_check=disable_check, - is_tenant_useable=tenant_useable, - is_default=default, - is_enabled=enabled, + is_tenant_useable=is_tenant_useable, + is_default=is_default, + is_enabled=is_enabled, _read_only=False, _offline=offline, ) diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py index 23e7966a..46f053cd 100644 --- a/tests/unit/test_file_storage.py +++ b/tests/unit/test_file_storage.py @@ -4,16 +4,19 @@ import uuid from simvue.api.objects import FileStorage +from simvue.sender import sender @pytest.mark.api @pytest.mark.online def test_create_file_storage_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _storage = FileStorage.new( - name=_uuid, disable_check=False, tenant_useable=False, default=False, enabled=True) + name=_uuid, disable_check=False, is_tenant_useable=False, is_default=False, is_enabled=False) _storage.commit() - assert _storage.enabled + assert _storage.is_enabled == False 
assert _storage.name == _uuid + assert _storage.is_default == False + assert _storage.to_dict() _storage.delete() @@ -22,7 +25,25 @@ def test_create_file_storage_online() -> None: @pytest.mark.offline def test_create_file_storage_offline() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] - _storage = FileStorage.new(name=_uuid, disable_check=False, tenant_useable=False, default=False, offline=True, enabled=True) + _storage = FileStorage.new(name=_uuid, disable_check=True, is_tenant_useable=False, is_default=False, offline=True, is_enabled=False) + _storage.commit() assert _storage.name == _uuid - _storage.delete() + assert _storage.is_enabled == False + assert _storage.is_default == False + + with _storage._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == _uuid + assert _local_data.get("is_enabled") == False + assert _local_data.get("is_default") == False + + _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"]) + time.sleep(1) + _online_storage = FileStorage(_id_mapping.get(_storage.id)) + assert _online_storage.name == _uuid + assert _online_storage.is_enabled == False + assert _online_storage.is_default == False + + _online_storage.read_only(False) + _online_storage.delete() \ No newline at end of file diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py index 9b5c2318..6d737945 100644 --- a/tests/unit/test_metric_threshold_alert.py +++ b/tests/unit/test_metric_threshold_alert.py @@ -73,7 +73,8 @@ def test_metric_threshold_alert_creation_offline() -> None: assert _online_alert.name == f"metrics_threshold_alert_{_uuid}" assert _online_alert.alert.threshold == 10 - _alert.delete() + _online_alert.read_only(False) + _online_alert.delete() _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py index 
2c164ca0..82cf001d 100644 --- a/tests/unit/test_tenant.py +++ b/tests/unit/test_tenant.py @@ -5,7 +5,7 @@ import uuid from simvue.api.objects.administrator import Tenant - +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -20,7 +20,7 @@ def test_create_tenant_online() -> None: time.sleep(1) _new_tenant = Tenant(_tenant.id) assert _new_tenant.name == _uuid - assert _new_tenant.enabled + assert _new_tenant.is_enabled _new_tenant.delete() @@ -33,8 +33,21 @@ def test_create_tenant_offline() -> None: time.sleep(1) _new_tenant = Tenant(_tenant.id) assert _new_tenant.name == _uuid - assert _new_tenant.enabled - _new_tenant.delete() + assert _new_tenant.is_enabled + + with _new_tenant._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == _uuid + assert _local_data.get("is_enabled") == True + + _id_mapping = sender(_new_tenant._local_staging_file.parents[1], 1, 10, ["tenants"]) + time.sleep(1) + _online_user = Tenant(_id_mapping.get(_new_tenant.id)) + assert _online_user.name == _uuid + assert _online_user.is_enabled == True + + _online_user.read_only(False) + _online_user.delete() @pytest.mark.api diff --git a/tests/unit/test_user.py b/tests/unit/test_user.py index f6bb3398..026db46f 100644 --- a/tests/unit/test_user.py +++ b/tests/unit/test_user.py @@ -5,7 +5,7 @@ import uuid from simvue.api.objects.administrator import User, Tenant - +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -21,9 +21,9 @@ def test_create_user_online() -> None: username="jbloggs", fullname="Joe Bloggs", email="jbloggs@simvue.io", - manager=False, - admin=False, - readonly=True, + is_manager=False, + is_admin=False, + is_readonly=True, welcome=False, tenant=_tenant.id ) @@ -44,15 +44,34 @@ def test_create_user_offline() -> None: username="jbloggs", fullname="Joe Bloggs", email="jbloggs@simvue.io", - manager=False, - admin=False, - readonly=True, + is_manager=False, + is_admin=False, + 
is_readonly=True, welcome=False, tenant=_uuid, offline=True ) _user.commit() - + assert _user.username == "jbloggs" + assert _user.fullname == "Joe Bloggs" + assert _user.email == "jbloggs@simvue.io" + + with _user._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("username") == "jbloggs" + assert _local_data.get("fullname") == "Joe Bloggs" + assert _local_data.get("email") == "jbloggs@simvue.io" + #### fixme + _id_mapping = sender(_user._local_staging_file.parents[1], 1, 10, ["users"]) + time.sleep(1) + _online_user = User(_id_mapping.get(_user.id)) + assert _online_user.username == "jbloggs" + assert _online_user.fullname == "Joe Bloggs" + assert _online_user.email == "jbloggs@simvue.io" + + _online_user.read_only(False) + _online_user.delete() + @pytest.mark.api @pytest.mark.online def test_user_get_properties() -> None: @@ -67,9 +86,9 @@ def test_user_get_properties() -> None: username="jbloggs", fullname="Joe Bloggs", email="jbloggs@simvue.io", - manager=False, - admin=False, - readonly=True, + is_manager=False, + is_admin=False, + is_readonly=True, welcome=False, tenant=_tenant.id ) From e93b373cd8ec2977348dea472fdb72e9aaf1a77f Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 31 Jan 2025 19:00:53 +0000 Subject: [PATCH 140/163] Changing enabled to is_enabled and similar --- simvue/api/objects/storage/s3.py | 20 ++++++++++---------- tests/unit/test_s3_storage.py | 14 ++++++++------ 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py index 709721ea..9d804b44 100644 --- a/simvue/api/objects/storage/s3.py +++ b/simvue/api/objects/storage/s3.py @@ -40,9 +40,9 @@ def new( access_key_id: str, secret_access_key: pydantic.SecretStr, bucket: str, - tenant_useable: bool, - default: bool, - enabled: bool, + is_tenant_useable: bool, + is_default: bool, + is_enabled: bool, offline: bool = False, **__, ) -> Self: @@ -66,12 +66,12 @@ def new( the 
bucket associated with this storage system tenant_usable : bool whether this system is usable by the current tenant - enabled : bool + is_enabled : bool whether to enable this system - default : bool - if this storage system should become the new default + is_default : bool + if this storage system should become the new is_default offline : bool, optional - if this instance should be created in offline mode, default False + if this instance should be created in offline mode, is_default False Returns ------- @@ -91,9 +91,9 @@ def new( backend="S3", config=_config, disable_check=disable_check, - tenant_useable=tenant_useable, - default=default, - enabled=enabled, + is_tenant_useable=is_tenant_useable, + is_default=is_default, + is_enabled=is_enabled, _read_only=False, _offline=offline, ) diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 503bb22e..9455246e 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -15,13 +15,13 @@ def test_create_s3_online() -> None: name=_uuid, endpoint_url="https://not-a-real-url.io", disable_check=True, - tenant_useable=False, - default=False, + is_tenant_useable=False, + is_default=False, region_name="fictionsville", access_key_id="dummy_key", secret_access_key="not_a_key", bucket="dummy_bucket", - enabled=False + is_enabled=False ) _storage.commit() assert _storage.to_dict() @@ -46,9 +46,9 @@ def test_create_s3_offline() -> None: access_key_id="dummy_key", secret_access_key="not_a_key", bucket="dummy_bucket", - default=False, - tenant_useable=False, - enabled=False, + is_default=False, + is_tenant_useable=False, + is_enabled=False, offline=True ) _storage.commit() @@ -58,6 +58,7 @@ def test_create_s3_offline() -> None: assert _local_data.get("config").get("endpoint_url") == "https://not-a-real-url.io/" assert _local_data.get("config").get("region_name") == "fictionsville" assert _local_data.get("config").get("bucket") == "dummy_bucket" + assert _local_data.get("is_enabled") == 
False assert not _local_data.get("status", None) assert not _local_data.get("user", None) assert not _local_data.get("usage", None) @@ -69,6 +70,7 @@ def test_create_s3_offline() -> None: _online_storage = S3Storage(_online_id) assert _online_storage.name == _uuid + assert _online_storage.is_enabled == False assert _online_storage.config.endpoint_url == "https://not-a-real-url.io/" assert _online_storage.config.region_name == "fictionsville" assert _online_storage.config.bucket == "dummy_bucket" From f46ae336dc1d8dfcd7e4f208b8eec4533a3d0770 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 4 Feb 2025 09:13:37 +0000 Subject: [PATCH 141/163] Review changes --- simvue/api/objects/alert/fetch.py | 4 +++- simvue/api/objects/run.py | 2 +- tests/unit/test_user.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 71faf147..dbf2e1e4 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -34,7 +34,9 @@ def __new__(cls, identifier: str, **kwargs) -> AlertType: and not _alert_pre._staging.get("source", None) ): raise RuntimeError( - "Cannot determine Alert type - this is likely because you are attempting to reconnect to an offline alert which has already been sent to the server. To fix this, use the exact Alert type instead (eg MetricThresholdAlert, MetricRangeAlert etc)." + "Cannot determine Alert type - this is likely because you are attempting to reconnect " + "to an offline alert which has already been sent to the server. To fix this, use the " + "exact Alert type instead (eg MetricThresholdAlert, MetricRangeAlert etc)." 
) if _alert_pre.source == "events": return EventsAlert(identifier=identifier, **kwargs) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 72a58462..c732b51d 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -352,7 +352,7 @@ def abort(self, reason: str) -> dict[str, typing.Any]: ) def on_reconnect(self, id_mapping: dict[str, str]): - online_alert_ids = [] + online_alert_ids: list[str] = [] for id in self._staging.get("alerts", []): try: online_alert_ids.append(id_mapping[id]) diff --git a/tests/unit/test_user.py b/tests/unit/test_user.py index 026db46f..97e503c3 100644 --- a/tests/unit/test_user.py +++ b/tests/unit/test_user.py @@ -61,7 +61,7 @@ def test_create_user_offline() -> None: assert _local_data.get("username") == "jbloggs" assert _local_data.get("fullname") == "Joe Bloggs" assert _local_data.get("email") == "jbloggs@simvue.io" - #### fixme + _id_mapping = sender(_user._local_staging_file.parents[1], 1, 10, ["users"]) time.sleep(1) _online_user = User(_id_mapping.get(_user.id)) From 780305f01bbae30d735768805aa8676068942b06 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 4 Feb 2025 11:48:10 +0000 Subject: [PATCH 142/163] Added sender bin script --- simvue/bin/sender.py | 43 +++++++++++++++++++++++++++++++++++++++++-- simvue/sender.py | 4 ++-- 2 files changed, 43 insertions(+), 4 deletions(-) diff --git a/simvue/bin/sender.py b/simvue/bin/sender.py index 6aac6be1..649b9d6f 100644 --- a/simvue/bin/sender.py +++ b/simvue/bin/sender.py @@ -1,5 +1,44 @@ """Send runs to server""" -from simvue.sender import sender +import logging -sender() +from simvue.sender import sender, UPLOAD_ORDER +import argparse + +_logger = logging.getLogger(__name__) +_logger.setLevel(logging.INFO) + + +def run() -> None: + parser = argparse.ArgumentParser(description="My script description") + parser.add_argument( + "-w", + "--max-workers", + type=int, + required=False, + default=5, + help="The maximum number of worker threads to use in 
parallel, by default 5", + ) + parser.add_argument( + "-n", + "--threading-threshold", + type=int, + required=False, + default=10, + help="The number of objects of a given type above which items will be sent to the server in parallel, by default 10", + ) + parser.add_argument( + "-o", + "--objects-to-upload", + type=str, + nargs="+", + required=False, + default=UPLOAD_ORDER, + help="The object types to upload, by default All", + ) + args = parser.parse_args() + try: + _logger.info("Starting Simvue Sender") + sender(**vars(args)) + except Exception as err: + _logger.critical("Exception running sender: %s", str(err)) diff --git a/simvue/sender.py b/simvue/sender.py index 77123e23..e66774d5 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -15,7 +15,7 @@ import simvue.api.objects -UPLOAD_ORDER: tuple[str, ...] = ( +UPLOAD_ORDER: list[str] = [ "tenants", "users", "storage", @@ -26,7 +26,7 @@ "artifacts", "metrics", "events", -) +] _logger = logging.getLogger(__name__) From 169027971035af98971954ae857a0958e1d91009 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Tue, 4 Feb 2025 11:50:54 +0000 Subject: [PATCH 143/163] Added more docstrings --- simvue/api/objects/run.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 83b18f71..63d0e617 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -93,6 +93,7 @@ def new( @property @staging_check def name(self) -> str: + """Retrieve name associated with this run""" return self._get_attribute("name") def delete(self, **kwargs) -> dict[str, typing.Any]: @@ -105,28 +106,33 @@ def delete(self, **kwargs) -> dict[str, typing.Any]: def name( self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] ) -> None: + """Set the name for this run.""" self._staging["name"] = name @property @staging_check def tags(self) -> list[str]: + """Retrieve the tags associated with this run.""" return 
self._get_attribute("tags") @tags.setter @write_only @pydantic.validate_call def tags(self, tags: list[str]) -> None: + """Set the tags for this run.""" self._staging["tags"] = tags @property @staging_check def status(self) -> Status: + """Get the run status.""" return self._get_attribute("status") @status.setter @write_only @pydantic.validate_call def status(self, status: Status) -> None: + """Set the run status.""" self._staging["status"] = status @property @@ -145,6 +151,7 @@ def ttl(self, time_seconds: pydantic.NonNegativeInt | None) -> None: @property @staging_check def folder(self) -> str: + """Get the folder associated with this run.""" return self._get_attribute("folder") @folder.setter @@ -153,43 +160,51 @@ def folder(self) -> str: def folder( self, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] ) -> None: + """Set the folder for this run.""" self._staging["folder"] = folder @property @staging_check def metadata(self) -> dict[str, typing.Any]: + """Get the metadata for this run.""" return self._get_attribute("metadata") @metadata.setter @write_only @pydantic.validate_call def metadata(self, metadata: dict[str, typing.Any]) -> None: + """Set the metadata for this run.""" self._staging["metadata"] = metadata @property @staging_check def description(self) -> str: + """Get the description for this run.""" return self._get_attribute("description") @description.setter @write_only @pydantic.validate_call def description(self, description: str | None) -> None: + """Set the description for this run.""" self._staging["description"] = description @property def system(self) -> dict[str, typing.Any]: + """Get the system metadata for this run.""" return self._get_attribute("system") @system.setter @write_only @pydantic.validate_call def system(self, system: dict[str, typing.Any]) -> None: + """Set the system metadata for this run.""" self._staging["system"] = system @property @staging_check def heartbeat_timeout(self) -> int | None: + """Get the 
timeout for the heartbeat of this run.""" return self._get_attribute("heartbeat_timeout") @heartbeat_timeout.setter From 0b706a5b01a0b76f0bfeb6f580967498ae587c4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 5 Feb 2025 13:55:45 +0000 Subject: [PATCH 144/163] Bug fixes and add missing uploaded=True to artifact upload --- simvue/api/objects/artifact.py | 54 ++++++++++++++++++----------- simvue/api/objects/storage/base.py | 2 -- simvue/api/objects/storage/fetch.py | 6 ++++ tests/conftest.py | 19 +++++++--- 4 files changed, 55 insertions(+), 26 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index cfa3ce23..d0c8f8b6 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -248,28 +248,42 @@ def _upload(self, file: io.BytesIO) -> None: super().commit() return - if _url := self._init_data.get("url"): - _name = self._staging["name"] - - _response = sv_post( - url=_url, - headers={}, - is_json=False, - files={"file": file}, - data=self._init_data.get("fields"), - ) + if not (_url := self._init_data.get("url")): + return - self._logger.debug( - "Got status code %d when uploading artifact", - _response.status_code, - ) + _name = self._staging["name"] - get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], - allow_parse_failure=True, # JSON response from S3 not parsible - scenario=f"uploading artifact '{_name}' to object storage", - response=_response, - ) + _response = sv_post( + url=_url, + headers={}, + is_json=False, + files={"file": file}, + data=self._init_data.get("fields"), + ) + + self._logger.debug( + "Got status code %d when uploading artifact", + _response.status_code, + ) + + get_json_from_response( + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], + allow_parse_failure=True, # JSON response from S3 not parsible + scenario=f"uploading artifact '{_name}' to object storage", + response=_response, + ) + + 
_response = sv_put( + url=f"{self.url}", + data={"uploaded": True}, + headers=self._headers, + ) + + get_json_from_response( + response=_response, + scenario=f"Information server of upload of file for artifact '{self._identifier}'", + expected_status=[http.HTTPStatus.OK], + ) def _get( self, storage: str | None = None, url: str | None = None, **kwargs diff --git a/simvue/api/objects/storage/base.py b/simvue/api/objects/storage/base.py index ce71c8f0..59d63c86 100644 --- a/simvue/api/objects/storage/base.py +++ b/simvue/api/objects/storage/base.py @@ -9,7 +9,6 @@ import pydantic import datetime -import abc from simvue.api.objects.base import SimvueObject, staging_check, write_only from simvue.models import NAME_REGEX, DATETIME_FORMAT @@ -34,7 +33,6 @@ def __init__( super().__init__(identifier, _read_only=_read_only, **kwargs) @classmethod - @abc.abstractmethod def new(cls, **kwargs): """Create a new instance of a storage type""" pass diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index e1ea9b01..2fc4aded 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -6,6 +6,7 @@ with an identifier, use a generic storage object. 
""" +import abc import typing import http import pydantic @@ -31,6 +32,11 @@ def __new__(cls, identifier: str | None = None, **kwargs): raise RuntimeError(f"Unknown backend '{_storage_pre.backend}'") + @classmethod + @abc.abstractmethod + def new(cls, **_) -> typing.Self: + pass + @classmethod @pydantic.validate_call def get( diff --git a/tests/conftest.py b/tests/conftest.py index f7afcdae..b5f2dca4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -57,7 +57,11 @@ def log_messages(caplog): @pytest.fixture def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with sv_run.Run() as run: - yield run, setup_test_run(run, True, request) + _test_run_data = setup_test_run(run, True, request) + yield run, _test_run_data + sv_api_obj.Folder(identifier=run._folder.id).delete(recursive=True, delete_runs=True, runs_only=False) + for alert_id in _test_run_data.get("alert_ids", []): + sv_api_obj.Alert(identifier=alert_id).delete() clear_out_files() @@ -134,21 +138,25 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur if run._dispatcher: run._dispatcher._max_buffer_size = MAX_BUFFER_SIZE + _alert_ids = [] + if create_objects: for i in range(5): run.log_event(f"{TEST_DATA['event_contains']} {i}") TEST_DATA['created_alerts'] = [] + for i in range(5): - run.create_event_alert( + _aid = run.create_event_alert( name=f"test_alert/alert_{i}/{fix_use_id}", frequency=1, pattern=TEST_DATA['event_contains'] ) TEST_DATA['created_alerts'].append(f"test_alert/alert_{i}/{fix_use_id}") + _alert_ids.append(_aid) - run.create_metric_threshold_alert( + _ta_id = run.create_metric_threshold_alert( name=f'test_alert/value_below_1/{fix_use_id}', frequency=1, rule='is below', @@ -156,7 +164,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur metric='metric_counter', window=2 ) - run.create_metric_range_alert( + _mr_id = run.create_metric_range_alert( 
name=f'test_alert/value_within_1/{fix_use_id}', frequency=1, rule = "is inside range", @@ -165,6 +173,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur metric='metric_counter', window=2 ) + _alert_ids += [_ta_id, _mr_id] TEST_DATA['created_alerts'] += [ f"test_alert/value_below_1/{fix_use_id}", f"test_alert/value_within_1/{fix_use_id}" @@ -204,6 +213,8 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur run.save_file(test_script, category="code", name="test_code_upload") TEST_DATA["file_3"] = "test_code_upload" + TEST_DATA["alert_ids"] = _alert_ids + return TEST_DATA From 3fb25f61342d40cd7f61f5f2e1da7eec8fbfd9e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 5 Feb 2025 14:26:36 +0000 Subject: [PATCH 145/163] Further fixes towards offline mode --- simvue/api/objects/artifact.py | 4 +-- simvue/api/objects/run.py | 4 --- simvue/api/objects/storage/fetch.py | 7 ++++- tests/functional/test_run_artifact_upload.py | 2 +- tests/functional/test_run_class.py | 27 ++++++++++++++++---- 5 files changed, 31 insertions(+), 13 deletions(-) diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact.py index 0c564357..3bb285c3 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact.py @@ -312,9 +312,9 @@ def original_path(self) -> str: return self._get_attribute("original_path") @property - def storage(self) -> str | None: + def storage_id(self) -> str | None: """Retrieve the storage identifier for this artifact""" - return self._get_attribute("storage") + return self._get_attribute("storage_id") @property def mime_type(self) -> str: diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index f8f2ee3d..9841ded5 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -285,16 +285,12 @@ def endtime(self, endtime: datetime.datetime) -> None: def metrics( self, ) -> typing.Generator[tuple[str, dict[str, int | float | 
bool]], None, None]: - if self._staged_metrics: - self._logger.warning(f"Uncommitted metrics found for run '{self.id}'") yield from self._get_attribute("metrics").items() @property def events( self, ) -> typing.Generator[tuple[str, dict[str, int | float | bool]], None, None]: - if self._staged_metrics: - self._logger.warning(f"Uncommitted metrics found for run '{self.id}'") yield from self._get_attribute("events").items() @write_only diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 2fc4aded..3e287b07 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -18,6 +18,11 @@ from .file import FileStorage from .base import StorageBase +try: + from typing import Self +except ImportError: + from typing_extensions import Self + class Storage: """Generic Simvue storage retrieval class""" @@ -34,7 +39,7 @@ def __new__(cls, identifier: str | None = None, **kwargs): @classmethod @abc.abstractmethod - def new(cls, **_) -> typing.Self: + def new(cls, **_) -> Self: pass @classmethod diff --git a/tests/functional/test_run_artifact_upload.py b/tests/functional/test_run_artifact_upload.py index f19c47c4..afdb840e 100644 --- a/tests/functional/test_run_artifact_upload.py +++ b/tests/functional/test_run_artifact_upload.py @@ -26,7 +26,7 @@ def test_add_artifact_to_run() -> None: _artifact = Artifact.new_file( name=f"test_{_uuid}", - storage_id=None, + storage=None, file_path=pathlib.Path(tempf.name), mime_type=None, metadata=None diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index e9204c35..7d749782 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -543,15 +543,24 @@ def test_save_file_online( @pytest.mark.run @pytest.mark.offline @pytest.mark.parametrize( - "preserve_path", (True, False), ids=("preserve_path", "modified_path") + "preserve_path,name,allow_pickle,empty_file,category", + [ + (False, None, False, False, 
"input"), + (True, None, False, False, "output"), + (False, "test_file", False, False, "code"), + (False, None, True, False, "input"), + (False, None, False, True, "code") + ], + ids=[f"scenario_{i}" for i in range(1, 6)] ) -@pytest.mark.parametrize("name", ("retrieved_test_file", None), ids=("named", "nameless")) -@pytest.mark.parametrize("category", ("input", "output", "code")) def test_save_file_offline( - create_plain_run_offline: tuple[sv_run.Run, dict], + create_plain_run_offline: typing.Tuple[sv_run.Run, dict], preserve_path: bool, name: str | None, - category: typing.Literal["input", "output", "code"] + allow_pickle: bool, + empty_file: bool, + category: typing.Literal["input", "output", "code"], + capfd, ) -> None: simvue_run, _ = create_plain_run_offline run_name = simvue_run._name @@ -563,6 +572,14 @@ def test_save_file_offline( ) as out_f: out_f.write("test data entry") + simvue_run.save_file( + out_name, + category=category, + file_type=file_type, + preserve_path=preserve_path, + name=name, + ) + simvue_run.save_file( out_name, category=category, From f0f18b01bc711dba61eabe762beb040aed61cde8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Wed, 5 Feb 2025 14:36:55 +0000 Subject: [PATCH 146/163] Added missing events unit test --- tests/unit/test_events.py | 32 ++++++++++++++++++++++++++++++++ tests/unit/test_s3_storage.py | 10 +++++++++- 2 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 tests/unit/test_events.py diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py new file mode 100644 index 00000000..205643e6 --- /dev/null +++ b/tests/unit/test_events.py @@ -0,0 +1,32 @@ +import contextlib +import json +import pytest +import time +import datetime +import uuid + +from simvue.api.objects import Events, Folder, Run +from simvue.models import DATETIME_FORMAT + +@pytest.mark.api +@pytest.mark.online +def test_events_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + 
_folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() + _timestamp = datetime.datetime.now().strftime(DATETIME_FORMAT) + _events = Events.new( + run=_run.id, + events=[ + {"message": "This is a test!", "timestamp": _timestamp} + ], + ) + assert _events.to_dict() + _events.commit() + assert _events.get(run_id=_run.id) + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py index 9455246e..8c80a75f 100644 --- a/tests/unit/test_s3_storage.py +++ b/tests/unit/test_s3_storage.py @@ -11,6 +11,7 @@ @pytest.mark.online def test_create_s3_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _failed = [] _storage = S3Storage.new( name=_uuid, endpoint_url="https://not-a-real-url.io", @@ -24,6 +25,11 @@ def test_create_s3_online() -> None: is_enabled=False ) _storage.commit() + for member in _storage._properties: + try: + getattr(_storage, member) + except Exception as e: + _failed.append((member, f"{e}")) assert _storage.to_dict() assert _storage.name == _uuid assert _storage.config.endpoint_url == "https://not-a-real-url.io/" @@ -32,6 +38,8 @@ def test_create_s3_online() -> None: assert _storage.created assert dict(Storage.get()) _storage.delete() + if _failed: + raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) @pytest.mark.api @@ -82,4 +90,4 @@ def test_create_s3_offline() -> None: - \ No newline at end of file + From eac2d2da91115aceb4d4443bf0ac83b3886d70e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Thu, 6 Feb 2025 07:30:05 +0000 Subject: [PATCH 147/163] Separate artifacts into subobjects --- simvue/api/objects/__init__.py | 6 +- simvue/api/objects/artifact/__init__.py | 5 + .../objects/{artifact.py => artifact/base.py} | 202 +----------------- simvue/api/objects/artifact/fetch.py | 112 
++++++++++ simvue/api/objects/artifact/file.py | 82 +++++++ simvue/api/objects/artifact/object.py | 86 ++++++++ simvue/api/objects/storage/fetch.py | 6 - simvue/run.py | 7 +- simvue/sender.py | 9 +- ...test_artifact.py => test_file_artifact.py} | 30 +-- 10 files changed, 314 insertions(+), 231 deletions(-) create mode 100644 simvue/api/objects/artifact/__init__.py rename simvue/api/objects/{artifact.py => artifact/base.py} (52%) create mode 100644 simvue/api/objects/artifact/fetch.py create mode 100644 simvue/api/objects/artifact/file.py create mode 100644 simvue/api/objects/artifact/object.py rename tests/unit/{test_artifact.py => test_file_artifact.py} (72%) diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py index 49b3f1ed..36950de4 100644 --- a/simvue/api/objects/__init__.py +++ b/simvue/api/objects/__init__.py @@ -21,9 +21,13 @@ FileStorage as FileStorage, Storage as Storage, ) +from .artifact import ( + FileArtifact as FileArtifact, + ObjectArtifact as ObjectArtifact, + Artifact as Artifact, +) from .stats import Stats as Stats -from .artifact import Artifact as Artifact from .run import Run as Run from .tag import Tag as Tag from .folder import Folder as Folder, get_folder_from_path as get_folder_from_path diff --git a/simvue/api/objects/artifact/__init__.py b/simvue/api/objects/artifact/__init__.py new file mode 100644 index 00000000..168fb3a8 --- /dev/null +++ b/simvue/api/objects/artifact/__init__.py @@ -0,0 +1,5 @@ +from .fetch import Artifact as Artifact +from .file import FileArtifact as FileArtifact +from .object import ObjectArtifact as ObjectArtifact + +__all__ = ["Artifact", "FileArtifact", "ObjectArtifact"] diff --git a/simvue/api/objects/artifact.py b/simvue/api/objects/artifact/base.py similarity index 52% rename from simvue/api/objects/artifact.py rename to simvue/api/objects/artifact/base.py index 3bb285c3..7a1a8224 100644 --- a/simvue/api/objects/artifact.py +++ b/simvue/api/objects/artifact/base.py @@ -9,24 +9,19 @@ 
import datetime import http import io -import pathlib import typing import pydantic -import os.path -import sys try: from typing import Self except ImportError: - from typing_extensions import Self + pass from simvue.api.url import URL from simvue.exception import ObjectNotFoundError -from simvue.models import NAME_REGEX, DATETIME_FORMAT -from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 +from simvue.models import DATETIME_FORMAT from simvue.api.objects.base import SimvueObject from simvue.api.objects.run import Run -from simvue.serialization import serialize_object from simvue.api.request import ( put as sv_put, get_json_from_response, @@ -41,12 +36,14 @@ DOWNLOAD_CHUNK_SIZE: int = 8192 -class Artifact(SimvueObject): +class ArtifactBase(SimvueObject): """Connect to/create an artifact locally or on the server""" def __init__( self, identifier: str | None = None, _read_only: bool = True, **kwargs ) -> None: + self._label = "artifact" + self._endpoint = f"{self._label}s" super().__init__(identifier=identifier, _read_only=_read_only, **kwargs) # If the artifact is an online instance, need a place to store the response @@ -55,161 +52,6 @@ def __init__( if not self._staging.get("runs", None): self._staging |= {"runs": {}} - @classmethod - def new( - cls, - *, - name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - checksum: str, - size: int, - storage: str | None = None, - mime_type: str | None = None, - original_path: pathlib.Path | None = None, - metadata: dict[str, typing.Any] | None, - offline: bool = False, - **kwargs, - ) -> Self: - _artifact = Artifact( - name=name, - checksum=checksum, - size=size, - original_path=f"{original_path or ''}", - storage=storage, - mime_type=mime_type, - metadata=metadata, - _read_only=False, - **kwargs, - _offline=offline, - ) - - if offline: - return _artifact - - # Firstly submit a request for a new artifact, remove the run IDs - # as these are not an argument for artifact creation - 
_post_args = _artifact._staging.copy() - _post_args.pop("runs", None) - _artifact._init_data = _artifact._post(**_post_args) - - return _artifact - - @classmethod - @pydantic.validate_call - def new_file( - cls, - *, - name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - storage: str | None, - file_path: pydantic.FilePath, - mime_type: str | None, - metadata: dict[str, typing.Any] | None, - offline: bool = False, - ) -> Self: - """Create a new artifact either locally or on the server - - Note all arguments are keyword arguments - - Parameters - ---------- - name : str - the name for this artifact - storage : str | None - the identifier for the storage location for this object - category : "code" | "input" | "output" - the category of this artifact - file_path : pathlib.Path | str - path to the file this artifact represents - mime_type : str | None - the mime type for this file, else this is determined - metadata : dict[str, Any] | None - supply metadata information for this artifact - offline : bool, optional - whether to define this artifact locally, default is False - - """ - _mime_type = mime_type or get_mimetype_for_file(file_path) - - if _mime_type not in get_mimetypes(): - raise ValueError(f"Invalid MIME type '{mime_type}' specified") - - _file_size = file_path.stat().st_size - _file_orig_path = file_path.expanduser().absolute() - _file_checksum = calculate_sha256(f"{file_path}", is_file=True) - - _artifact = Artifact.new( - name=name, - storage=storage, - original_path=os.path.expandvars(_file_orig_path), - size=_file_size, - mime_type=_mime_type, - checksum=_file_checksum, - offline=offline, - metadata=metadata, - ) - - with open(file_path, "rb") as out_f: - _artifact._upload(file=out_f) - - return _artifact - - @classmethod - @pydantic.validate_call - def new_object( - cls, - *, - name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - storage: str | None, - obj: typing.Any, - metadata: dict[str, typing.Any] | None, - 
allow_pickling: bool = True, - offline: bool = False, - ) -> Self: - """Create a new artifact either locally or on the server - - Note all arguments are keyword arguments - - Parameters - ---------- - name : str - the name for this artifact - storage : str | None - the identifier for the storage location for this object - obj : Any - object to serialize and upload - metadata : dict[str, Any] | None - supply metadata information for this artifact - allow_pickling : bool, optional - whether to allow the object to be pickled if no other - serialization found. Default is True - offline : bool, optional - whether to define this artifact locally, default is False - - """ - _serialization = serialize_object(obj, allow_pickling) - - if not _serialization or not (_serialized := _serialization[0]): - raise ValueError(f"Could not serialize object of type '{type(obj)}'") - - if not (_data_type := _serialization[1]) and not allow_pickling: - raise ValueError( - f"Could not serialize object of type '{type(obj)}' without pickling" - ) - - _checksum = calculate_sha256(_serialized, is_file=False) - - _artifact = Artifact.new( - name=name, - storage=storage, - original_path=None, - size=sys.getsizeof(_serialized), - mime_type=_data_type, - checksum=_checksum, - metadata=metadata, - ) - - _artifact._upload(file=io.BytesIO(_serialized)) - return _artifact - def commit(self) -> None: self._logger.info("Cannot call method 'commit' on write-once type 'Artifact'") @@ -339,40 +181,6 @@ def created(self) -> datetime.datetime | None: datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None ) - @classmethod - def from_name( - cls, run_id: str, name: str, **kwargs - ) -> typing.Union["Artifact", None]: - _temp = Artifact(**kwargs) - _url = URL(_temp._user_config.server.url) / f"runs/{run_id}/artifacts" - _response = sv_get(url=f"{_url}", params={"name": name}, headers=_temp._headers) - _json_response = get_json_from_response( - expected_type=list, - response=_response, - 
expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", - ) - - if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: - raise ObjectNotFoundError(_temp._label, name, extra=f"for run '{run_id}'") - - if (_n_res := len(_json_response)) > 1: - raise RuntimeError( - f"Expected single result for artifact '{name}' for run '{run_id}'" - f" but got {_n_res}" - ) - - _first_result: dict[str, typing.Any] = _json_response[0] - _artifact_id: str = _first_result.pop("id") - - return Artifact( - identifier=_artifact_id, - run=run_id, - **_first_result, - _read_only=True, - _local=True, - ) - @property def download_url(self) -> URL | None: """Retrieve the URL for downloading this artifact""" diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py new file mode 100644 index 00000000..4d0a0d0f --- /dev/null +++ b/simvue/api/objects/artifact/fetch.py @@ -0,0 +1,112 @@ +from simvue.api.objects.artifact.base import ArtifactBase +from .file import FileArtifact +from simvue.api.objects.artifact.object import ObjectArtifact +from simvue.api.request import get_json_from_response, get as sv_get +from simvue.api.url import URL +from simvue.exception import ObjectNotFoundError + +import http +import typing +import pydantic + +__all__ = ["Artifact"] + + +class Artifact: + """Generic Simvue artifact retrieval class""" + + def __new__(cls, identifier: str | None = None, **kwargs): + """Retrieve an object representing an Artifact by id""" + _storage_pre = ArtifactBase(identifier=identifier, **kwargs) + if _storage_pre.original_path: + return FileArtifact(identifier=identifier, **kwargs) + else: + return ObjectArtifact(identifier=identifier, **kwargs) + + @classmethod + def from_name( + cls, run_id: str, name: str, **kwargs + ) -> typing.Union[FileArtifact | ObjectArtifact, None]: + _temp = ArtifactBase(**kwargs) + _url = URL(_temp._user_config.server.url) / 
f"runs/{run_id}/artifacts" + _response = sv_get(url=f"{_url}", params={"name": name}, headers=_temp._headers) + _json_response = get_json_from_response( + expected_type=list, + response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", + ) + + if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: + raise ObjectNotFoundError(_temp._label, name, extra=f"for run '{run_id}'") + + if (_n_res := len(_json_response)) > 1: + raise RuntimeError( + f"Expected single result for artifact '{name}' for run '{run_id}'" + f" but got {_n_res}" + ) + + _first_result: dict[str, typing.Any] = _json_response[0] + _artifact_id: str = _first_result.pop("id") + + return Artifact.__new__( + identifier=_artifact_id, + run=run_id, + **_first_result, + _read_only=True, + _local=True, + ) + + @classmethod + @pydantic.validate_call + def get( + cls, count: int | None = None, offset: int | None = None, **kwargs + ) -> typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]: + """Returns artifacts associated with the current user. + + Parameters + ---------- + count : int, optional + limit the number of results, default of None returns all. + offset : int, optional + start index for returned results, default of None starts at 0. 
+ + Yields + ------ + tuple[str, FileArtifact | ObjectArtifact] + identifier for artifact + the artifact itself as a class instance + """ + + _class_instance = ArtifactBase(_local=True, _read_only=True, **kwargs) + _url = f"{_class_instance._base_url}" + _response = sv_get( + _url, + headers=_class_instance._headers, + params={"start": offset, "count": count}, + ) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + expected_type=list, + ) + + _out_dict: dict[str, FileArtifact | ObjectArtifact] = {} + + for _entry in _json_response: + _id = _entry.pop("id") + if _entry["original_path"]: + yield ( + _id, + FileArtifact( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) + else: + yield ( + _id, + ObjectArtifact( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py new file mode 100644 index 00000000..784cd6b0 --- /dev/null +++ b/simvue/api/objects/artifact/file.py @@ -0,0 +1,82 @@ +from .base import ArtifactBase + +import typing +import pydantic +import os + +from simvue.models import NAME_REGEX +from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + + +class FileArtifact(ArtifactBase): + @classmethod + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + storage: str | None, + file_path: pydantic.FilePath, + mime_type: str | None, + metadata: dict[str, typing.Any] | None, + offline: bool = False, + ) -> Self: + """Create a new artifact either locally or on the server + + Note all arguments are keyword arguments + + Parameters + ---------- + name : str + the name for this artifact + storage : str | None + the identifier for the storage location for this object + category 
: "code" | "input" | "output" + the category of this artifact + file_path : pathlib.Path | str + path to the file this artifact represents + mime_type : str | None + the mime type for this file, else this is determined + metadata : dict[str, Any] | None + supply metadata information for this artifact + offline : bool, optional + whether to define this artifact locally, default is False + + """ + _mime_type = mime_type or get_mimetype_for_file(file_path) + + if _mime_type not in get_mimetypes(): + raise ValueError(f"Invalid MIME type '{mime_type}' specified") + + _file_size = file_path.stat().st_size + _file_orig_path = file_path.expanduser().absolute() + _file_checksum = calculate_sha256(f"{file_path}", is_file=True) + + _artifact = FileArtifact( + name=name, + storage=storage, + original_path=os.path.expandvars(_file_orig_path), + size=_file_size, + mime_type=_mime_type, + checksum=_file_checksum, + offline=offline, + metadata=metadata, + ) + + if offline: + return _artifact + + # Firstly submit a request for a new artifact, remove the run IDs + # as these are not an argument for artifact creation + _post_args = _artifact._staging.copy() + _post_args.pop("runs", None) + _artifact._init_data = _artifact._post(**_post_args) + + with open(file_path, "rb") as out_f: + _artifact._upload(file=out_f) + + return _artifact diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py new file mode 100644 index 00000000..6829f1c4 --- /dev/null +++ b/simvue/api/objects/artifact/object.py @@ -0,0 +1,86 @@ +from .base import ArtifactBase +from simvue.models import NAME_REGEX +from simvue.serialization import serialize_object +from simvue.utilities import calculate_sha256 + +import pydantic +import typing +import sys +import io + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + + +class ObjectArtifact(ArtifactBase): + def __init__( + self, identifier: str | None = None, _read_only: bool = True, **kwargs + ) 
-> None: + super().__init__(identifier, _read_only, original_path=None, **kwargs) + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + storage: str | None, + obj: typing.Any, + metadata: dict[str, typing.Any] | None, + allow_pickling: bool = True, + offline: bool = False, + ) -> Self: + """Create a new artifact either locally or on the server + + Note all arguments are keyword arguments + + Parameters + ---------- + name : str + the name for this artifact + storage : str | None + the identifier for the storage location for this object + obj : Any + object to serialize and upload + metadata : dict[str, Any] | None + supply metadata information for this artifact + allow_pickling : bool, optional + whether to allow the object to be pickled if no other + serialization found. Default is True + offline : bool, optional + whether to define this artifact locally, default is False + + """ + _serialization = serialize_object(obj, allow_pickling) + + if not _serialization or not (_serialized := _serialization[0]): + raise ValueError(f"Could not serialize object of type '{type(obj)}'") + + if not (_data_type := _serialization[1]) and not allow_pickling: + raise ValueError( + f"Could not serialize object of type '{type(obj)}' without pickling" + ) + + _checksum = calculate_sha256(_serialized, is_file=False) + + _artifact = ObjectArtifact( + name=name, + storage=storage, + size=sys.getsizeof(_serialized), + mime_type=_data_type, + checksum=_checksum, + metadata=metadata, + ) + if offline: + return _artifact + + # Firstly submit a request for a new artifact, remove the run IDs + # as these are not an argument for artifact creation + _post_args = _artifact._staging.copy() + _post_args.pop("runs", None) + _artifact._init_data = _artifact._post(**_post_args) + + _artifact._upload(file=io.BytesIO(_serialized)) + return _artifact diff --git a/simvue/api/objects/storage/fetch.py 
b/simvue/api/objects/storage/fetch.py index 3e287b07..fcf286b4 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -6,7 +6,6 @@ with an identifier, use a generic storage object. """ -import abc import typing import http import pydantic @@ -37,11 +36,6 @@ def __new__(cls, identifier: str | None = None, **kwargs): raise RuntimeError(f"Unknown backend '{_storage_pre.backend}'") - @classmethod - @abc.abstractmethod - def new(cls, **_) -> Self: - pass - @classmethod @pydantic.validate_call def get( diff --git a/simvue/run.py b/simvue/run.py index 7ce4eefa..d85b7f7b 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -52,7 +52,8 @@ ) from .api.objects import ( Run as RunObject, - Artifact, + FileArtifact, + ObjectArtifact, MetricsThresholdAlert, MetricsRangeAlert, UserAlert, @@ -1279,7 +1280,7 @@ def save_object( _name: str = name or f"{obj.__class__.__name__.lower()}_{id(obj)}" try: - _artifact = Artifact.new_object( + _artifact = ObjectArtifact.new( name=_name, obj=obj, allow_pickling=allow_pickle, @@ -1346,7 +1347,7 @@ def save_file( try: # Register file - _artifact = Artifact.new_file( + _artifact = FileArtifact.new( name=name or stored_file_name, storage=self._storage_id, file_path=file_path, diff --git a/simvue/sender.py b/simvue/sender.py index e66774d5..5605f3f6 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -10,7 +10,6 @@ import logging from concurrent.futures import ThreadPoolExecutor import threading -from simvue.api.objects.base import SimvueObject from simvue.config.user import SimvueConfiguration import simvue.api.objects @@ -57,11 +56,11 @@ def upload_cached_file( _data = json.load(file_path.open()) _exact_type: str = _data.pop("obj_type") try: - _instance_class: SimvueObject = getattr(simvue.api.objects, _exact_type) + _instance_class = getattr(simvue.api.objects, _exact_type) except AttributeError as e: raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e # We want to reconnect 
if there is an online ID stored for this file - if _online_id := id_mapping.get(_current_id, None): + if _online_id := id_mapping.get(_current_id): obj_for_upload = _instance_class( identifier=_online_id, _read_only=False, **_data ) @@ -82,7 +81,7 @@ def upload_cached_file( raise RuntimeError( f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" ) - if id_mapping.get(_current_id, None): + if id_mapping.get(_current_id): _logger.info(f"Updated {obj_for_upload.__class__.__name__} '{_new_id}'") else: _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") @@ -91,7 +90,7 @@ def upload_cached_file( with lock: id_mapping[_current_id] = _new_id - if obj_type in ["alerts", "runs", "folders", "tags"]: + if obj_type in {"alerts", "runs", "folders", "tags"}: cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) if ( diff --git a/tests/unit/test_artifact.py b/tests/unit/test_file_artifact.py similarity index 72% rename from tests/unit/test_artifact.py rename to tests/unit/test_file_artifact.py index d34dbc89..1b45a987 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_file_artifact.py @@ -6,14 +6,14 @@ import tempfile import numpy -from simvue.api.objects import Artifact, Run +from simvue.api.objects import FileArtifact, Run from simvue.api.objects.folder import Folder from simvue.sender import sender from simvue.client import Client @pytest.mark.api @pytest.mark.online -def test_artifact_creation_online() -> None: +def test_file_artifact_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name) @@ -27,8 +27,8 @@ def test_artifact_creation_online() -> None: _path = pathlib.Path(temp_f.name) with _path.open("w") as out_f: out_f.write(f"Hello World! 
{_uuid}") - _artifact = Artifact.new_file( - name=f"test_artifact_{_uuid}", + _artifact = FileArtifact.new( + name=f"test_file_artifact_{_uuid}", file_path=_path, storage=None, mime_type=None, @@ -41,18 +41,10 @@ def test_artifact_creation_online() -> None: getattr(_artifact, member) except Exception as e: _failed.append((member, f"{e}")) - assert _artifact.name == f"test_artifact_{_uuid}" + assert _artifact.name == f"test_file_artifact_{_uuid}" _content = b"".join(_artifact.download_content()).decode("UTF-8") assert _content == f"Hello World! {_uuid}" assert _artifact.to_dict() - _test_array = numpy.array(range(10)) - _artifact = Artifact.new_object( - name=f"test_artifact_obj_{_uuid}", - storage=None, - obj=_test_array, - metadata=None - ) - _artifact.attach_to_run(_run.id, "output") _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) if _failed: @@ -61,11 +53,11 @@ def test_artifact_creation_online() -> None: @pytest.mark.api @pytest.mark.offline -def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: +def test_file_artifact_creation_offline(offline_test: pathlib.Path) -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name, offline=True) - _run = Run.new(name=f"test_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) + _run = Run.new(name=f"test_file_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) _path = offline_test.joinpath("hello_world.txt") @@ -74,8 +66,8 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: _folder.commit() _run.commit() - _artifact = Artifact.new_file( - name=f"test_artifact_{_uuid}", + _artifact = FileArtifact.new( + name=f"test_file_artifact_{_uuid}", file_path=_path, storage=None, mime_type=None, @@ -83,11 +75,11 @@ def test_artifact_creation_offline(offline_test: pathlib.Path) -> None: metadata=None ) _artifact.attach_to_run(_run._identifier, 
category="input") - assert _artifact.name == f"test_artifact_{_uuid}" + assert _artifact.name == f"test_file_artifact_{_uuid}" sender(offline_test.joinpath(".simvue"), 1, 10) time.sleep(1) client = Client() - _run_id = client.get_run_id_from_name(f"test_artifact_creation_offline_{_uuid}") + _run_id = client.get_run_id_from_name(f"test_file_artifact_creation_offline_{_uuid}") client.get_artifact_as_file(_run_id, _artifact.name, offline_test.joinpath("downloaded").mkdir()) assert offline_test.joinpath("downloaded.txt").read_text() == "Hello World!" _run.delete() From 5edecd55b986c6c88bb656bf6b85a21a7ea64d8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 7 Feb 2025 09:10:09 +0000 Subject: [PATCH 148/163] Fix online artifact refactor --- simvue/api/objects/alert/fetch.py | 8 +-- simvue/api/objects/artifact/base.py | 30 ++++++----- simvue/api/objects/artifact/fetch.py | 10 ++-- simvue/api/objects/base.py | 7 +++ simvue/api/objects/storage/fetch.py | 4 +- tests/unit/test_file_artifact.py | 1 - tests/unit/test_object_artifact.py | 81 ++++++++++++++++++++++++++++ 7 files changed, 119 insertions(+), 22 deletions(-) create mode 100644 tests/unit/test_object_artifact.py diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index dbf2e1e4..9459abd9 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -87,16 +87,16 @@ def get( params={"start": offset, "count": count}, ) + _label: str = _class_instance.__class__.__name__.lower() + _label = _label.replace("base", "") _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", + scenario=f"Retrieval of {_label}s", ) if (_data := _json_response.get("data")) is None: - raise RuntimeError( - f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" - ) + raise RuntimeError(f"Expected key 'data' for 
retrieval of {_label}s") _out_dict: dict[str, AlertType] = {} diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index 7a1a8224..6b5892a7 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -15,12 +15,12 @@ try: from typing import Self except ImportError: - pass + from typing_extensions import Self from simvue.api.url import URL from simvue.exception import ObjectNotFoundError from simvue.models import DATETIME_FORMAT -from simvue.api.objects.base import SimvueObject +from simvue.api.objects.base import SimvueObject, staging_check, write_only from simvue.api.objects.run import Run from simvue.api.request import ( put as sv_put, @@ -117,17 +117,8 @@ def _upload(self, file: io.BytesIO) -> None: response=_response, ) - _response = sv_put( - url=f"{self.url}", - data={"uploaded": True}, - headers=self._headers, - ) - - get_json_from_response( - response=_response, - scenario=f"Information server of upload of file for artifact '{self._identifier}'", - expected_status=[http.HTTPStatus.OK], - ) + self.uploaded = True + self.commit() def _get( self, storage: str | None = None, url: str | None = None, **kwargs @@ -181,6 +172,19 @@ def created(self) -> datetime.datetime | None: datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None ) + @property + @staging_check + def uploaded(self) -> bool: + """Returns whether a file was uploaded for this artifact.""" + return self._get_attribute("uploaded") + + @uploaded.setter + @write_only + @pydantic.validate_call + def uploaded(self, is_uploaded: bool) -> None: + """Set if a file was successfully uploaded for this artifact.""" + self._staging["uploaded"] = is_uploaded + @property def download_url(self) -> URL | None: """Retrieve the URL for downloading this artifact""" diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py index 4d0a0d0f..f36c29f5 100644 --- a/simvue/api/objects/artifact/fetch.py +++ 
b/simvue/api/objects/artifact/fetch.py @@ -85,16 +85,20 @@ def get( headers=_class_instance._headers, params={"start": offset, "count": count}, ) + _label: str = _class_instance.__class__.__name__.lower() + _label = _label.replace("base", "") _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of {_class_instance.__class__.__name__.lower()}s", - expected_type=list, + scenario=f"Retrieval of {_label}s", ) + if (_data := _json_response.get("data")) is None: + raise RuntimeError(f"Expected key 'data' for retrieval of {_label}s") + _out_dict: dict[str, FileArtifact | ObjectArtifact] = {} - for _entry in _json_response: + for _entry in _data: _id = _entry.pop("id") if _entry["original_path"]: yield ( diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 6fa5190c..7f43904f 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -234,6 +234,13 @@ def _get_attribute( try: return self._staging[attribute] except KeyError as e: + # If the key is not in staging, but the object is not in offline mode + # retrieve from the server and update cache instead + if not _offline_state and ( + _attribute := self._get(url=url).get(attribute) + ): + self._staging[attribute] = _attribute + return _attribute raise AttributeError( f"Could not retrieve attribute '{attribute}' " f"for {self._label} '{self._identifier}' from cached data" diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index fcf286b4..0e697959 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -67,10 +67,12 @@ def get( headers=_class_instance._headers, params={"start": offset, "count": count}, ) + _label: str = _class_instance.__class__.__name__.lower() + _label = _label.replace("base", "") _json_response = get_json_from_response( response=_response, expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of 
{_class_instance.__class__.__name__.lower()}s", + scenario=f"Retrieval of {_label}s", expected_type=list, ) diff --git a/tests/unit/test_file_artifact.py b/tests/unit/test_file_artifact.py index 1b45a987..126ee04d 100644 --- a/tests/unit/test_file_artifact.py +++ b/tests/unit/test_file_artifact.py @@ -4,7 +4,6 @@ import time import pathlib import tempfile -import numpy from simvue.api.objects import FileArtifact, Run from simvue.api.objects.folder import Folder diff --git a/tests/unit/test_object_artifact.py b/tests/unit/test_object_artifact.py new file mode 100644 index 00000000..7d85247d --- /dev/null +++ b/tests/unit/test_object_artifact.py @@ -0,0 +1,81 @@ +import os +import pytest +import uuid +import time +import pathlib +import numpy + +from simvue.api.objects import ObjectArtifact, Run +from simvue.api.objects.folder import Folder +from simvue.sender import sender +from simvue.client import Client + +@pytest.mark.api +@pytest.mark.online +def test_object_artifact_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() + + _failed = [] + + with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: + _path = pathlib.Path(temp_f.name) + _array = numpy.array(range(10)) + _artifact = ObjectArtifact.new( + name=f"test_object_artifact_{_uuid}", + obj=_array, + storage=None, + metadata=None + ) + _artifact.attach_to_run(_run.id, "input") + time.sleep(1) + for member in _artifact._properties: + try: + getattr(_artifact, member) + except Exception as e: + _failed.append((member, f"{e}")) + assert _artifact.name == f"test_object_artifact_{_uuid}" + assert client.get_artifact(_run_id, _artifact.name) is not None + assert _artifact.to_dict() + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + if _failed: + raise AssertionError("\n\t-" + "\n\t- ".join(": 
".join(i) for i in _failed)) + + +@pytest.mark.api +@pytest.mark.offline +def test_object_artifact_creation_offline(offline_test: pathlib.Path) -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(name=f"test_object_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) + + _path = offline_test.joinpath("hello_world.txt") + + with _path.open("w") as out_f: + out_f.write("Hello World!") + + _folder.commit() + _run.commit() + _array = numpy.array(range(10)) + _artifact = ObjectArtifact.new( + name=f"test_object_artifact_{_uuid}", + obj=_array, + storage=None, + metadata=None + ) + _artifact.attach_to_run(_run._identifier, category="input") + assert _artifact.name == f"test_object_artifact_{_uuid}" + sender(offline_test.joinpath(".simvue"), 1, 10) + time.sleep(1) + client = Client() + _run_id = client.get_run_id_from_name(f"test_object_artifact_creation_offline_{_uuid}") + assert client.get_artifact(_run_id, _artifact.name) is not None + _run.delete() + _folder.delete() + From f2310403471172d289f2f995e737e816c68d6598 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 7 Feb 2025 09:14:49 +0000 Subject: [PATCH 149/163] Fix method ordering in new artifact --- simvue/api/objects/artifact/file.py | 6 +++--- simvue/api/objects/artifact/object.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index 784cd6b0..193dc79d 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py @@ -67,15 +67,15 @@ def new( metadata=metadata, ) - if offline: - return _artifact - # Firstly submit a request for a new artifact, remove the run IDs # as these are not an argument for artifact creation _post_args = _artifact._staging.copy() _post_args.pop("runs", None) _artifact._init_data = 
_artifact._post(**_post_args) + if offline: + return _artifact + with open(file_path, "rb") as out_f: _artifact._upload(file=out_f) diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index 6829f1c4..b587e7dc 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -73,14 +73,14 @@ def new( checksum=_checksum, metadata=metadata, ) - if offline: - return _artifact - # Firstly submit a request for a new artifact, remove the run IDs # as these are not an argument for artifact creation _post_args = _artifact._staging.copy() _post_args.pop("runs", None) _artifact._init_data = _artifact._post(**_post_args) + if offline: + return _artifact + _artifact._upload(file=io.BytesIO(_serialized)) return _artifact From f7dbad9f494f57f0442e0aa48abe9d00708efcef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 7 Feb 2025 10:54:58 +0000 Subject: [PATCH 150/163] More fixes --- simvue/api/objects/artifact/base.py | 14 ++++++++------ simvue/api/objects/artifact/file.py | 7 ++----- simvue/api/objects/artifact/object.py | 8 +++----- simvue/run.py | 2 +- 4 files changed, 14 insertions(+), 17 deletions(-) diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index 6b5892a7..f84b54f8 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -15,7 +15,7 @@ try: from typing import Self except ImportError: - from typing_extensions import Self + from typing_extensions import Self # noqa: F401 from simvue.api.url import URL from simvue.exception import ObjectNotFoundError @@ -49,8 +49,6 @@ def __init__( # If the artifact is an online instance, need a place to store the response # from the initial creation self._init_data: dict[str, dict] = {} - if not self._staging.get("runs", None): - self._staging |= {"runs": {}} def commit(self) -> None: self._logger.info("Cannot call method 'commit' on write-once type 'Artifact'") @@ 
-63,7 +61,6 @@ def attach_to_run(self, run_id: str, category: Category) -> None: super().commit() return - _name = self._staging["name"] _run_artifacts_url = ( URL(self._user_config.server.url) / f"runs/{run_id}/artifacts/{self._init_data['id']}" @@ -77,7 +74,7 @@ def attach_to_run(self, run_id: str, category: Category) -> None: get_json_from_response( expected_status=[http.HTTPStatus.OK], - scenario=f"adding artifact '{_name}' to run '{run_id}'", + scenario=f"adding artifact '{self.name}' to run '{run_id}'", response=_response, ) @@ -92,7 +89,7 @@ def _upload(self, file: io.BytesIO) -> None: super().commit() return - if not (_url := self._init_data.get("url")): + if not (_url := self._staging.get("url")): return _name = self._staging["name"] @@ -117,8 +114,13 @@ def _upload(self, file: io.BytesIO) -> None: response=_response, ) + # Temporarily remove read-only state + self.read_only(False) + + # Update the server status to confirm file uploaded self.uploaded = True self.commit() + self.read_only(True) def _get( self, storage: str | None = None, url: str | None = None, **kwargs diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index 193dc79d..b3b2795a 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py @@ -67,11 +67,8 @@ def new( metadata=metadata, ) - # Firstly submit a request for a new artifact, remove the run IDs - # as these are not an argument for artifact creation - _post_args = _artifact._staging.copy() - _post_args.pop("runs", None) - _artifact._init_data = _artifact._post(**_post_args) + _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._staging["runs"] = {} if offline: return _artifact diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index b587e7dc..ba85e07b 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -73,14 +73,12 @@ def new( checksum=_checksum, 
metadata=metadata, ) - # Firstly submit a request for a new artifact, remove the run IDs - # as these are not an argument for artifact creation - _post_args = _artifact._staging.copy() - _post_args.pop("runs", None) - _artifact._init_data = _artifact._post(**_post_args) if offline: return _artifact + _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._upload(file=io.BytesIO(_serialized)) + _artifact._staging["runs"] = {} return _artifact diff --git a/simvue/run.py b/simvue/run.py index d85b7f7b..ea454d5f 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -65,7 +65,7 @@ try: from typing import Self except ImportError: - from typing_extensions import Self + from typing_extensions import Self # noqa: F401 if typing.TYPE_CHECKING: From 120087fa6af1143e40410c6a531a4e3af22a5e41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Fri, 7 Feb 2025 11:04:29 +0000 Subject: [PATCH 151/163] Fix online mode file upload --- simvue/api/objects/artifact/base.py | 5 +++-- simvue/api/objects/artifact/file.py | 3 ++- simvue/api/objects/artifact/object.py | 3 ++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index f84b54f8..5cd6e67e 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -55,9 +55,10 @@ def commit(self) -> None: def attach_to_run(self, run_id: str, category: Category) -> None: """Attach this artifact to a given run""" - self._staging["runs"][run_id] = category + self._init_data["runs"][run_id] = category if self._offline: + self._staging["runs"] = self._init_data["runs"] super().commit() return @@ -119,7 +120,7 @@ def _upload(self, file: io.BytesIO) -> None: # Update the server status to confirm file uploaded self.uploaded = True - self.commit() + super().commit() self.read_only(True) def _get( diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index 
b3b2795a..07ef9e27 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py @@ -68,7 +68,8 @@ def new( ) _artifact._init_data = _artifact._post(**_artifact._staging) - _artifact._staging["runs"] = {} + _artifact._init_data["runs"] = {} + _artifact._staging["url"] = _artifact._init_data["url"] if offline: return _artifact diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index ba85e07b..93948da0 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -78,7 +78,8 @@ def new( return _artifact _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._init_data["runs"] = {} + _artifact._staging["url"] = _artifact._init_data["url"] _artifact._upload(file=io.BytesIO(_serialized)) - _artifact._staging["runs"] = {} return _artifact From 88eb40d4c8bab546014d7ab7c94648c35cc5eab2 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 7 Feb 2025 11:09:29 +0000 Subject: [PATCH 152/163] Fixing offline and read only propogatin --- simvue/api/objects/artifact/base.py | 2 +- simvue/api/objects/artifact/file.py | 3 ++- simvue/api/objects/artifact/object.py | 2 ++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index 5cd6e67e..c424253f 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -55,7 +55,7 @@ def commit(self) -> None: def attach_to_run(self, run_id: str, category: Category) -> None: """Attach this artifact to a given run""" - self._init_data["runs"][run_id] = category + self._staging["runs"][run_id] = category if self._offline: self._staging["runs"] = self._init_data["runs"] diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index 07ef9e27..d27e3a61 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py @@ -63,7 +63,8 @@ def new( size=_file_size, 
mime_type=_mime_type, checksum=_file_checksum, - offline=offline, + _offline=offline, + _read_only=False, metadata=metadata, ) diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index 93948da0..31469a1b 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -72,6 +72,8 @@ def new( mime_type=_data_type, checksum=_checksum, metadata=metadata, + _offline=offline, + _read_only=False, ) if offline: From dc3e2736104573fbb15dac091c45d63da4b2a158 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 7 Feb 2025 11:56:28 +0000 Subject: [PATCH 153/163] Fixes --- simvue/api/objects/artifact/base.py | 5 ++--- simvue/api/objects/artifact/file.py | 11 +++++++++-- simvue/api/objects/artifact/object.py | 5 +++++ tests/unit/test_file_artifact.py | 8 +++----- 4 files changed, 19 insertions(+), 10 deletions(-) diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index c424253f..90fa8e10 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -55,7 +55,7 @@ def commit(self) -> None: def attach_to_run(self, run_id: str, category: Category) -> None: """Attach this artifact to a given run""" - self._staging["runs"][run_id] = category + self._init_data["runs"][run_id] = category if self._offline: self._staging["runs"] = self._init_data["runs"] @@ -80,8 +80,7 @@ def attach_to_run(self, run_id: str, category: Category) -> None: ) def on_reconnect(self, id_mapping: dict[str, str]) -> None: - _offline_staging = dict(self._staging["runs"].items()) - self._staging["runs"] = {} + _offline_staging = dict(self._init_data["runs"].items()) for id, category in _offline_staging.items(): self.attach_to_run(run_id=id_mapping[id], category=category) diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index d27e3a61..763dc32b 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py @@ 
-3,7 +3,7 @@ import typing import pydantic import os - +import pathlib from simvue.models import NAME_REGEX from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 @@ -24,6 +24,7 @@ def new( mime_type: str | None, metadata: dict[str, typing.Any] | None, offline: bool = False, + **kwargs, ) -> Self: """Create a new artifact either locally or on the server @@ -51,11 +52,15 @@ def new( if _mime_type not in get_mimetypes(): raise ValueError(f"Invalid MIME type '{mime_type}' specified") - + file_path = pathlib.Path(file_path) _file_size = file_path.stat().st_size _file_orig_path = file_path.expanduser().absolute() _file_checksum = calculate_sha256(f"{file_path}", is_file=True) + kwargs.pop("original_path", None) + kwargs.pop("size", None) + kwargs.pop("checksum", None) + _artifact = FileArtifact( name=name, storage=storage, @@ -66,7 +71,9 @@ def new( _offline=offline, _read_only=False, metadata=metadata, + **kwargs, ) + _artifact._staging["file_path"] = str(file_path) _artifact._init_data = _artifact._post(**_artifact._staging) _artifact._init_data["runs"] = {} diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index 31469a1b..a7ee00c8 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -31,6 +31,7 @@ def new( metadata: dict[str, typing.Any] | None, allow_pickling: bool = True, offline: bool = False, + **kwargs, ) -> Self: """Create a new artifact either locally or on the server @@ -65,6 +66,9 @@ def new( _checksum = calculate_sha256(_serialized, is_file=False) + kwargs.pop("size", None) + kwargs.pop("checksum", None) + _artifact = ObjectArtifact( name=name, storage=storage, @@ -74,6 +78,7 @@ def new( metadata=metadata, _offline=offline, _read_only=False, + **kwargs, ) if offline: diff --git a/tests/unit/test_file_artifact.py b/tests/unit/test_file_artifact.py index 126ee04d..7f2c5abd 100644 --- a/tests/unit/test_file_artifact.py +++ 
b/tests/unit/test_file_artifact.py @@ -76,11 +76,9 @@ def test_file_artifact_creation_offline(offline_test: pathlib.Path) -> None: _artifact.attach_to_run(_run._identifier, category="input") assert _artifact.name == f"test_file_artifact_{_uuid}" sender(offline_test.joinpath(".simvue"), 1, 10) - time.sleep(1) - client = Client() - _run_id = client.get_run_id_from_name(f"test_file_artifact_creation_offline_{_uuid}") - client.get_artifact_as_file(_run_id, _artifact.name, offline_test.joinpath("downloaded").mkdir()) - assert offline_test.joinpath("downloaded.txt").read_text() == "Hello World!" + import pdb; pdb.set_trace() + _content = b"".join(_artifact.download_content()).decode("UTF-8") + assert _content == f"Hello World! {_uuid}" _run.delete() _folder.delete() From 257b57e44abcee5de8347e61bd311574914dcddf Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 7 Feb 2025 12:45:48 +0000 Subject: [PATCH 154/163] Lots of fixes fdor aertifact offline --- simvue/api/objects/artifact/base.py | 2 +- simvue/api/objects/artifact/file.py | 10 +++++++--- simvue/api/objects/artifact/object.py | 13 +++++++++---- tests/unit/test_file_artifact.py | 24 ++++++++++++++++-------- 4 files changed, 33 insertions(+), 16 deletions(-) diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index 90fa8e10..dac009f5 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -80,7 +80,7 @@ def attach_to_run(self, run_id: str, category: Category) -> None: ) def on_reconnect(self, id_mapping: dict[str, str]) -> None: - _offline_staging = dict(self._init_data["runs"].items()) + _offline_staging = self._init_data["runs"].copy() for id, category in _offline_staging.items(): self.attach_to_run(run_id=id_mapping[id], category=category) diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index 763dc32b..7878d514 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py 
@@ -74,10 +74,14 @@ def new( **kwargs, ) _artifact._staging["file_path"] = str(file_path) + if offline: + _artifact._init_data = {} + + else: + _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._staging["url"] = _artifact._init_data["url"] - _artifact._init_data = _artifact._post(**_artifact._staging) - _artifact._init_data["runs"] = {} - _artifact._staging["url"] = _artifact._init_data["url"] + _artifact._init_data["runs"] = kwargs.get("runs") or {} if offline: return _artifact diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index a7ee00c8..77cfff56 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -82,11 +82,16 @@ def new( ) if offline: - return _artifact + _artifact._init_data = {} + + else: + _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._staging["url"] = _artifact._init_data["url"] + + _artifact._init_data["runs"] = kwargs.get("runs") or {} - _artifact._init_data = _artifact._post(**_artifact._staging) - _artifact._init_data["runs"] = {} - _artifact._staging["url"] = _artifact._init_data["url"] + if offline: + return _artifact _artifact._upload(file=io.BytesIO(_serialized)) return _artifact diff --git a/tests/unit/test_file_artifact.py b/tests/unit/test_file_artifact.py index 7f2c5abd..f21b8ca1 100644 --- a/tests/unit/test_file_artifact.py +++ b/tests/unit/test_file_artifact.py @@ -1,11 +1,10 @@ -import os import pytest import uuid import time import pathlib import tempfile - -from simvue.api.objects import FileArtifact, Run +import json +from simvue.api.objects import FileArtifact, Run, Artifact from simvue.api.objects.folder import Folder from simvue.sender import sender from simvue.client import Client @@ -61,7 +60,7 @@ def test_file_artifact_creation_offline(offline_test: pathlib.Path) -> None: _path = offline_test.joinpath("hello_world.txt") with _path.open("w") as out_f: - out_f.write("Hello World!") + 
out_f.write(f"Hello World! {_uuid}") _folder.commit() _run.commit() @@ -74,10 +73,19 @@ def test_file_artifact_creation_offline(offline_test: pathlib.Path) -> None: metadata=None ) _artifact.attach_to_run(_run._identifier, category="input") - assert _artifact.name == f"test_file_artifact_{_uuid}" - sender(offline_test.joinpath(".simvue"), 1, 10) - import pdb; pdb.set_trace() - _content = b"".join(_artifact.download_content()).decode("UTF-8") + + with _artifact._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("name") == f"test_file_artifact_{_uuid}" + assert _local_data.get("runs") == {_run._identifier: "input"} + + _id_mapping = sender(offline_test.joinpath(".simvue"), 1, 10) + time.sleep(1) + + _online_artifact = Artifact(_id_mapping[_artifact.id]) + assert _online_artifact.name == _artifact.name + _content = b"".join(_online_artifact.download_content()).decode("UTF-8") assert _content == f"Hello World! {_uuid}" _run.delete() _folder.delete() From b43897aace02e0943f41f3eda9b730144bb8d19e Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 7 Feb 2025 14:25:19 +0000 Subject: [PATCH 155/163] Started fixing object upload tests --- tests/unit/test_object_artifact.py | 36 ++++++++++++++---------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/tests/unit/test_object_artifact.py b/tests/unit/test_object_artifact.py index 7d85247d..edf73597 100644 --- a/tests/unit/test_object_artifact.py +++ b/tests/unit/test_object_artifact.py @@ -22,25 +22,23 @@ def test_object_artifact_creation_online() -> None: _failed = [] - with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: - _path = pathlib.Path(temp_f.name) - _array = numpy.array(range(10)) - _artifact = ObjectArtifact.new( - name=f"test_object_artifact_{_uuid}", - obj=_array, - storage=None, - metadata=None - ) - _artifact.attach_to_run(_run.id, "input") - time.sleep(1) - for member in _artifact._properties: - try: - getattr(_artifact, member) - except 
Exception as e: - _failed.append((member, f"{e}")) - assert _artifact.name == f"test_object_artifact_{_uuid}" - assert client.get_artifact(_run_id, _artifact.name) is not None - assert _artifact.to_dict() + _array = numpy.array(range(10)) + _artifact = ObjectArtifact.new( + name=f"test_object_artifact_{_uuid}", + obj=_array, + storage=None, + metadata=None + ) + _artifact.attach_to_run(_run.id, "input") + time.sleep(1) + for member in _artifact._properties: + try: + getattr(_artifact, member) + except Exception as e: + _failed.append((member, f"{e}")) + + _downloaded = _artifact.download_content() + import pdb; pdb.set_trace() _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) if _failed: From 6bbc2f78b1d6bcca97187d5d880959a95fd14db2 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 7 Feb 2025 15:59:09 +0000 Subject: [PATCH 156/163] Changes to make object offline mode work --- simvue/api/objects/artifact/object.py | 38 +++++++++++----- simvue/sender.py | 6 +++ tests/unit/test_object_artifact.py | 65 +++++++++++++-------------- 3 files changed, 65 insertions(+), 44 deletions(-) diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index 77cfff56..b7e00668 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -18,7 +18,7 @@ class ObjectArtifact(ArtifactBase): def __init__( self, identifier: str | None = None, _read_only: bool = True, **kwargs ) -> None: - super().__init__(identifier, _read_only, original_path=None, **kwargs) + super().__init__(identifier, _read_only, original_path="", **kwargs) @classmethod @pydantic.validate_call @@ -54,20 +54,29 @@ def new( whether to define this artifact locally, default is False """ - _serialization = serialize_object(obj, allow_pickling) + # If the object has been saved as a bytes file, obj will be None + if obj is None: + try: + _data_type = kwargs.pop("mime_type") + _serialized = kwargs.pop("serialized") + _checksum 
= kwargs.pop("checksum") + kwargs.pop("size") + kwargs.pop("original_path") + except KeyError: + raise ValueError("Must provide an object to be saved, not None.") - if not _serialization or not (_serialized := _serialization[0]): - raise ValueError(f"Could not serialize object of type '{type(obj)}'") + else: + _serialization = serialize_object(obj, allow_pickling) - if not (_data_type := _serialization[1]) and not allow_pickling: - raise ValueError( - f"Could not serialize object of type '{type(obj)}' without pickling" - ) + if not _serialization or not (_serialized := _serialization[0]): + raise ValueError(f"Could not serialize object of type '{type(obj)}'") - _checksum = calculate_sha256(_serialized, is_file=False) + if not (_data_type := _serialization[1]) and not allow_pickling: + raise ValueError( + f"Could not serialize object of type '{type(obj)}' without pickling" + ) - kwargs.pop("size", None) - kwargs.pop("checksum", None) + _checksum = calculate_sha256(_serialized, is_file=False) _artifact = ObjectArtifact( name=name, @@ -83,6 +92,13 @@ def new( if offline: _artifact._init_data = {} + _artifact._staging["obj"] = None + _artifact._local_staging_file.parent.mkdir(parents=True, exist_ok=True) + with open( + _artifact._local_staging_file.parent.joinpath(f"{_artifact.id}.object"), + "wb", + ) as file: + file.write(_serialized) else: _artifact._init_data = _artifact._post(**_artifact._staging) diff --git a/simvue/sender.py b/simvue/sender.py index 5605f3f6..6d07b351 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -59,6 +59,12 @@ def upload_cached_file( _instance_class = getattr(simvue.api.objects, _exact_type) except AttributeError as e: raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e + + # If it is an ObjectArtifact, need to load the object as bytes from a different file + if issubclass(_instance_class, simvue.api.objects.ObjectArtifact): + with open(file_path.parent.joinpath(f"{_current_id}.object"), "rb") as file: + 
_data["serialized"] = file.read() + # We want to reconnect if there is an online ID stored for this file if _online_id := id_mapping.get(_current_id): obj_for_upload = _instance_class( diff --git a/tests/unit/test_object_artifact.py b/tests/unit/test_object_artifact.py index edf73597..ae1bd464 100644 --- a/tests/unit/test_object_artifact.py +++ b/tests/unit/test_object_artifact.py @@ -1,14 +1,13 @@ -import os import pytest import uuid import time import pathlib import numpy - -from simvue.api.objects import ObjectArtifact, Run +import json +from simvue.api.objects import ObjectArtifact, Run, Artifact from simvue.api.objects.folder import Folder from simvue.sender import sender -from simvue.client import Client +from simvue.serialization import _deserialize_numpy_array @pytest.mark.api @pytest.mark.online @@ -16,12 +15,10 @@ def test_object_artifact_creation_online() -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name) - _run = Run.new(folder=_folder_name) + _run = Run.new(name=f"test_object_artifact_run_{_uuid}", folder=_folder_name) _folder.commit() _run.commit() - _failed = [] - _array = numpy.array(range(10)) _artifact = ObjectArtifact.new( name=f"test_object_artifact_{_uuid}", @@ -31,18 +28,12 @@ def test_object_artifact_creation_online() -> None: ) _artifact.attach_to_run(_run.id, "input") time.sleep(1) - for member in _artifact._properties: - try: - getattr(_artifact, member) - except Exception as e: - _failed.append((member, f"{e}")) - - _downloaded = _artifact.download_content() - import pdb; pdb.set_trace() + + _downloaded = _deserialize_numpy_array(next(_artifact.download_content())) + assert numpy.array_equal(_downloaded, _array) + _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) - if _failed: - raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) @pytest.mark.api @@ -51,29 +42,37 @@ def 
test_object_artifact_creation_offline(offline_test: pathlib.Path) -> None: _uuid: str = f"{uuid.uuid4()}".split("-")[0] _folder_name = f"/simvue_unit_testing/{_uuid}" _folder = Folder.new(path=_folder_name, offline=True) - _run = Run.new(name=f"test_object_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) - - _path = offline_test.joinpath("hello_world.txt") - - with _path.open("w") as out_f: - out_f.write("Hello World!") - + _run = Run.new(name=f"test_object_artifact_offline_run_{_uuid}", folder=_folder_name, offline=True) _folder.commit() _run.commit() + _array = numpy.array(range(10)) _artifact = ObjectArtifact.new( - name=f"test_object_artifact_{_uuid}", + name=f"test_object_artifact_offline_{_uuid}", obj=_array, storage=None, - metadata=None + metadata=None, + offline=True ) - _artifact.attach_to_run(_run._identifier, category="input") - assert _artifact.name == f"test_object_artifact_{_uuid}" - sender(offline_test.joinpath(".simvue"), 1, 10) + _artifact.attach_to_run(_run.id, "input") + + with _artifact._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("name") == f"test_object_artifact_offline_{_uuid}" + assert _local_data.get("mime_type") == "application/vnd.simvue.numpy.v1" + assert _local_data.get("runs") == {_run.id: "input"} + + _id_mapping = sender(offline_test.joinpath(".simvue"), 1, 10) time.sleep(1) - client = Client() - _run_id = client.get_run_id_from_name(f"test_object_artifact_creation_offline_{_uuid}") - assert client.get_artifact(_run_id, _artifact.name) is not None + + _online_artifact = Artifact(_id_mapping.get(_artifact.id)) + + assert _online_artifact.name == f"test_object_artifact_offline_{_uuid}" + assert _online_artifact.mime_type == "application/vnd.simvue.numpy.v1" + + _downloaded = _deserialize_numpy_array(next(_online_artifact.download_content())) + assert numpy.array_equal(_downloaded, _array) _run.delete() - _folder.delete() + _folder.delete(recursive=True, 
delete_runs=True, runs_only=False) From e2ba66b87f889fa04dbab334be5c4638c4079759 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 7 Feb 2025 16:00:35 +0000 Subject: [PATCH 157/163] delete object file once sent --- simvue/sender.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/simvue/sender.py b/simvue/sender.py index 6d07b351..ca087d72 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -92,6 +92,8 @@ def upload_cached_file( else: _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") file_path.unlink(missing_ok=True) + if issubclass(_instance_class, simvue.api.objects.ObjectArtifact): + file_path.parent.joinpath(f"{_current_id}.object").unlink() with lock: id_mapping[_current_id] = _new_id From d37274aa8eaff3cf904122cf58796fed619115ed Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 7 Feb 2025 16:11:22 +0000 Subject: [PATCH 158/163] Improved events alert tests --- tests/unit/test_event_alert.py | 59 +++++++++++++++++++++++++++++----- 1 file changed, 51 insertions(+), 8 deletions(-) diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py index 17a94246..d55fd08c 100644 --- a/tests/unit/test_event_alert.py +++ b/tests/unit/test_event_alert.py @@ -5,6 +5,7 @@ import uuid from simvue.api.objects import Alert, EventsAlert +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -46,10 +47,28 @@ def test_event_alert_creation_offline() -> None: assert _alert.alert.pattern == "completed" assert _alert.name == f"events_alert_{_uuid}" assert _alert.notification == "none" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("source") == "events" + assert _local_data.get("alert").get("frequency") == 1 + assert _local_data.get("alert").get("pattern") == "completed" + assert _local_data.get("name") == f"events_alert_{_uuid}" + assert _local_data.get("notification") == "none" + + _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, 
["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_alert = Alert(_id_mapping.get(_alert.id)) + assert _online_alert.source == "events" + assert _online_alert.alert.frequency == 1 + assert _online_alert.alert.pattern == "completed" + assert _online_alert.name == f"events_alert_{_uuid}" + assert _online_alert.notification == "none" _alert.delete() - - + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() @pytest.mark.api @pytest.mark.online @@ -87,15 +106,39 @@ def test_event_alert_modification_offline() -> None: description=None ) _alert.commit() - time.sleep(1) - _new_alert = Alert(_alert.id) - assert isinstance(_new_alert, EventsAlert) + _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_alert = Alert(_id_mapping.get(_alert.id)) + assert _online_alert.source == "events" + assert _online_alert.alert.frequency == 1 + assert _online_alert.alert.pattern == "completed" + assert _online_alert.name == f"events_alert_{_uuid}" + assert _online_alert.notification == "none" + + _new_alert = EventsAlert(_alert.id) _new_alert.read_only(False) _new_alert.description = "updated!" - _new_alert.commit() - assert _new_alert.description == "updated!" - _new_alert.delete() + + # Since changes havent been sent, check online run not updated + _online_alert.refresh() + assert _online_alert.description != "updated!" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("description") == "updated!" + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + _online_alert.refresh() + assert _online_alert.description == "updated!" 
+ + _online_alert.read_only(False) + _online_alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() @pytest.mark.api From 5a8d3a54a29f53af71e822e035ecf0d6a390156b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 10 Feb 2025 12:57:16 +0000 Subject: [PATCH 159/163] Further offline fixes --- simvue/api/objects/alert/fetch.py | 4 ++-- simvue/api/objects/alert/user.py | 22 ++++++++++++++++++++ simvue/api/objects/artifact/fetch.py | 15 +++++++------ simvue/api/objects/artifact/object.py | 1 + simvue/api/objects/storage/fetch.py | 9 ++------ simvue/client.py | 2 +- tests/conftest.py | 8 +++++-- tests/functional/test_run_artifact_upload.py | 4 ++-- tests/functional/test_run_class.py | 2 +- tests/functional/test_run_execute_process.py | 3 +++ 10 files changed, 49 insertions(+), 21 deletions(-) diff --git a/simvue/api/objects/alert/fetch.py b/simvue/api/objects/alert/fetch.py index 9459abd9..6b5e089e 100644 --- a/simvue/api/objects/alert/fetch.py +++ b/simvue/api/objects/alert/fetch.py @@ -78,13 +78,13 @@ def get( # Currently no alert filters kwargs.pop("filters", None) - _class_instance = AlertBase(_local=True, _read_only=True, **kwargs) + _class_instance = AlertBase(_local=True, _read_only=True) _url = f"{_class_instance._base_url}" _response = sv_get( _url, headers=_class_instance._headers, - params={"start": offset, "count": count}, + params={"start": offset, "count": count} | kwargs, ) _label: str = _class_instance.__class__.__name__.lower() diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index d91f11dd..342fc7ae 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -23,6 +23,10 @@ class UserAlert(AlertBase): """Connect to/create a user defined alert either locally or on server""" + def __init__(self, identifier: str | None = None, **kwargs) -> None: + super().__init__(identifier, 
**kwargs) + self._local_status: dict[str, str | None] = kwargs.pop("status", {}) + @classmethod @pydantic.validate_call def new( @@ -70,9 +74,27 @@ def get( """Return only UserAlerts""" raise NotImplementedError("Retrieve of only user alerts is not yet supported") + def get_status(self, run_id: str) -> typing.Literal["ok", "critical"] | None: + """Retrieve current alert status for the given run""" + if self._offline: + return self._staging.get("status", self._local_status).get(run_id) + + return super().get_status(run_id) + + def on_reconnect(self, id_mapping: dict[str, str]) -> None: + """Set status update on reconnect""" + for run_id, status in self._staging.get("status", self._local_status).items(): + self.set_status(run_id, status) + @pydantic.validate_call def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: """Set the status of this alert for a given run""" + if self._offline: + if "status" not in self._staging: + self._staging["status"] = {} + self._staging["status"][run_id] = status + return + _response = sv_put( url=self.url / "status" / run_id, data={"status": status}, diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py index f36c29f5..88f582ce 100644 --- a/simvue/api/objects/artifact/fetch.py +++ b/simvue/api/objects/artifact/fetch.py @@ -17,8 +17,8 @@ class Artifact: def __new__(cls, identifier: str | None = None, **kwargs): """Retrieve an object representing an Artifact by id""" - _storage_pre = ArtifactBase(identifier=identifier, **kwargs) - if _storage_pre.original_path: + _artifact_pre = ArtifactBase(identifier=identifier, **kwargs) + if _artifact_pre.original_path: return FileArtifact(identifier=identifier, **kwargs) else: return ObjectArtifact(identifier=identifier, **kwargs) @@ -49,7 +49,7 @@ def from_name( _first_result: dict[str, typing.Any] = _json_response[0] _artifact_id: str = _first_result.pop("id") - return Artifact.__new__( + return Artifact( identifier=_artifact_id, 
run=run_id, **_first_result, @@ -60,7 +60,10 @@ def from_name( @classmethod @pydantic.validate_call def get( - cls, count: int | None = None, offset: int | None = None, **kwargs + cls, + count: int | None = None, + offset: int | None = None, + **kwargs, ) -> typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]: """Returns artifacts associated with the current user. @@ -78,12 +81,12 @@ def get( the artifact itself as a class instance """ - _class_instance = ArtifactBase(_local=True, _read_only=True, **kwargs) + _class_instance = ArtifactBase(_local=True, _read_only=True) _url = f"{_class_instance._base_url}" _response = sv_get( _url, headers=_class_instance._headers, - params={"start": offset, "count": count}, + params={"start": offset, "count": count} | kwargs, ) _label: str = _class_instance.__class__.__name__.lower() _label = _label.replace("base", "") diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index b7e00668..daf8ec4f 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -18,6 +18,7 @@ class ObjectArtifact(ArtifactBase): def __init__( self, identifier: str | None = None, _read_only: bool = True, **kwargs ) -> None: + kwargs.pop("original_path", None) super().__init__(identifier, _read_only, original_path="", **kwargs) @classmethod diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py index 0e697959..ce72e784 100644 --- a/simvue/api/objects/storage/fetch.py +++ b/simvue/api/objects/storage/fetch.py @@ -17,11 +17,6 @@ from .file import FileStorage from .base import StorageBase -try: - from typing import Self -except ImportError: - from typing_extensions import Self - class Storage: """Generic Simvue storage retrieval class""" @@ -60,12 +55,12 @@ def get( # Currently no storage filters kwargs.pop("filters", None) - _class_instance = StorageBase(_local=True, _read_only=True, **kwargs) + _class_instance = 
StorageBase(_local=True, _read_only=True) _url = f"{_class_instance._base_url}" _response = sv_get( _url, headers=_class_instance._headers, - params={"start": offset, "count": count}, + params={"start": offset, "count": count} | kwargs, ) _label: str = _class_instance.__class__.__name__.lower() _label = _label.replace("base", "") diff --git a/simvue/client.py b/simvue/client.py index 990ac522..45f152c5 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -549,7 +549,7 @@ def get_artifact_as_file( if there was a failure during retrieval of information from the server """ - _artifacts = self._retrieve_artifacts_from_server(run_id, name) + _artifacts = self._retrieve_artifacts_from_server(run_id, name, count=1) try: _id, _artifact = next(_artifacts) diff --git a/tests/conftest.py b/tests/conftest.py index 4369b3c2..f383f133 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import contextlib from numpy import fix import pytest import pytest_mock @@ -10,6 +11,7 @@ import pathlib import logging from simvue.api.objects.artifact import Artifact +from simvue.exception import ObjectNotFoundError import simvue.run as sv_run import simvue.api.objects as sv_api_obj import simvue.config.user as sv_cfg @@ -59,9 +61,11 @@ def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], with sv_run.Run() as run: _test_run_data = setup_test_run(run, True, request) yield run, _test_run_data - sv_api_obj.Folder(identifier=run._folder.id).delete(recursive=True, delete_runs=True, runs_only=False) + with contextlib.suppress(ObjectNotFoundError): + sv_api_obj.Folder(identifier=run._folder.id).delete(recursive=True, delete_runs=True, runs_only=False) for alert_id in _test_run_data.get("alert_ids", []): - sv_api_obj.Alert(identifier=alert_id).delete() + with contextlib.suppress(ObjectNotFoundError): + sv_api_obj.Alert(identifier=alert_id).delete() clear_out_files() diff --git a/tests/functional/test_run_artifact_upload.py 
b/tests/functional/test_run_artifact_upload.py index afdb840e..b870e0c7 100644 --- a/tests/functional/test_run_artifact_upload.py +++ b/tests/functional/test_run_artifact_upload.py @@ -7,7 +7,7 @@ import tempfile import pytest -from simvue.api.objects import Run, Artifact, storage +from simvue.api.objects import Run, FileArtifact, storage from simvue.api.objects.folder import Folder @@ -24,7 +24,7 @@ def test_add_artifact_to_run() -> None: with open(tempf.name, "w") as in_f: in_f.write("Hello") - _artifact = Artifact.new_file( + _artifact = FileArtifact.new( name=f"test_{_uuid}", storage=None, file_path=pathlib.Path(tempf.name), diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 7d749782..5de51a8a 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -215,7 +215,7 @@ def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None run_tags = [tag for tag in tags if tag[1].name == run_data["tags"][-1]] assert len(run_tags) == 1 client.delete_tag(run_tags[0][0]) - + @pytest.mark.run diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index 00752472..4453e33a 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -7,14 +7,17 @@ import simvue.sender as sv_send from simvue import Run, Client +from simvue.sender import sender @pytest.mark.executor @pytest.mark.offline def test_monitor_processes(create_plain_run_offline: tuple[Run, dict]): + _run: Run _run, _ = create_plain_run_offline _run.add_process("process_1", "Hello world!", executable="echo", n=True) _run.add_process("process_2", "bash" if sys.platform != "win32" else "powershell", debug=True, c="exit 0") _run.add_process("process_3", "ls", "-ltr") + sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, ["runs", "alerts"]) @pytest.mark.executor From e7a71fb83441490e3c7c947291ff67487e4706d6 Mon Sep 17 00:00:00 
2001 From: Matt Field Date: Mon, 10 Feb 2025 13:08:02 +0000 Subject: [PATCH 160/163] Fixed execute process test --- simvue/run.py | 14 +++++++------- tests/functional/test_run_execute_process.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index ea454d5f..af7a5aa0 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1922,13 +1922,13 @@ def log_alert( self._error('state must be either "ok" or "critical"') return False - _alert = Alert(identifier=identifier) - if not isinstance(_alert, UserAlert): - self._error( - f"Cannot update state for alert '{identifier}' " - f"of type '{_alert.__class__.__name__.lower()}'" - ) - return False + _alert = UserAlert(identifier=identifier) + # if not isinstance(_alert, UserAlert): + # self._error( + # f"Cannot update state for alert '{identifier}' " + # f"of type '{_alert.__class__.__name__.lower()}'" + # ) + # return False _alert.read_only(False) _alert.set_status(run_id=self._id, status=state) _alert.commit() diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index 4453e33a..6c071809 100644 --- a/tests/functional/test_run_execute_process.py +++ b/tests/functional/test_run_execute_process.py @@ -17,7 +17,7 @@ def test_monitor_processes(create_plain_run_offline: tuple[Run, dict]): _run.add_process("process_1", "Hello world!", executable="echo", n=True) _run.add_process("process_2", "bash" if sys.platform != "win32" else "powershell", debug=True, c="exit 0") _run.add_process("process_3", "ls", "-ltr") - sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, ["runs", "alerts"]) + sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"]) @pytest.mark.executor From 9bf09baadb9ead6f1cb7dd011c7b6ca86888c353 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 10 Feb 2025 13:53:17 +0000 Subject: [PATCH 161/163] UserAlert wip --- simvue/api/objects/alert/user.py | 4 ++-- tests/unit/test_user_alert.py 
| 29 +++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 342fc7ae..9ddcd6e1 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -83,8 +83,8 @@ def get_status(self, run_id: str) -> typing.Literal["ok", "critical"] | None: def on_reconnect(self, id_mapping: dict[str, str]) -> None: """Set status update on reconnect""" - for run_id, status in self._staging.get("status", self._local_status).items(): - self.set_status(run_id, status) + for offline_id, status in self._staging.get("status", {}).items(): + self.set_status(id_mapping.get(offline_id), status) @pydantic.validate_call def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index 0f658ef0..f2c2998c 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -172,4 +172,33 @@ def test_user_alert_status() -> None: _run.delete() _folder.delete(recursive=True, runs_only=False, delete_runs=True) _alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_user_alert_status_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + description=None, + offline=True + ) + _alert.commit() + _folder = Folder.new(path=f"/simvue_unit_tests/{_uuid}", offline=True) + _run = Run.new(folder=f"/simvue_unit_tests/{_uuid}", offline=True) + _folder.commit() + _run.alerts = [_alert.id] + _run.commit() + + sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"]) + time.sleep(1) + + _alert.set_status(_run.id, "critical") + _alert.commit() + import pdb; pdb.set_trace() + time.sleep(1) + _run.delete() + _folder.delete(recursive=True, runs_only=False, delete_runs=True) + _alert.delete() From 60961d21a540e55aa411bc27597965aff6a6d3a6 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 10 Feb 2025 14:28:29 +0000 Subject: [PATCH 162/163] Removed debug --- tests/unit/test_user_alert.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index f2c2998c..71d08336 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -190,10 +190,10 @@ def test_user_alert_status_offline() -> None: _folder.commit() _run.alerts = [_alert.id] _run.commit() - + sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"]) time.sleep(1) - + _alert.set_status(_run.id, "critical") _alert.commit() import pdb; pdb.set_trace() From 1a604e1ad2589f4180cf79fc262331f5c89a074d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 10 Feb 2025 14:29:56 +0000 Subject: [PATCH 163/163] Bump version to alpha for v2 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index acb1b784..bbbfd633 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "simvue" -version = "1.1.4" +version = "2.0.0a0" description = "Simulation tracking and monitoring" authors = [ {name = "Simvue Development Team", email = "info@simvue.io"}