From 7e4c70365b91df2ea3dd6a47565b27dc43467948 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Fri, 19 Apr 2024 15:06:28 +0200 Subject: [PATCH 1/7] Drop support for python 3.9 --- .github/workflows/ci.yml | 5 ++-- pyproject.toml | 3 +- src/scitacean/filesystem.py | 6 +--- src/scitacean/testing/backend/_backend.py | 12 ++------ src/scitacean/testing/sftp/_sftp.py | 35 +++++++---------------- tox.ini | 9 +----- 6 files changed, 19 insertions(+), 51 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c0a2ffc9..2a2b067b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,9 +44,8 @@ jobs: - {python: '3.12', os: ubuntu-22.04, tox: py312-full} - {python: '3.11', os: ubuntu-22.04, tox: py311-full} - {python: '3.10', os: ubuntu-22.04, tox: py310-full} - - {python: '3.9', os: ubuntu-22.04, tox: py39-full} - - {python: '3.9', os: macos-12, tox: py39} - - {python: '3.9', os: windows-2022, tox: py39} + - {python: '3.10', os: macos-12, tox: py310} + - {python: '3.10', os: windows-2022, tox: py310} steps: - run: sudo apt install --yes docker-compose if: ${{ contains(matrix.variant.os, 'ubuntu') }} diff --git a/pyproject.toml b/pyproject.toml index 27a65d7c..f936180d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,14 +20,13 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering", "Typing :: Typed", ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ "email-validator", "pydantic >= 2", diff --git a/src/scitacean/filesystem.py b/src/scitacean/filesystem.py index 31883290..08fd7f4d 100644 --- a/src/scitacean/filesystem.py +++ b/src/scitacean/filesystem.py @@ -196,11 +196,7 @@ def file_modification_time(path: Path) -> datetime: def _new_hash(algorithm: str) -> Any: - try: - return hashlib.new(algorithm, usedforsecurity=False) - except TypeError: - # Fallback for Python < 3.9 - return hashlib.new(algorithm) + return hashlib.new(algorithm, usedforsecurity=False) # size based on http://git.savannah.gnu.org/gitweb/?p=coreutils.git;a=blob;f=src/ioblksize.h;h=ed2f4a9c4d77462f357353eb73ee4306c28b37f1;hb=HEAD#l23 # noqa: E501 diff --git a/src/scitacean/testing/backend/_backend.py b/src/scitacean/testing/backend/_backend.py index d565ac96..d9daf0f5 100644 --- a/src/scitacean/testing/backend/_backend.py +++ b/src/scitacean/testing/backend/_backend.py @@ -18,16 +18,10 @@ def _read_yaml(filename: str) -> Any: - if hasattr(importlib.resources, "files"): - # Use new API added in Python 3.9 - return yaml.safe_load( - importlib.resources.files("scitacean.testing.backend") - .joinpath(filename) - .read_text() - ) - # Old API, deprecated as of Python 3.11 return yaml.safe_load( - importlib.resources.read_text("scitacean.testing.backend", filename) + importlib.resources.files("scitacean.testing.backend") + .joinpath(filename) + .read_text() ) diff --git a/src/scitacean/testing/sftp/_sftp.py b/src/scitacean/testing/sftp/_sftp.py index 71464286..824739ab 100644 --- a/src/scitacean/testing/sftp/_sftp.py +++ b/src/scitacean/testing/sftp/_sftp.py @@ -25,15 +25,11 @@ class SFTPAccess: def _read_resource_text(filename: str) -> str: - if hasattr(importlib.resources, "files"): - # Use new API added in Python 3.9 - return ( - 
importlib.resources.files("scitacean.testing.sftp") - .joinpath(filename) - .read_text() - ) - # Old API, deprecated as of Python 3.11 - return importlib.resources.read_text("scitacean.testing.sftp", filename) + return ( + importlib.resources.files("scitacean.testing.sftp") + .joinpath(filename) + .read_text() + ) def _read_resource_yaml(filename: str) -> Any: @@ -53,21 +49,12 @@ def _docker_file() -> str: def _seed_files() -> Iterable[Tuple[str, str]]: - if hasattr(importlib.resources, "files"): - # Use new API added in Python 3.9 - yield from ( - (file.name, file.read_text()) - for file in importlib.resources.files("scitacean.testing.sftp") - .joinpath("sftp_server_seed") - .iterdir() - ) - else: - # Old API, deprecated as of Python 3.11 - with importlib.resources.path( - "scitacean.testing.sftp", "sftp_server_seed" - ) as seed_dir: - for path in seed_dir.iterdir(): - yield path.name, path.read_text() + yield from ( + (file.name, file.read_text()) + for file in importlib.resources.files("scitacean.testing.sftp") + .joinpath("sftp_server_seed") + .iterdir() + ) def local_access() -> SFTPAccess: diff --git a/tox.ini b/tox.ini index 005ff614..302c1226 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{39,310,311,312}-full +envlist = py{310,311,312}-full isolated_build = true [testenv] @@ -8,13 +8,6 @@ commands = full: python -m pytest --backend-tests --sftp-tests !full: python -m pytest -[testenv:pydantic1] -envlist = py{311}-full -deps = -r requirements-pydantic1/test.txt -commands = - full: python -m pytest --backend-tests --sftp-tests - !full: python -m pytest - [testenv:docs] description = invoke sphinx-build to build the HTML docs basepython = python3.11 From 2c39271859d23a3e644b1561a459b578dbeb92bc Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Fri, 19 Apr 2024 15:06:36 +0200 Subject: [PATCH 2/7] Update dependencies --- requirements/ci.txt | 6 +++++- requirements/dev.txt | 4 ++-- requirements/docs.txt | 10 +++++----- requirements/static.txt | 2 +- requirements/test.txt | 6 ++++++ requirements/wheels.txt | 4 ++++ 6 files changed, 23 insertions(+), 9 deletions(-) diff --git a/requirements/ci.txt b/requirements/ci.txt index e8ef0de6..3c15085a 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -29,7 +29,11 @@ pluggy==1.4.0 # via tox pyproject-api==1.6.1 # via tox +tomli==2.0.1 + # via + # pyproject-api + # tox tox==4.14.2 # via -r ci.in -virtualenv==20.25.1 +virtualenv==20.25.3 # via tox diff --git a/requirements/dev.txt b/requirements/dev.txt index 3e098f5a..1dbe1479 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -38,7 +38,7 @@ httpx==0.27.0 # via jupyterlab isoduration==20.11.0 # via jsonschema -json5==0.9.24 +json5==0.9.25 # via jupyterlab-server jsonpointer==2.4 # via jsonschema @@ -83,7 +83,7 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -ruff==0.3.7 +ruff==0.4.1 # via -r dev.in send2trash==1.8.3 # via jupyter-server diff --git a/requirements/docs.txt b/requirements/docs.txt index 2d1ee954..2d40ac7e 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -82,7 +82,7 @@ markupsafe==2.1.5 # via # jinja2 # nbconvert -matplotlib-inline==0.1.6 +matplotlib-inline==0.1.7 # via # ipykernel # ipython @@ -136,7 +136,7 @@ pygments==2.17.2 # sphinx python-dotenv==1.0.1 # via pydantic-settings -pyzmq==25.1.2 +pyzmq==26.0.1 # via # ipykernel # jupyter-client @@ -152,7 +152,7 @@ snowballstemmer==2.2.0 # via sphinx soupsieve==2.5 # via beautifulsoup4 -sphinx==7.2.6 +sphinx==7.3.7 # via # -r docs.in # autodoc-pydantic 
@@ -162,7 +162,7 @@ sphinx==7.2.6 # sphinx-autodoc-typehints # sphinx-copybutton # sphinx-design -sphinx-autodoc-typehints==2.0.1 +sphinx-autodoc-typehints==2.1.0 # via -r docs.in sphinx-copybutton==0.5.2 # via -r docs.in @@ -188,7 +188,7 @@ tornado==6.4 # via # ipykernel # jupyter-client -traitlets==5.14.2 +traitlets==5.14.3 # via # comm # ipykernel diff --git a/requirements/static.txt b/requirements/static.txt index 99028a0d..e786a781 100644 --- a/requirements/static.txt +++ b/requirements/static.txt @@ -21,7 +21,7 @@ pre-commit==3.7.0 # via -r static.in pyyaml==6.0.1 # via pre-commit -virtualenv==20.25.1 +virtualenv==20.25.3 # via pre-commit # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/test.txt b/requirements/test.txt index 4f6e8799..aea316dd 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,6 +8,10 @@ -r base.txt attrs==23.2.0 # via hypothesis +exceptiongroup==1.2.1 + # via + # hypothesis + # pytest execnet==2.1.1 # via pytest-xdist filelock[typing]==3.13.4 @@ -38,3 +42,5 @@ pyyaml==6.0.1 # via -r test.in sortedcontainers==2.4.0 # via hypothesis +tomli==2.0.1 + # via pytest diff --git a/requirements/wheels.txt b/requirements/wheels.txt index 730ed229..ff60a184 100644 --- a/requirements/wheels.txt +++ b/requirements/wheels.txt @@ -11,3 +11,7 @@ packaging==24.0 # via build pyproject-hooks==1.0.0 # via build +tomli==2.0.1 + # via + # build + # pyproject-hooks From af8a73def9c08aeb4ccb6592b25cd25b697d39ff Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Fri, 19 Apr 2024 15:11:12 +0200 Subject: [PATCH 3/7] Run pyupgrade ruff --select=UP --- src/scitacean/_base_model.py | 54 +- src/scitacean/_html_repr/__init__.py | 7 +- src/scitacean/_html_repr/_attachment_html.py | 3 +- src/scitacean/_html_repr/_common_html.py | 14 +- src/scitacean/_html_repr/_dataset_html.py | 9 +- src/scitacean/_html_repr/_resources.py | 2 +- src/scitacean/_internal/dataclass_wrapper.py | 9 +- src/scitacean/_internal/file_counter.py | 4 +- src/scitacean/client.py | 77 +-- src/scitacean/datablock.py | 49 +- src/scitacean/dataset.py | 53 +- src/scitacean/file.py | 80 ++- src/scitacean/filesystem.py | 16 +- src/scitacean/model.py | 644 +++++++++--------- src/scitacean/pid.py | 12 +- src/scitacean/testing/_pytest_helpers.py | 11 +- src/scitacean/testing/backend/_backend.py | 8 +- .../testing/backend/_pytest_helpers.py | 3 +- src/scitacean/testing/backend/config.py | 3 +- src/scitacean/testing/backend/fixtures.py | 12 +- src/scitacean/testing/backend/seed.py | 19 +- src/scitacean/testing/client.py | 55 +- src/scitacean/testing/sftp/_pytest_helpers.py | 3 +- src/scitacean/testing/sftp/_sftp.py | 9 +- src/scitacean/testing/sftp/fixtures.py | 10 +- src/scitacean/testing/strategies.py | 24 +- src/scitacean/testing/transfer.py | 27 +- src/scitacean/thumbnail.py | 23 +- src/scitacean/transfer/link.py | 10 +- src/scitacean/transfer/sftp.py | 26 +- src/scitacean/transfer/util.py | 5 +- src/scitacean/typing.py | 6 +- src/scitacean/util/credentials.py | 10 +- src/scitacean/util/formatter.py | 13 +- tests/common/files.py | 6 +- tests/dataset_fields_test.py | 11 +- tests/download_test.py | 3 +- tests/model_test.py | 4 +- tests/transfer/sftp_test.py | 4 +- tools/model-generation/generate_models.py | 7 +- tools/model-generation/spec/__init__.py | 64 +- tools/model-generation/spec/schema.py | 16 +- tools/model-generation/templates/__init__.py | 2 +- 43 files changed, 703 insertions(+), 724 deletions(-) diff --git a/src/scitacean/_base_model.py 
b/src/scitacean/_base_model.py index c8838524..4c02411d 100644 --- a/src/scitacean/_base_model.py +++ b/src/scitacean/_base_model.py @@ -6,18 +6,12 @@ from __future__ import annotations import dataclasses +from collections.abc import Iterable from datetime import datetime from typing import ( Any, ClassVar, - Dict, - Iterable, - List, - Optional, - Tuple, - Type, TypeVar, - Union, overload, ) @@ -58,8 +52,8 @@ class BaseModel(pydantic.BaseModel): extra="forbid", ) - _user_mask: ClassVar[Tuple[str, ...]] - _masked_fields: ClassVar[Optional[Tuple[str, ...]]] = None + _user_mask: ClassVar[tuple[str, ...]] + _masked_fields: ClassVar[tuple[str, ...] | None] = None # Some schemas contain fields that we don't want to use in Scitacean. # Normally, omitting them from the model would result in an error when @@ -68,7 +62,7 @@ class BaseModel(pydantic.BaseModel): # Those will be silently dropped by __init__. # Note also the comment for _IGNORED_KWARGS below. def __init_subclass__( - cls, /, masked: Optional[Iterable[str]] = None, **kwargs: Any + cls, /, masked: Iterable[str] | None = None, **kwargs: Any ) -> None: super().__init_subclass__(**kwargs) cls._user_mask = tuple(masked) if masked is not None else () @@ -77,7 +71,7 @@ def __init__(self, **kwargs: Any) -> None: self._delete_ignored_args(kwargs) super().__init__(**kwargs) - def _delete_ignored_args(self, args: Dict[str, Any]) -> None: + def _delete_ignored_args(self, args: dict[str, Any]) -> None: if self._masked_fields is None: self._init_mask(self) for key in self._masked_fields: # type: ignore[union-attr] @@ -88,7 +82,7 @@ def _delete_ignored_args(self, args: Dict[str, Any]) -> None: # So initialization needs to be deferred until the first instantiation of the model. # The mask is cached afterward. @classmethod - def _init_mask(cls: Type[ModelType], instance: ModelType) -> None: + def _init_mask(cls: type[ModelType], instance: ModelType) -> None: def get_name(name: str, field: Any) -> Any: return field.alias if field.alias is not None else name @@ -99,7 +93,7 @@ def get_name(name: str, field: Any) -> Any: cls._masked_fields = cls._user_mask + default_mask @classmethod - def user_model_type(cls) -> Optional[Type[BaseUserModel]]: + def user_model_type(cls) -> type[BaseUserModel] | None: """Return the user model type for this model. Returns ``None`` if there is no user model, e.g., for ``Dataset`` @@ -108,7 +102,7 @@ def user_model_type(cls) -> Optional[Type[BaseUserModel]]: return None @classmethod - def upload_model_type(cls) -> Optional[Type[BaseModel]]: + def upload_model_type(cls) -> type[BaseModel] | None: """Return the upload model type for this model. Returns ``None`` if the model cannot be uploaded or this is an upload model. @@ -116,7 +110,7 @@ def upload_model_type(cls) -> Optional[Type[BaseModel]]: return None @classmethod - def download_model_type(cls) -> Optional[Type[BaseModel]]: + def download_model_type(cls) -> type[BaseModel] | None: """Return the download model type for this model. Returns ``None`` if this is a download model. 
@@ -132,7 +126,7 @@ class BaseUserModel: """ @classmethod - def _download_model_dict(cls, download_model: Any) -> Dict[str, Any]: + def _download_model_dict(cls, download_model: Any) -> dict[str, Any]: return { field.name: getattr( download_model, _model_field_name_of(cls.__name__, field.name) @@ -140,7 +134,7 @@ def _download_model_dict(cls, download_model: Any) -> Dict[str, Any]: for field in dataclasses.fields(cls) } - def _upload_model_dict(self) -> Dict[str, Any]: + def _upload_model_dict(self) -> dict[str, Any]: _check_ready_for_upload(self) return { _model_field_name_of(self.__class__.__name__, field.name): getattr( @@ -158,7 +152,7 @@ def make_upload_model(self) -> BaseModel: raise NotImplementedError("Function does not exist for BaseUserModel") @classmethod - def upload_model_type(cls) -> Optional[Type[BaseModel]]: + def upload_model_type(cls) -> type[BaseModel] | None: """Return the upload model type for this user model. Returns ``None`` if the model cannot be uploaded. @@ -166,14 +160,14 @@ def upload_model_type(cls) -> Optional[Type[BaseModel]]: return None @classmethod - def download_model_type(cls) -> Type[BaseModel]: + def download_model_type(cls) -> type[BaseModel]: """Return the download model type for this user model.""" # There is no sensible default value here as there always exists a download # model. # All child classes must implement this function. raise NotImplementedError("Function does not exist for BaseUserModel") - def _repr_html_(self) -> Optional[str]: + def _repr_html_(self) -> str | None: """Return an HTML representation of the model if possible.""" from ._html_repr import user_model_html_repr @@ -181,7 +175,7 @@ def _repr_html_(self) -> Optional[str]: def construct( - model: Type[PydanticModelType], + model: type[PydanticModelType], *, _strict_validation: bool = True, _quiet: bool = False, @@ -229,7 +223,7 @@ def construct( return model.model_construct(**fields) -def validate_datetime(value: Optional[Union[str, datetime]]) -> Optional[datetime]: +def validate_datetime(value: str | datetime | None) -> datetime | None: """Convert strings to datetimes. This uses dateutil.parser.parse instead of Pydantic's builtin parser in order to @@ -247,13 +241,13 @@ def validate_drop(_: Any) -> None: return None -def validate_emails(value: Optional[str]) -> Optional[str]: +def validate_emails(value: str | None) -> str | None: if value is None: return value return ";".join(pydantic.validate_email(item)[1] for item in value.split(";")) -def validate_orcids(value: Optional[str]) -> Optional[str]: +def validate_orcids(value: str | None) -> str | None: if value is None: return value try: @@ -278,12 +272,12 @@ def convert_download_to_user_model(download_model: BaseModel) -> BaseUserModel: @overload def convert_download_to_user_model( download_model: Iterable[BaseModel], -) -> List[BaseUserModel]: ... +) -> list[BaseUserModel]: ... def convert_download_to_user_model( - download_model: Optional[Union[BaseModel, Iterable[BaseModel]]], -) -> Optional[Union[BaseUserModel, List[BaseUserModel]]]: + download_model: BaseModel | Iterable[BaseModel] | None, +) -> BaseUserModel | list[BaseUserModel] | None: """Construct user models from download models.""" if download_model is None: return download_model @@ -305,12 +299,12 @@ def convert_user_to_upload_model(user_model: BaseUserModel) -> BaseModel: ... @overload def convert_user_to_upload_model( user_model: Iterable[BaseUserModel], -) -> List[BaseModel]: ... +) -> list[BaseModel]: ... 
 def convert_user_to_upload_model(
-    user_model: Optional[Union[BaseUserModel, Iterable[BaseUserModel]]],
-) -> Optional[Union[BaseModel, List[BaseModel]]]:
+    user_model: BaseUserModel | Iterable[BaseUserModel] | None,
+) -> BaseModel | list[BaseModel] | None:
     """Construct upload models from user models."""
     if user_model is None:
         return None
diff --git a/src/scitacean/_html_repr/__init__.py b/src/scitacean/_html_repr/__init__.py
index 826dc8f4..a2fc78ae 100644
--- a/src/scitacean/_html_repr/__init__.py
+++ b/src/scitacean/_html_repr/__init__.py
@@ -4,8 +4,9 @@
 
 from __future__ import annotations
 
+from collections.abc import Callable
 from functools import lru_cache
-from typing import TYPE_CHECKING, Any, Callable, Dict, Optional
+from typing import TYPE_CHECKING, Any
 
 from ._attachment_html import attachment_html_repr
 from ._dataset_html import dataset_html_repr
@@ -15,13 +16,13 @@
 
 
 @lru_cache(maxsize=1)
-def _user_model_reprs() -> Dict[type, Callable[[Any], str]]:
+def _user_model_reprs() -> dict[type, Callable[[Any], str]]:
     from ..model import Attachment
 
     return {Attachment: attachment_html_repr}
 
 
-def user_model_html_repr(user_model: BaseUserModel) -> Optional[str]:
+def user_model_html_repr(user_model: BaseUserModel) -> str | None:
     """HTML representation of a user model f implemented."""
     if (repr_fn := _user_model_reprs().get(type(user_model))) is not None:
         return repr_fn(user_model)
diff --git a/src/scitacean/_html_repr/_attachment_html.py b/src/scitacean/_html_repr/_attachment_html.py
index b856d8da..2b0931f9 100644
--- a/src/scitacean/_html_repr/_attachment_html.py
+++ b/src/scitacean/_html_repr/_attachment_html.py
@@ -5,7 +5,6 @@
 from __future__ import annotations
 
 import dataclasses
-from typing import List
 
 from ..model import Attachment, UploadAttachment
 from . import _resources
@@ -50,7 +49,7 @@ def _format_field(field: Field) -> str:
 }
 
 
-def _get_fields(attachment: Attachment) -> List[Field]:
+def _get_fields(attachment: Attachment) -> list[Field]:
     fields = [
         Field(
             name=_strip_leading_underscore(field.name),
diff --git a/src/scitacean/_html_repr/_common_html.py b/src/scitacean/_html_repr/_common_html.py
index 3d66e41b..3ad789e9 100644
--- a/src/scitacean/_html_repr/_common_html.py
+++ b/src/scitacean/_html_repr/_common_html.py
@@ -4,7 +4,7 @@
 
 import html
 from datetime import datetime
-from typing import Any, List, Optional
+from typing import Any
 
 from .._internal.dataclass_wrapper import dataclass_optional_args
 from ..filesystem import RemotePath
@@ -21,7 +21,7 @@ class Field:
     description: str
     read_only: bool
     required: bool
-    error: Optional[str]
+    error: str | None
     main: bool
 
 
@@ -35,11 +35,11 @@
     Lifecycle: "Lifecycle",
     PID: "PID",
     RemotePath: "RemotePath",
-    List[str]: "list[str]",
-    List[PID]: "list[PID]",
-    List[Relationship]: "list[Relationship]",
-    List[Technique]: "list[Technique]",
-    List[dict]: "list[dict]",  # type: ignore[type-arg]
+    list[str]: "list[str]",
+    list[PID]: "list[PID]",
+    list[Relationship]: "list[Relationship]",
+    list[Technique]: "list[Technique]",
+    list[dict]: "list[dict]",  # type: ignore[type-arg]
 }
 
 
diff --git a/src/scitacean/_html_repr/_dataset_html.py b/src/scitacean/_html_repr/_dataset_html.py
index ba4fb499..4e0659aa 100644
--- a/src/scitacean/_html_repr/_dataset_html.py
+++ b/src/scitacean/_html_repr/_dataset_html.py
@@ -3,7 +3,8 @@
 """HTML representations of datasets for Jupyter."""
 
 import html
-from typing import Any, Dict, Iterable, List, Optional
+from collections.abc import Iterable
+from typing import Any
 
 import pydantic
 
@@ -132,7 +133,7 @@ def _format_field(field: Field) -> str:
 }
 
 
-def _get_fields(dset: Dataset) -> List[Field]:
+def _get_fields(dset: Dataset) -> list[Field]:
     validation = _validate(dset)
     fields = [
         Field(
@@ -155,12 +156,12 @@
     )
 
 
-def _check_error(field: Dataset.Field, validation: Dict[str, str]) -> Optional[str]:
+def _check_error(field: Dataset.Field, validation: dict[str, str]) -> str | None:
     field_spec = next(filter(lambda f: f.name == field.name, Dataset.fields()))
     return validation.get(field_spec.scicat_name, None)
 
 
-def _validate(dset: Dataset) -> Dict[str, str]:
+def _validate(dset: Dataset) -> dict[str, str]:
     def single_elem(xs: Iterable[Any]) -> Any:
         (x,) = xs
         return x
diff --git a/src/scitacean/_html_repr/_resources.py b/src/scitacean/_html_repr/_resources.py
index 06aaaf15..ae15e5f9 100644
--- a/src/scitacean/_html_repr/_resources.py
+++ b/src/scitacean/_html_repr/_resources.py
@@ -75,6 +75,6 @@ def attachment_style() -> str:
     return f"{common_style()}"
 
 
-@lru_cache()
+@lru_cache
 def image(name: str) -> str:
     return _read_text(name, "images")
diff --git a/src/scitacean/_internal/dataclass_wrapper.py b/src/scitacean/_internal/dataclass_wrapper.py
index f040ba5b..fc78b535 100644
--- a/src/scitacean/_internal/dataclass_wrapper.py
+++ b/src/scitacean/_internal/dataclass_wrapper.py
@@ -3,7 +3,8 @@
 """Python version-independent dataclasses."""
 
 import dataclasses
-from typing import Any, Callable, Type, TypeVar
+from collections.abc import Callable
+from typing import Any, TypeVar
 
 T = TypeVar("T")
 
@@ -11,8 +12,6 @@
 try:
     from typing import dataclass_transform
 except ImportError:
-    from typing import Tuple, Union
-
     F = TypeVar("F")
 
     def dataclass_transform(
@@ -21,7 +20,7 @@
         order_default: bool = False,
kw_only_default: bool = False, frozen_default: bool = False, - field_specifiers: Tuple[Union[Type[Any], Callable[..., Any]], ...] = (), + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: Any, ) -> Callable[[T], T]: def impl(f: F) -> F: @@ -33,7 +32,7 @@ def impl(f: F) -> F: @dataclass_transform() def dataclass_optional_args( kw_only: bool = False, slots: bool = False, **kwargs: Any -) -> Callable[[Type[T]], Type[T]]: +) -> Callable[[type[T]], type[T]]: """Create a dataclass with modern arguments.""" try: # Python 3.10+ diff --git a/src/scitacean/_internal/file_counter.py b/src/scitacean/_internal/file_counter.py index c888cbd9..10fee912 100644 --- a/src/scitacean/_internal/file_counter.py +++ b/src/scitacean/_internal/file_counter.py @@ -1,9 +1,9 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) +from collections.abc import Generator from contextlib import contextmanager from pathlib import Path -from typing import Generator import filelock @@ -61,7 +61,7 @@ def decrement(self) -> Generator[int, None, None]: yield count def _read(self) -> int: - with open(self._path, "r") as f: + with open(self._path) as f: c = int(f.read()) return c diff --git a/src/scitacean/client.py b/src/scitacean/client.py index 3206ae77..1465fd80 100644 --- a/src/scitacean/client.py +++ b/src/scitacean/client.py @@ -8,9 +8,10 @@ import datetime import re import warnings +from collections.abc import Callable, Iterable, Iterator from contextlib import contextmanager from pathlib import Path -from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union +from typing import Any, Union from urllib.parse import quote_plus import requests @@ -45,7 +46,7 @@ def __init__( self, *, client: ScicatClient, - file_transfer: Optional[FileTransfer], + file_transfer: FileTransfer | None, ): """Initialize a client. @@ -60,8 +61,8 @@ def from_token( cls, *, url: str, - token: Union[str, StrStorage], - file_transfer: Optional[FileTransfer] = None, + token: str | StrStorage, + file_transfer: FileTransfer | None = None, ) -> Client: """Create a new client and authenticate with a token. @@ -90,9 +91,9 @@ def from_credentials( cls, *, url: str, - username: Union[str, StrStorage], - password: Union[str, StrStorage], - file_transfer: Optional[FileTransfer] = None, + username: str | StrStorage, + password: str | StrStorage, + file_transfer: FileTransfer | None = None, ) -> Client: """Create a new client and authenticate with username and password. @@ -122,7 +123,7 @@ def from_credentials( @classmethod def without_login( - cls, *, url: str, file_transfer: Optional[FileTransfer] = None + cls, *, url: str, file_transfer: FileTransfer | None = None ) -> Client: """Create a new client without authentication. 
@@ -154,13 +155,13 @@ def scicat(self) -> ScicatClient: return self._client @property - def file_transfer(self) -> Optional[FileTransfer]: + def file_transfer(self) -> FileTransfer | None: """Stored handler for file down-/uploads.""" return self._file_transfer def get_dataset( self, - pid: Union[str, PID], + pid: str | PID, strict_validation: bool = False, attachments: bool = False, ) -> Dataset: @@ -332,8 +333,8 @@ def upload_new_sample_now(self, sample: model.Sample) -> model.Sample: return model.Sample.from_download_model(finalized_model) def _upload_orig_datablocks( - self, orig_datablocks: Optional[List[model.UploadOrigDatablock]] - ) -> List[model.DownloadOrigDatablock]: + self, orig_datablocks: list[model.UploadOrigDatablock] | None + ) -> list[model.DownloadOrigDatablock]: if not orig_datablocks: return [] @@ -351,8 +352,8 @@ def _upload_orig_datablocks( ) from exc def _upload_attachments_for_dataset( - self, attachments: List[model.UploadAttachment], *, dataset_id: PID - ) -> List[model.DownloadAttachment]: + self, attachments: list[model.UploadAttachment], *, dataset_id: PID + ) -> list[model.DownloadAttachment]: try: return [ self.scicat.create_attachment_for_dataset( @@ -399,9 +400,9 @@ def download_files( self, dataset: Dataset, *, - target: Union[str, Path], + target: str | Path, select: FileSelector = True, - checksum_algorithm: Optional[str] = None, + checksum_algorithm: str | None = None, force: bool = False, ) -> Dataset: r"""Download files of a dataset. @@ -558,13 +559,13 @@ class ScicatClient: def __init__( self, url: str, - token: Optional[Union[str, StrStorage]], - timeout: Optional[datetime.timedelta], + token: str | StrStorage | None, + timeout: datetime.timedelta | None, ): # Need to add a final / self._base_url = url[:-1] if url.endswith("/") else url self._timeout = datetime.timedelta(seconds=10) if timeout is None else timeout - self._token: Optional[StrStorage] = ( + self._token: StrStorage | None = ( SecretStr(token) if isinstance(token, str) else token ) @@ -572,8 +573,8 @@ def __init__( def from_token( cls, url: str, - token: Union[str, StrStorage], - timeout: Optional[datetime.timedelta] = None, + token: str | StrStorage, + timeout: datetime.timedelta | None = None, ) -> ScicatClient: """Create a new low-level client and authenticate with a token. @@ -597,9 +598,9 @@ def from_token( def from_credentials( cls, url: str, - username: Union[str, StrStorage], - password: Union[str, StrStorage], - timeout: Optional[datetime.timedelta] = None, + username: str | StrStorage, + password: str | StrStorage, + timeout: datetime.timedelta | None = None, ) -> ScicatClient: """Create a new low-level client and authenticate with username and password. @@ -639,7 +640,7 @@ def from_credentials( @classmethod def without_login( - cls, url: str, timeout: Optional[datetime.timedelta] = None + cls, url: str, timeout: datetime.timedelta | None = None ) -> ScicatClient: """Create a new low-level client without authentication. @@ -704,7 +705,7 @@ def get_dataset_model( def get_orig_datablocks( self, pid: PID, strict_validation: bool = False - ) -> List[model.DownloadOrigDatablock]: + ) -> list[model.DownloadOrigDatablock]: """Fetch all orig datablocks from SciCat for a given dataset. Parameters @@ -740,7 +741,7 @@ def get_orig_datablocks( def get_attachments_for_dataset( self, pid: PID, strict_validation: bool = False - ) -> List[model.DownloadAttachment]: + ) -> list[model.DownloadAttachment]: """Fetch all attachments from SciCat for a given dataset. 
Parameters @@ -820,7 +821,7 @@ def get_sample_model( ) def create_dataset_model( - self, dset: Union[model.UploadDerivedDataset, model.UploadRawDataset] + self, dset: model.UploadDerivedDataset | model.UploadRawDataset ) -> model.DownloadDataset: """Create a new dataset in SciCat. @@ -975,7 +976,7 @@ def create_sample_model(self, sample: model.UploadSample) -> model.DownloadSampl ) def validate_dataset_model( - self, dset: Union[model.UploadDerivedDataset, model.UploadRawDataset] + self, dset: model.UploadDerivedDataset | model.UploadRawDataset ) -> None: """Validate a dataset in SciCat. @@ -999,7 +1000,7 @@ def validate_dataset_model( raise ValueError(f"Dataset {dset} did not pass validation in SciCat.") def _send_to_scicat( - self, *, cmd: str, url: str, data: Optional[model.BaseModel] = None + self, *, cmd: str, url: str, data: model.BaseModel | None = None ) -> requests.Response: if self._token is not None: token = self._token.get_str() @@ -1038,7 +1039,7 @@ def _call_endpoint( *, cmd: str, url: str, - data: Optional[model.BaseModel] = None, + data: model.BaseModel | None = None, operation: str, ) -> Any: full_url = _url_concat(self._base_url, url) @@ -1080,7 +1081,7 @@ def _strip_token(error: Any, token: str) -> str: def _make_orig_datablock( - fields: Dict[str, Any], strict_validation: bool + fields: dict[str, Any], strict_validation: bool ) -> model.DownloadOrigDatablock: files = [ model.construct( @@ -1162,7 +1163,7 @@ def _get_token( FileSelector = Union[ - bool, str, List[str], Tuple[str], re.Pattern[str], Callable[[File], bool] + bool, str, list[str], tuple[str], re.Pattern[str], Callable[[File], bool] ] @@ -1180,14 +1181,14 @@ def _file_selector(select: FileSelector) -> Callable[[File], bool]: return select -def _select_files(select: FileSelector, dataset: Dataset) -> List[File]: +def _select_files(select: FileSelector, dataset: Dataset) -> list[File]: selector = _file_selector(select) return [f for f in dataset.files if selector(f)] def _remove_up_to_date_local_files( - files: List[File], checksum_algorithm: Optional[str] -) -> List[File]: + files: list[File], checksum_algorithm: str | None +) -> list[File]: def is_up_to_date(file: File) -> bool: if checksum_algorithm is not None: file = dataclasses.replace(file, checksum_algorithm=checksum_algorithm) @@ -1202,7 +1203,7 @@ def is_up_to_date(file: File) -> bool: ] -def _files_to_upload(files: Iterable[File]) -> List[File]: +def _files_to_upload(files: Iterable[File]) -> list[File]: for file in files: if file.is_on_local and file.is_on_remote: raise ValueError( @@ -1220,7 +1221,7 @@ class _NullUploadConnection: when there are no files to upload. """ - def upload_files(self, *files: File) -> List[File]: + def upload_files(self, *files: File) -> list[File]: """Raise if given files.""" if files: raise RuntimeError("Internal error: Bad upload connection") diff --git a/src/scitacean/datablock.py b/src/scitacean/datablock.py index c2299c44..8792c609 100644 --- a/src/scitacean/datablock.py +++ b/src/scitacean/datablock.py @@ -6,8 +6,9 @@ from __future__ import annotations import dataclasses +from collections.abc import Iterable, Iterator from datetime import datetime -from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional +from typing import TYPE_CHECKING from .file import File from .model import DownloadOrigDatablock, UploadOrigDatablock @@ -29,21 +30,21 @@ class OrigDatablock: models for communication with a server. 
""" - _files: List[File] = dataclasses.field(init=False) - checksum_algorithm: Optional[str] = None - instrument_group: Optional[str] = None - owner_group: Optional[str] = None - init_files: dataclasses.InitVar[Optional[Iterable[File]]] = None - _access_groups: Optional[List[str]] = None - _created_at: Optional[datetime] = None - _created_by: Optional[str] = None - _dataset_id: Optional[PID] = None - _id: Optional[str] = None - _is_published: Optional[bool] = None - _updated_at: Optional[datetime] = None - _updated_by: Optional[str] = None - - def __post_init__(self, init_files: Optional[Iterable[File]]) -> None: + _files: list[File] = dataclasses.field(init=False) + checksum_algorithm: str | None = None + instrument_group: str | None = None + owner_group: str | None = None + init_files: dataclasses.InitVar[Iterable[File] | None] = None + _access_groups: list[str] | None = None + _created_at: datetime | None = None + _created_by: str | None = None + _dataset_id: PID | None = None + _id: str | None = None + _is_published: bool | None = None + _updated_at: datetime | None = None + _updated_by: str | None = None + + def __post_init__(self, init_files: Iterable[File] | None) -> None: self._files = list(init_files) if init_files is not None else [] @classmethod @@ -93,42 +94,42 @@ def size(self) -> int: return sum(file.size for file in self.files) @property - def access_groups(self) -> Optional[List[str]]: + def access_groups(self) -> list[str] | None: """Access groups for this datablock.""" return self._access_groups @property - def created_at(self) -> Optional[datetime]: + def created_at(self) -> datetime | None: """Creation time of this orig datablock.""" return self._created_at @property - def created_by(self) -> Optional[str]: + def created_by(self) -> str | None: """User who created this orig datablock.""" return self._created_by @property - def updated_at(self) -> Optional[datetime]: + def updated_at(self) -> datetime | None: """Last update time of this orig datablock.""" return self._updated_at @property - def updated_by(self) -> Optional[str]: + def updated_by(self) -> str | None: """User who last updated this datablock.""" return self._updated_by @property - def dataset_id(self) -> Optional[PID]: + def dataset_id(self) -> PID | None: """PID of the dataset this datablock belongs to.""" return self._dataset_id @property - def datablock_id(self) -> Optional[str]: + def datablock_id(self) -> str | None: """ID of this datablock.""" return self._id @property - def is_published(self) -> Optional[bool]: + def is_published(self) -> bool | None: """Return whether the datablock is public on SciCat.""" return self._is_published diff --git a/src/scitacean/dataset.py b/src/scitacean/dataset.py index c8a32769..a63c7773 100644 --- a/src/scitacean/dataset.py +++ b/src/scitacean/dataset.py @@ -6,19 +6,12 @@ import dataclasses import itertools +from collections.abc import Generator, Iterable from datetime import datetime, timezone from pathlib import Path from typing import ( Any, - Dict, - Generator, - Iterable, - List, Literal, - Optional, - Tuple, - Type, - Union, ) from ._base_model import convert_download_to_user_model, convert_user_to_upload_model @@ -46,8 +39,8 @@ class Dataset(DatasetBase): def from_download_models( cls, dataset_model: DownloadDataset, - orig_datablock_models: List[DownloadOrigDatablock], - attachment_models: Optional[Iterable[DownloadAttachment]] = None, + orig_datablock_models: list[DownloadOrigDatablock], + attachment_models: Iterable[DownloadAttachment] | None = None, ) -> 
Dataset: """Construct a new dataset from SciCat download models. @@ -83,8 +76,8 @@ def from_download_models( @classmethod def fields( cls, - dataset_type: Optional[Union[DatasetType, Literal["derived", "raw"]]] = None, - read_only: Optional[bool] = None, + dataset_type: DatasetType | Literal["derived", "raw"] | None = None, + read_only: bool | None = None, ) -> Generator[Dataset.Field, None, None]: """Iterate over dataset fields. @@ -179,7 +172,7 @@ def size(self) -> int: return sum(file.size for file in self.files) @property - def files(self) -> Tuple[File, ...]: + def files(self) -> tuple[File, ...]: """Files linked with the dataset.""" return tuple( itertools.chain.from_iterable( @@ -188,7 +181,7 @@ def files(self) -> Tuple[File, ...]: ) @property - def attachments(self) -> Optional[List[Attachment]]: + def attachments(self) -> list[Attachment] | None: """List of attachments for this dataset. This property can be in two distinct 'falsy' states: @@ -202,7 +195,7 @@ def attachments(self) -> Optional[List[Attachment]]: return self._attachments @attachments.setter - def attachments(self, attachments: Optional[List[Attachment]]) -> None: + def attachments(self, attachments: list[Attachment] | None) -> None: """List of attachments for this dataset. See the docs of the getter for an explanation of ``None`` vs ``[]``. @@ -210,15 +203,15 @@ def attachments(self, attachments: Optional[List[Attachment]]) -> None: """ self._attachments = attachments - def add_files(self, *files: File, datablock: Union[int, str, PID] = -1) -> None: + def add_files(self, *files: File, datablock: int | str | PID = -1) -> None: """Add files to the dataset.""" self._get_or_add_orig_datablock(datablock).add_files(*files) def add_local_files( self, - *paths: Union[str, Path], - base_path: Union[str, Path] = "", - datablock: Union[int, str] = -1, + *paths: str | Path, + base_path: str | Path = "", + datablock: int | str = -1, ) -> None: """Add files on the local file system to the dataset. @@ -246,8 +239,8 @@ def add_local_files( def replace( self, *, - _read_only: Optional[Dict[str, Any]] = None, - _orig_datablocks: Optional[List[OrigDatablock]] = None, + _read_only: dict[str, Any] | None = None, + _orig_datablocks: list[OrigDatablock] | None = None, **replacements: Any, ) -> Dataset: """Return a new dataset with replaced fields. @@ -268,7 +261,7 @@ def replace( """ _read_only = _read_only or {} - def get_val(source: Dict[str, Any], name: str) -> Any: + def get_val(source: dict[str, Any], name: str) -> Any: try: return source.pop(name) except KeyError: @@ -399,7 +392,7 @@ def new_or_old(old: File) -> File: ] ) - def add_orig_datablock(self, *, checksum_algorithm: Optional[str]) -> OrigDatablock: + def add_orig_datablock(self, *, checksum_algorithm: str | None) -> OrigDatablock: """Append a new orig datablock to the list of orig datablocks. 
         Parameters
@@ -425,7 +418,7 @@ def _lookup_orig_datablock(self, id_: str) -> OrigDatablock:
         except StopIteration:
             raise KeyError(f"No OrigDatablock with id {id_}") from None
 
-    def _get_or_add_orig_datablock(self, key: Union[int, str, PID]) -> OrigDatablock:
+    def _get_or_add_orig_datablock(self, key: int | str | PID) -> OrigDatablock:
         if isinstance(key, PID):
             key = str(PID)
         if isinstance(key, str):
@@ -437,9 +430,9 @@
         )
         return self._orig_datablocks[key]
 
-    def make_upload_model(self) -> Union[UploadDerivedDataset, UploadRawDataset]:
+    def make_upload_model(self) -> UploadDerivedDataset | UploadRawDataset:
         """Construct a SciCat upload model from self."""
-        model: Union[Type[UploadRawDataset], Type[UploadDerivedDataset]] = (
+        model: type[UploadRawDataset] | type[UploadDerivedDataset] = (
             UploadRawDataset if self.type == DatasetType.RAW else UploadDerivedDataset
         )
         # Datablocks are not included here because they are handled separately
@@ -481,7 +474,7 @@ def make_datablock_upload_models(self) -> DatablockUploadModels:
             ]
         )
 
-    def make_attachment_upload_models(self) -> List[UploadAttachment]:
+    def make_attachment_upload_models(self) -> list[UploadAttachment]:
         """Build models for all registered attachments.
 
         Raises
@@ -524,8 +517,8 @@ def keys(self) -> Iterable[str]:
         """
         from itertools import chain
 
-        all_fields = set((field.name for field in self.fields()))
-        my_fields = set((field.name for field in self.fields(dataset_type=self.type)))
+        all_fields = set(field.name for field in self.fields())
+        my_fields = set(field.name for field in self.fields(dataset_type=self.type))
         other_fields = all_fields - my_fields
         invalid_fields = (
             f_name for f_name in other_fields if getattr(self, f_name) is not None
@@ -637,5 +630,5 @@ class DatablockUploadModels:
 
     # TODO
     # datablocks: Optional[List[UploadDatablock]]
-    orig_datablocks: Optional[List[UploadOrigDatablock]]
+    orig_datablocks: list[UploadOrigDatablock] | None
     """Orig datablocks"""
diff --git a/src/scitacean/file.py b/src/scitacean/file.py
index ef2ff33d..952075a2 100644
--- a/src/scitacean/file.py
+++ b/src/scitacean/file.py
@@ -8,7 +8,7 @@
 import warnings
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import NoReturn, Optional, Union, cast
+from typing import NoReturn, cast
 
 import dateutil.parser
 
@@ -43,37 +43,35 @@ class File:
     └───────────> local+remote <───────────┘
     """
 
-    local_path: Optional[Path]
+    local_path: Path | None
     """Path to the file on the local filesystem."""
     remote_path: RemotePath
     """Path to the file on the remote filesystem."""
-    remote_gid: Optional[str]
+    remote_gid: str | None
     """Unix group ID on remote."""
-    remote_perm: Optional[str]
+    remote_perm: str | None
     """Unix file mode on remote."""
-    remote_uid: Optional[str]
+    remote_uid: str | None
    """Unix user ID on remote."""
-    checksum_algorithm: Optional[str] = None
+    checksum_algorithm: str | None = None
     """Algorithm to use for checksums."""
-    _remote_size: Optional[int] = dataclasses.field(default=None, repr=False)
-    _remote_creation_time: Optional[datetime] = dataclasses.field(
-        default=None, repr=False
-    )
-    _remote_checksum: Optional[str] = dataclasses.field(default=None, repr=False)
-    _checksum_cache: Optional[_Checksum] = dataclasses.field(
+    _remote_size: int | None = dataclasses.field(default=None, repr=False)
+    _remote_creation_time: datetime | None = dataclasses.field(default=None, repr=False)
+    _remote_checksum: str | None = dataclasses.field(default=None, repr=False)
+    _checksum_cache: _Checksum | None = dataclasses.field(
         default=None, compare=False, repr=False
     )
 
     @classmethod
     def from_local(
         cls,
-        path: Union[str, Path],
+        path: str | Path,
         *,
-        base_path: Union[str, Path] = "",
-        remote_path: Optional[Union[str, RemotePath]] = None,
-        remote_uid: Optional[str] = None,
-        remote_gid: Optional[str] = None,
-        remote_perm: Optional[str] = None,
+        base_path: str | Path = "",
+        remote_path: str | RemotePath | None = None,
+        remote_uid: str | None = None,
+        remote_gid: str | None = None,
+        remote_perm: str | None = None,
     ) -> File:
         """Link a file on the local filesystem.
 
@@ -126,14 +124,14 @@
     @classmethod
     def from_remote(
         cls,
-        remote_path: Union[str, RemotePath],
+        remote_path: str | RemotePath,
         size: int,
-        creation_time: Union[datetime, str],
-        checksum: Optional[str] = None,
-        checksum_algorithm: Optional[str] = None,
-        remote_uid: Optional[str] = None,
-        remote_gid: Optional[str] = None,
-        remote_perm: Optional[str] = None,
+        creation_time: datetime | str,
+        checksum: str | None = None,
+        checksum_algorithm: str | None = None,
+        remote_uid: str | None = None,
+        remote_gid: str | None = None,
+        remote_perm: str | None = None,
     ) -> File:
         """Construct a new file object for a remote file.
 
@@ -195,8 +193,8 @@
     def from_download_model(
         cls,
         model: DownloadDataFile,
         *,
-        checksum_algorithm: Optional[str] = None,
-        local_path: Optional[Union[str, Path]] = None,
+        checksum_algorithm: str | None = None,
+        local_path: str | Path | None = None,
     ) -> File:
         """Construct a new file object from a SciCat download model.
 
@@ -248,7 +246,7 @@ def creation_time(self) -> datetime:
             return file_modification_time(cast(Path, self.local_path))
         return self._remote_creation_time  # type: ignore[return-value]
 
-    def checksum(self) -> Optional[str]:
+    def checksum(self) -> str | None:
         """Return the checksum of the file.
 
         This can take a long time to compute for large files.
@@ -270,9 +268,7 @@
             algorithm=self.checksum_algorithm,
         )
 
-    def remote_access_path(
-        self, source_folder: Union[RemotePath, str]
-    ) -> Optional[RemotePath]:
+    def remote_access_path(self, source_folder: RemotePath | str) -> RemotePath | None:
         """Full path to the file on the remote if it exists."""
         return (source_folder / self.remote_path) if self.is_on_remote else None
 
@@ -348,12 +344,12 @@ def make_model(self, *, for_archive: bool = False) -> UploadDataFile:
     def uploaded(
         self,
         *,
-        remote_path: Optional[Union[str, RemotePath]] = None,
-        remote_uid: Optional[str] = None,
-        remote_gid: Optional[str] = None,
-        remote_perm: Optional[str] = None,
-        remote_creation_time: Optional[datetime] = None,
-        remote_size: Optional[int] = None,
+        remote_path: str | RemotePath | None = None,
+        remote_uid: str | None = None,
+        remote_gid: str | None = None,
+        remote_perm: str | None = None,
+        remote_creation_time: datetime | None = None,
+        remote_size: int | None = None,
     ) -> File:
         """Return new file metadata after an upload.
 
@@ -397,7 +393,7 @@
             **{key: val for key, val in args.items() if val is not None},  # type: ignore[arg-type]
         )
 
-    def downloaded(self, *, local_path: Union[str, Path]) -> File:
+    def downloaded(self, *, local_path: str | Path) -> File:
         """Return new file metadata after a download.
 
         Assumes that the input file exists on remote.
@@ -480,10 +476,10 @@ class _Checksum: """Compute and cache the checksum of a file.""" def __init__(self) -> None: - self._value: Optional[str] = None - self._path: Optional[Path] = None - self._algorithm: Optional[str] = None - self._access_time: Optional[datetime] = None + self._value: str | None = None + self._path: Path | None = None + self._algorithm: str | None = None + self._access_time: datetime | None = None def get(self, *, path: Path, algorithm: str) -> str: if self._is_out_of_date(path=path, algorithm=algorithm): diff --git a/src/scitacean/filesystem.py b/src/scitacean/filesystem.py index 08fd7f4d..f3243a3e 100644 --- a/src/scitacean/filesystem.py +++ b/src/scitacean/filesystem.py @@ -16,7 +16,7 @@ import re from datetime import datetime, timezone from pathlib import Path, PurePath -from typing import Any, Optional, TypeVar, Union +from typing import Any, TypeVar from pydantic import GetCoreSchemaHandler from pydantic_core import core_schema @@ -36,7 +36,7 @@ class RemotePath: the two should almost never be mixed. """ - def __init__(self, *path_segments: Union[str, RemotePath]) -> None: + def __init__(self, *path_segments: str | RemotePath) -> None: """Initialize from given path segments.""" for segment in path_segments: if isinstance(segment, (PurePath, Path)): # type: ignore[unreachable] @@ -66,7 +66,7 @@ def to_local(self) -> PurePath: segments = ["/"] + segments[1:] return PurePath(*segments) - def __truediv__(self, other: Union[str, RemotePath]) -> RemotePath: + def __truediv__(self, other: str | RemotePath) -> RemotePath: """Join two path segments.""" if isinstance(other, (PurePath, Path)): # type: ignore[unreachable] raise TypeError("OS paths are not supported when concatenating RemotePath.") @@ -107,7 +107,7 @@ def name(self) -> str: return self._path.rstrip("/").rsplit("/", 1)[-1] @property - def suffix(self) -> Optional[str]: + def suffix(self) -> str | None: """The file extension including a leading period.""" parts = self.name.rsplit(".", 1) if len(parts) == 1: @@ -152,7 +152,7 @@ def trunc(seg: str) -> str: return RemotePath("/".join(map(trunc, self._path.split("/")))) @classmethod - def validate(cls, value: Union[str, RemotePath]) -> RemotePath: + def validate(cls, value: str | RemotePath) -> RemotePath: """Pydantic validator for RemotePath fields.""" return RemotePath(value) @@ -173,7 +173,7 @@ def __get_pydantic_core_schema__( ) -def _posix(path: Union[str, RemotePath]) -> str: +def _posix(path: str | RemotePath) -> str: return path.posix if isinstance(path, RemotePath) else path @@ -200,7 +200,7 @@ def _new_hash(algorithm: str) -> Any: # size based on http://git.savannah.gnu.org/gitweb/?p=coreutils.git;a=blob;f=src/ioblksize.h;h=ed2f4a9c4d77462f357353eb73ee4306c28b37f1;hb=HEAD#l23 # noqa: E501 -def checksum_of_file(path: Union[str, Path], *, algorithm: str) -> str: +def checksum_of_file(path: str | Path, *, algorithm: str) -> str: """Compute the checksum of a local file. 
Parameters @@ -223,7 +223,7 @@ def checksum_of_file(path: Union[str, Path], *, algorithm: str) -> str: return chk.hexdigest() # type: ignore[no-any-return] -P = TypeVar("P", bound=Union[str, Path, RemotePath]) +P = TypeVar("P", bound=str | Path | RemotePath) def escape_path(path: P) -> P: diff --git a/src/scitacean/model.py b/src/scitacean/model.py index 5ade5255..d500972c 100644 --- a/src/scitacean/model.py +++ b/src/scitacean/model.py @@ -81,7 +81,7 @@ from __future__ import annotations from datetime import datetime -from typing import Any, Dict, List, Optional, Type +from typing import Any import pydantic from pydantic import NonNegativeInt @@ -105,54 +105,54 @@ class DownloadDataset( BaseModel, masked=("attachments", "datablocks", "origdatablocks") ): - contactEmail: Optional[str] = None - creationLocation: Optional[str] = None - creationTime: Optional[datetime] = None - inputDatasets: Optional[List[PID]] = None - investigator: Optional[str] = None - numberOfFilesArchived: Optional[NonNegativeInt] = None - owner: Optional[str] = None - ownerGroup: Optional[str] = None - principalInvestigator: Optional[str] = None - sourceFolder: Optional[RemotePath] = None - type: Optional[DatasetType] = None - usedSoftware: Optional[List[str]] = None - accessGroups: Optional[List[str]] = None - version: Optional[str] = None - classification: Optional[str] = None - comment: Optional[str] = None - createdAt: Optional[datetime] = None - createdBy: Optional[str] = None - dataFormat: Optional[str] = None - dataQualityMetrics: Optional[int] = None - description: Optional[str] = None - endTime: Optional[datetime] = None - history: Optional[None] = None - instrumentGroup: Optional[str] = None - instrumentId: Optional[str] = None - isPublished: Optional[bool] = None - jobLogData: Optional[str] = None - jobParameters: Optional[Dict[str, Any]] = None - keywords: Optional[List[str]] = None - license: Optional[str] = None - datasetlifecycle: Optional[DownloadLifecycle] = None - scientificMetadata: Optional[Dict[str, Any]] = None - datasetName: Optional[str] = None - numberOfFiles: Optional[NonNegativeInt] = None - orcidOfOwner: Optional[str] = None - ownerEmail: Optional[str] = None - packedSize: Optional[NonNegativeInt] = None - pid: Optional[PID] = None - proposalId: Optional[str] = None - relationships: Optional[List[DownloadRelationship]] = None - sampleId: Optional[str] = None - sharedWith: Optional[List[str]] = None - size: Optional[NonNegativeInt] = None - sourceFolderHost: Optional[str] = None - techniques: Optional[List[DownloadTechnique]] = None - updatedAt: Optional[datetime] = None - updatedBy: Optional[str] = None - validationStatus: Optional[str] = None + contactEmail: str | None = None + creationLocation: str | None = None + creationTime: datetime | None = None + inputDatasets: list[PID] | None = None + investigator: str | None = None + numberOfFilesArchived: NonNegativeInt | None = None + owner: str | None = None + ownerGroup: str | None = None + principalInvestigator: str | None = None + sourceFolder: RemotePath | None = None + type: DatasetType | None = None + usedSoftware: list[str] | None = None + accessGroups: list[str] | None = None + version: str | None = None + classification: str | None = None + comment: str | None = None + createdAt: datetime | None = None + createdBy: str | None = None + dataFormat: str | None = None + dataQualityMetrics: int | None = None + description: str | None = None + endTime: datetime | None = None + history: None | None = None + instrumentGroup: str | None = None 
+    instrumentId: str | None = None
+    isPublished: bool | None = None
+    jobLogData: str | None = None
+    jobParameters: dict[str, Any] | None = None
+    keywords: list[str] | None = None
+    license: str | None = None
+    datasetlifecycle: DownloadLifecycle | None = None
+    scientificMetadata: dict[str, Any] | None = None
+    datasetName: str | None = None
+    numberOfFiles: NonNegativeInt | None = None
+    orcidOfOwner: str | None = None
+    ownerEmail: str | None = None
+    packedSize: NonNegativeInt | None = None
+    pid: PID | None = None
+    proposalId: str | None = None
+    relationships: list[DownloadRelationship] | None = None
+    sampleId: str | None = None
+    sharedWith: list[str] | None = None
+    size: NonNegativeInt | None = None
+    sourceFolderHost: str | None = None
+    techniques: list[DownloadTechnique] | None = None
+    updatedAt: datetime | None = None
+    updatedBy: str | None = None
+    validationStatus: str | None = None
 
     @pydantic.field_validator(
         "creationTime", "createdAt", "endTime", "updatedAt", mode="before"
     )
@@ -176,37 +176,37 @@ def _validate_orcids(cls, value: Any) -> Any:
 class UploadDerivedDataset(BaseModel):
     contactEmail: str
     creationTime: datetime
-    inputDatasets: List[PID]
+    inputDatasets: list[PID]
     investigator: str
     numberOfFilesArchived: NonNegativeInt
     owner: str
     ownerGroup: str
     sourceFolder: RemotePath
     type: DatasetType
-    usedSoftware: List[str]
-    accessGroups: Optional[List[str]] = None
-    classification: Optional[str] = None
-    comment: Optional[str] = None
-    dataQualityMetrics: Optional[int] = None
-    description: Optional[str] = None
-    instrumentGroup: Optional[str] = None
-    isPublished: Optional[bool] = None
-    jobLogData: Optional[str] = None
-    jobParameters: Optional[Dict[str, Any]] = None
-    keywords: Optional[List[str]] = None
-    license: Optional[str] = None
-    scientificMetadata: Optional[Dict[str, Any]] = None
-    datasetName: Optional[str] = None
-    numberOfFiles: Optional[NonNegativeInt] = None
-    orcidOfOwner: Optional[str] = None
-    ownerEmail: Optional[str] = None
-    packedSize: Optional[NonNegativeInt] = None
-    relationships: Optional[List[UploadRelationship]] = None
-    sharedWith: Optional[List[str]] = None
-    size: Optional[NonNegativeInt] = None
-    sourceFolderHost: Optional[str] = None
-    techniques: Optional[List[UploadTechnique]] = None
-    validationStatus: Optional[str] = None
+    usedSoftware: list[str]
+    accessGroups: list[str] | None = None
+    classification: str | None = None
+    comment: str | None = None
+    dataQualityMetrics: int | None = None
+    description: str | None = None
+    instrumentGroup: str | None = None
+    isPublished: bool | None = None
+    jobLogData: str | None = None
+    jobParameters: dict[str, Any] | None = None
+    keywords: list[str] | None = None
+    license: str | None = None
+    scientificMetadata: dict[str, Any] | None = None
+    datasetName: str | None = None
+    numberOfFiles: NonNegativeInt | None = None
+    orcidOfOwner: str | None = None
+    ownerEmail: str | None = None
+    packedSize: NonNegativeInt | None = None
+    relationships: list[UploadRelationship] | None = None
+    sharedWith: list[str] | None = None
+    size: NonNegativeInt | None = None
+    sourceFolderHost: str | None = None
+    techniques: list[UploadTechnique] | None = None
+    validationStatus: str | None = None
 
     @pydantic.field_validator("creationTime", mode="before")
     def _validate_datetime(cls, value: Any) -> Any:
@@ -231,32 +231,32 @@ class UploadRawDataset(BaseModel):
     principalInvestigator: str
     sourceFolder: RemotePath
     type: DatasetType
-    accessGroups: Optional[List[str]] = None
-    classification: Optional[str] = None
-    comment: Optional[str] = None
-    dataFormat: Optional[str] = None
-    dataQualityMetrics: Optional[int] = None
-    description: Optional[str] = None
-    endTime: Optional[datetime] = None
-    instrumentGroup: Optional[str] = None
-    instrumentId: Optional[str] = None
-    isPublished: Optional[bool] = None
-    keywords: Optional[List[str]] = None
-    license: Optional[str] = None
-    scientificMetadata: Optional[Dict[str, Any]] = None
-    datasetName: Optional[str] = None
-    numberOfFiles: Optional[NonNegativeInt] = None
-    orcidOfOwner: Optional[str] = None
-    ownerEmail: Optional[str] = None
-    packedSize: Optional[NonNegativeInt] = None
-    proposalId: Optional[str] = None
-    relationships: Optional[List[UploadRelationship]] = None
-    sampleId: Optional[str] = None
-    sharedWith: Optional[List[str]] = None
-    size: Optional[NonNegativeInt] = None
-    sourceFolderHost: Optional[str] = None
-    techniques: Optional[List[UploadTechnique]] = None
-    validationStatus: Optional[str] = None
+    accessGroups: list[str] | None = None
+    classification: str | None = None
+    comment: str | None = None
+    dataFormat: str | None = None
+    dataQualityMetrics: int | None = None
+    description: str | None = None
+    endTime: datetime | None = None
+    instrumentGroup: str | None = None
+    instrumentId: str | None = None
+    isPublished: bool | None = None
+    keywords: list[str] | None = None
+    license: str | None = None
+    scientificMetadata: dict[str, Any] | None = None
+    datasetName: str | None = None
+    numberOfFiles: NonNegativeInt | None = None
+    orcidOfOwner: str | None = None
+    ownerEmail: str | None = None
+    packedSize: NonNegativeInt | None = None
+    proposalId: str | None = None
+    relationships: list[UploadRelationship] | None = None
+    sampleId: str | None = None
+    sharedWith: list[str] | None = None
+    size: NonNegativeInt | None = None
+    sourceFolderHost: str | None = None
+    techniques: list[UploadTechnique] | None = None
+    validationStatus: str | None = None
 
     @pydantic.field_validator("creationTime", "endTime", mode="before")
     def _validate_datetime(cls, value: Any) -> Any:
@@ -272,147 +272,147 @@ def _validate_orcids(cls, value: Any) -> Any:
 
 
 class DownloadAttachment(BaseModel):
-    caption: Optional[str] = None
-    ownerGroup: Optional[str] = None
-    accessGroups: Optional[List[str]] = None
-    createdAt: Optional[datetime] = None
-    createdBy: Optional[str] = None
-    datasetId: Optional[PID] = None
-    id: Optional[str] = None
-    instrumentGroup: Optional[str] = None
-    isPublished: Optional[bool] = None
-    proposalId: Optional[str] = None
-    sampleId: Optional[str] = None
-    thumbnail: Optional[Thumbnail] = None
-    updatedAt: Optional[datetime] = None
-    updatedBy: Optional[str] = None
+    caption: str | None = None
+    ownerGroup: str | None = None
+    accessGroups: list[str] | None = None
+    createdAt: datetime | None = None
+    createdBy: str | None = None
+    datasetId: PID | None = None
+    id: str | None = None
+    instrumentGroup: str | None = None
+    isPublished: bool | None = None
+    proposalId: str | None = None
+    sampleId: str | None = None
+    thumbnail: Thumbnail | None = None
+    updatedAt: datetime | None = None
+    updatedBy: str | None = None
 
     @pydantic.field_validator("createdAt", "updatedAt", mode="before")
     def _validate_datetime(cls, value: Any) -> Any:
         return validate_datetime(value)
 
     @classmethod
-    def user_model_type(cls) -> Type[Attachment]:
+    def user_model_type(cls) -> type[Attachment]:
         return Attachment
 
     @classmethod
-    def upload_model_type(cls) -> Type[UploadAttachment]:
+    def upload_model_type(cls) -> type[UploadAttachment]:
         return UploadAttachment
 
 
 class UploadAttachment(BaseModel):
     caption: str
     ownerGroup: str
-    accessGroups: Optional[List[str]] = None
-    datasetId: Optional[PID] = None
-    id: Optional[str] = None
-    instrumentGroup: Optional[str] = None
-    proposalId: Optional[str] = None
-    sampleId: Optional[str] = None
-    thumbnail: Optional[Thumbnail] = None
+    accessGroups: list[str] | None = None
+    datasetId: PID | None = None
+    id: str | None = None
+    instrumentGroup: str | None = None
+    proposalId: str | None = None
+    sampleId: str | None = None
+    thumbnail: Thumbnail | None = None
 
     @classmethod
-    def user_model_type(cls) -> Type[Attachment]:
+    def user_model_type(cls) -> type[Attachment]:
         return Attachment
 
     @classmethod
-    def download_model_type(cls) -> Type[DownloadAttachment]:
+    def download_model_type(cls) -> type[DownloadAttachment]:
         return DownloadAttachment
 
 
 class DownloadOrigDatablock(BaseModel):
-    dataFileList: Optional[List[DownloadDataFile]] = None
-    datasetId: Optional[PID] = None
-    size: Optional[NonNegativeInt] = None
-    id: Optional[str] = pydantic.Field(alias="_id", default=None)
-    accessGroups: Optional[List[str]] = None
-    chkAlg: Optional[str] = None
-    createdAt: Optional[datetime] = None
-    createdBy: Optional[str] = None
-    instrumentGroup: Optional[str] = None
-    isPublished: Optional[bool] = None
-    ownerGroup: Optional[str] = None
-    updatedAt: Optional[datetime] = None
-    updatedBy: Optional[str] = None
+    dataFileList: list[DownloadDataFile] | None = None
+    datasetId: PID | None = None
+    size: NonNegativeInt | None = None
+    id: str | None = pydantic.Field(alias="_id", default=None)
+    accessGroups: list[str] | None = None
+    chkAlg: str | None = None
+    createdAt: datetime | None = None
+    createdBy: str | None = None
+    instrumentGroup: str | None = None
+    isPublished: bool | None = None
+    ownerGroup: str | None = None
+    updatedAt: datetime | None = None
+    updatedBy: str | None = None
 
     @pydantic.field_validator("createdAt", "updatedAt", mode="before")
     def _validate_datetime(cls, value: Any) -> Any:
         return validate_datetime(value)
 
     @classmethod
-    def upload_model_type(cls) -> Type[UploadOrigDatablock]:
+    def upload_model_type(cls) -> type[UploadOrigDatablock]:
         return UploadOrigDatablock
 
 
 class UploadOrigDatablock(BaseModel):
-    dataFileList: List[UploadDataFile]
+    dataFileList: list[UploadDataFile]
     datasetId: PID
     size: NonNegativeInt
-    accessGroups: Optional[List[str]] = None
-    chkAlg: Optional[str] = None
-    instrumentGroup: Optional[str] = None
-    ownerGroup: Optional[str] = None
+    accessGroups: list[str] | None = None
+    chkAlg: str | None = None
+    instrumentGroup: str | None = None
+    ownerGroup: str | None = None
 
     @classmethod
-    def download_model_type(cls) -> Type[DownloadOrigDatablock]:
+    def download_model_type(cls) -> type[DownloadOrigDatablock]:
         return DownloadOrigDatablock
 
 
 class DownloadDatablock(BaseModel):
-    archiveId: Optional[str] = None
-    dataFileList: Optional[List[DownloadDataFile]] = None
-    packedSize: Optional[NonNegativeInt] = None
-    size: Optional[NonNegativeInt] = None
-    version: Optional[str] = None
-    id: Optional[str] = pydantic.Field(alias="_id", default=None)
-    accessGroups: Optional[List[str]] = None
-    chkAlg: Optional[str] = None
-    createdAt: Optional[datetime] = None
-    createdBy: Optional[str] = None
-    datasetId: Optional[PID] = None
-    instrumentGroup: Optional[str] = None
-    isPublished: Optional[bool] = None
-    ownerGroup: Optional[str] = None
-    updatedAt: Optional[datetime] = None
-    updatedBy: Optional[str] = None
+    archiveId: str | None = None
+    dataFileList: list[DownloadDataFile] | None = None
+    packedSize: NonNegativeInt | None = None
+    size: NonNegativeInt | None = None
+    version: str | None = None
+    id: str | None = pydantic.Field(alias="_id", default=None)
+    accessGroups: list[str] | None = None
+    chkAlg: str | None = None
+    createdAt: datetime | None = None
+    createdBy: str | None = None
+    datasetId: PID | None = None
+    instrumentGroup: str | None = None
+    isPublished: bool | None = None
+    ownerGroup: str | None = None
+    updatedAt: datetime | None = None
+    updatedBy: str | None = None
 
     @pydantic.field_validator("createdAt", "updatedAt", mode="before")
     def _validate_datetime(cls, value: Any) -> Any:
         return validate_datetime(value)
 
     @classmethod
-    def upload_model_type(cls) -> Type[UploadDatablock]:
+    def upload_model_type(cls) -> type[UploadDatablock]:
         return UploadDatablock
 
 
 class UploadDatablock(BaseModel):
     archiveId: str
-    dataFileList: List[UploadDataFile]
+    dataFileList: list[UploadDataFile]
     packedSize: NonNegativeInt
     size: NonNegativeInt
     version: str
-    chkAlg: Optional[str] = None
+    chkAlg: str | None = None
 
     @classmethod
-    def download_model_type(cls) -> Type[DownloadDatablock]:
+    def download_model_type(cls) -> type[DownloadDatablock]:
         return DownloadDatablock
 
 
 class DownloadLifecycle(BaseModel):
-    archivable: Optional[bool] = None
-    archiveRetentionTime: Optional[datetime] = None
-    archiveReturnMessage: Optional[Dict[str, Any]] = None
-    archiveStatusMessage: Optional[str] = None
-    dateOfDiskPurging: Optional[datetime] = None
-    dateOfPublishing: Optional[datetime] = None
-    exportedTo: Optional[str] = None
-    isOnCentralDisk: Optional[bool] = None
-    publishable: Optional[bool] = None
-    publishedOn: Optional[datetime] = None
-    retrievable: Optional[bool] = None
-    retrieveIntegrityCheck: Optional[bool] = None
-    retrieveReturnMessage: Optional[Dict[str, Any]] = None
-    retrieveStatusMessage: Optional[str] = None
+    archivable: bool | None = None
+    archiveRetentionTime: datetime | None = None
+    archiveReturnMessage: dict[str, Any] | None = None
+    archiveStatusMessage: str | None = None
+    dateOfDiskPurging: datetime | None = None
+    dateOfPublishing: datetime | None = None
+    exportedTo: str | None = None
+    isOnCentralDisk: bool | None = None
+    publishable: bool | None = None
+    publishedOn: datetime | None = None
+    retrievable: bool | None = None
+    retrieveIntegrityCheck: bool | None = None
+    retrieveReturnMessage: dict[str, Any] | None = None
+    retrieveStatusMessage: str | None = None
 
     @pydantic.field_validator(
         "archiveRetentionTime",
@@ -425,20 +425,20 @@ def _validate_datetime(cls, value: Any) -> Any:
         return validate_datetime(value)
 
     @classmethod
-    def user_model_type(cls) -> Type[Lifecycle]:
+    def user_model_type(cls) -> type[Lifecycle]:
         return Lifecycle
 
 
 class DownloadTechnique(BaseModel):
-    name: Optional[str] = None
-    pid: Optional[str] = None
+    name: str | None = None
+    pid: str | None = None
 
     @classmethod
-    def user_model_type(cls) -> Type[Technique]:
+    def user_model_type(cls) -> type[Technique]:
         return Technique
 
     @classmethod
-    def upload_model_type(cls) -> Type[UploadTechnique]:
+    def upload_model_type(cls) -> type[UploadTechnique]:
         return UploadTechnique
 
 
 class UploadTechnique(BaseModel):
     name: str
     pid: str
 
     @classmethod
-    def user_model_type(cls) -> Type[Technique]:
+    def user_model_type(cls) -> type[Technique]:
         return Technique
 
     @classmethod
-    def download_model_type(cls) -> Type[DownloadTechnique]:
+    def download_model_type(cls) -> type[DownloadTechnique]:
         return DownloadTechnique
 
 
 class DownloadRelationship(BaseModel):
-    pid: Optional[PID] = None
-    relationship: 
Optional[str] = None + pid: PID | None = None + relationship: str | None = None @classmethod - def user_model_type(cls) -> Type[Relationship]: + def user_model_type(cls) -> type[Relationship]: return Relationship @classmethod - def upload_model_type(cls) -> Type[UploadRelationship]: + def upload_model_type(cls) -> type[UploadRelationship]: return UploadRelationship @@ -473,43 +473,43 @@ class UploadRelationship(BaseModel): relationship: str @classmethod - def user_model_type(cls) -> Type[Relationship]: + def user_model_type(cls) -> type[Relationship]: return Relationship @classmethod - def download_model_type(cls) -> Type[DownloadRelationship]: + def download_model_type(cls) -> type[DownloadRelationship]: return DownloadRelationship class DownloadHistory(BaseModel): - id: Optional[str] = pydantic.Field(alias="_id", default=None) - updatedAt: Optional[datetime] = None - updatedBy: Optional[datetime] = None + id: str | None = pydantic.Field(alias="_id", default=None) + updatedAt: datetime | None = None + updatedBy: datetime | None = None @pydantic.field_validator("updatedAt", mode="before") def _validate_datetime(cls, value: Any) -> Any: return validate_datetime(value) @classmethod - def user_model_type(cls) -> Type[History]: + def user_model_type(cls) -> type[History]: return History class DownloadDataFile(BaseModel): - path: Optional[str] = None - size: Optional[NonNegativeInt] = None - time: Optional[datetime] = None - chk: Optional[str] = None - gid: Optional[str] = None - perm: Optional[str] = None - uid: Optional[str] = None + path: str | None = None + size: NonNegativeInt | None = None + time: datetime | None = None + chk: str | None = None + gid: str | None = None + perm: str | None = None + uid: str | None = None @pydantic.field_validator("time", mode="before") def _validate_datetime(cls, value: Any) -> Any: return validate_datetime(value) @classmethod - def upload_model_type(cls) -> Type[UploadDataFile]: + def upload_model_type(cls) -> type[UploadDataFile]: return UploadDataFile @@ -517,74 +517,74 @@ class UploadDataFile(BaseModel): path: str size: NonNegativeInt time: datetime - chk: Optional[str] = None - gid: Optional[str] = None - perm: Optional[str] = None - uid: Optional[str] = None + chk: str | None = None + gid: str | None = None + perm: str | None = None + uid: str | None = None @pydantic.field_validator("time", mode="before") def _validate_datetime(cls, value: Any) -> Any: return validate_datetime(value) @classmethod - def download_model_type(cls) -> Type[DownloadDataFile]: + def download_model_type(cls) -> type[DownloadDataFile]: return DownloadDataFile class DownloadInstrument(BaseModel): - customMetadata: Optional[Dict[str, Any]] = None - name: Optional[str] = None - pid: Optional[str] = None - uniqueName: Optional[str] = None + customMetadata: dict[str, Any] | None = None + name: str | None = None + pid: str | None = None + uniqueName: str | None = None @classmethod - def user_model_type(cls) -> Type[Instrument]: + def user_model_type(cls) -> type[Instrument]: return Instrument class DownloadSample(BaseModel): - ownerGroup: Optional[str] = None - accessGroups: Optional[List[str]] = None - createdAt: Optional[datetime] = None - createdBy: Optional[str] = None - description: Optional[str] = None - instrumentGroup: Optional[str] = None - isPublished: Optional[bool] = None - owner: Optional[str] = None - sampleCharacteristics: Optional[Dict[str, Any]] = None - sampleId: Optional[str] = None - updatedAt: Optional[datetime] = None - updatedBy: Optional[str] = None + 
ownerGroup: str | None = None + accessGroups: list[str] | None = None + createdAt: datetime | None = None + createdBy: str | None = None + description: str | None = None + instrumentGroup: str | None = None + isPublished: bool | None = None + owner: str | None = None + sampleCharacteristics: dict[str, Any] | None = None + sampleId: str | None = None + updatedAt: datetime | None = None + updatedBy: str | None = None @pydantic.field_validator("createdAt", "updatedAt", mode="before") def _validate_datetime(cls, value: Any) -> Any: return validate_datetime(value) @classmethod - def user_model_type(cls) -> Type[Sample]: + def user_model_type(cls) -> type[Sample]: return Sample @classmethod - def upload_model_type(cls) -> Type[UploadSample]: + def upload_model_type(cls) -> type[UploadSample]: return UploadSample class UploadSample(BaseModel): ownerGroup: str - accessGroups: Optional[List[str]] = None - description: Optional[str] = None - instrumentGroup: Optional[str] = None - isPublished: Optional[bool] = None - owner: Optional[str] = None - sampleCharacteristics: Optional[Dict[str, Any]] = None - sampleId: Optional[str] = None + accessGroups: list[str] | None = None + description: str | None = None + instrumentGroup: str | None = None + isPublished: bool | None = None + owner: str | None = None + sampleCharacteristics: dict[str, Any] | None = None + sampleId: str | None = None @classmethod - def user_model_type(cls) -> Type[Sample]: + def user_model_type(cls) -> type[Sample]: return Sample @classmethod - def download_model_type(cls) -> Type[DownloadSample]: + def download_model_type(cls) -> type[DownloadSample]: return DownloadSample @@ -592,37 +592,37 @@ def download_model_type(cls) -> Type[DownloadSample]: class Attachment(BaseUserModel): caption: str owner_group: str - access_groups: Optional[List[str]] = None - dataset_id: Optional[PID] = None - id: Optional[str] = None - instrument_group: Optional[str] = None - proposal_id: Optional[str] = None - sample_id: Optional[str] = None - thumbnail: Optional[Thumbnail] = None - _created_at: Optional[datetime] = None - _created_by: Optional[str] = None - _is_published: Optional[bool] = None - _updated_at: Optional[datetime] = None - _updated_by: Optional[str] = None + access_groups: list[str] | None = None + dataset_id: PID | None = None + id: str | None = None + instrument_group: str | None = None + proposal_id: str | None = None + sample_id: str | None = None + thumbnail: Thumbnail | None = None + _created_at: datetime | None = None + _created_by: str | None = None + _is_published: bool | None = None + _updated_at: datetime | None = None + _updated_by: str | None = None @property - def created_at(self) -> Optional[datetime]: + def created_at(self) -> datetime | None: return self._created_at @property - def created_by(self) -> Optional[str]: + def created_by(self) -> str | None: return self._created_by @property - def is_published(self) -> Optional[bool]: + def is_published(self) -> bool | None: return self._is_published @property - def updated_at(self) -> Optional[datetime]: + def updated_at(self) -> datetime | None: return self._updated_at @property - def updated_by(self) -> Optional[str]: + def updated_by(self) -> str | None: return self._updated_by @classmethod @@ -635,85 +635,85 @@ def make_upload_model(self) -> UploadAttachment: return UploadAttachment(**self._upload_model_dict()) @classmethod - def upload_model_type(cls) -> Type[UploadAttachment]: + def upload_model_type(cls) -> type[UploadAttachment]: return UploadAttachment @classmethod - 
def download_model_type(cls) -> Type[DownloadAttachment]: + def download_model_type(cls) -> type[DownloadAttachment]: return DownloadAttachment @dataclass_optional_args(kw_only=True, slots=True) class Lifecycle(BaseUserModel): - _archivable: Optional[bool] = None - _archive_retention_time: Optional[datetime] = None - _archive_return_message: Optional[Dict[str, Any]] = None - _archive_status_message: Optional[str] = None - _date_of_disk_purging: Optional[datetime] = None - _date_of_publishing: Optional[datetime] = None - _exported_to: Optional[str] = None - _is_on_central_disk: Optional[bool] = None - _publishable: Optional[bool] = None - _published_on: Optional[datetime] = None - _retrievable: Optional[bool] = None - _retrieve_integrity_check: Optional[bool] = None - _retrieve_return_message: Optional[Dict[str, Any]] = None - _retrieve_status_message: Optional[str] = None + _archivable: bool | None = None + _archive_retention_time: datetime | None = None + _archive_return_message: dict[str, Any] | None = None + _archive_status_message: str | None = None + _date_of_disk_purging: datetime | None = None + _date_of_publishing: datetime | None = None + _exported_to: str | None = None + _is_on_central_disk: bool | None = None + _publishable: bool | None = None + _published_on: datetime | None = None + _retrievable: bool | None = None + _retrieve_integrity_check: bool | None = None + _retrieve_return_message: dict[str, Any] | None = None + _retrieve_status_message: str | None = None @property - def archivable(self) -> Optional[bool]: + def archivable(self) -> bool | None: return self._archivable @property - def archive_retention_time(self) -> Optional[datetime]: + def archive_retention_time(self) -> datetime | None: return self._archive_retention_time @property - def archive_return_message(self) -> Optional[Dict[str, Any]]: + def archive_return_message(self) -> dict[str, Any] | None: return self._archive_return_message @property - def archive_status_message(self) -> Optional[str]: + def archive_status_message(self) -> str | None: return self._archive_status_message @property - def date_of_disk_purging(self) -> Optional[datetime]: + def date_of_disk_purging(self) -> datetime | None: return self._date_of_disk_purging @property - def date_of_publishing(self) -> Optional[datetime]: + def date_of_publishing(self) -> datetime | None: return self._date_of_publishing @property - def exported_to(self) -> Optional[str]: + def exported_to(self) -> str | None: return self._exported_to @property - def is_on_central_disk(self) -> Optional[bool]: + def is_on_central_disk(self) -> bool | None: return self._is_on_central_disk @property - def publishable(self) -> Optional[bool]: + def publishable(self) -> bool | None: return self._publishable @property - def published_on(self) -> Optional[datetime]: + def published_on(self) -> datetime | None: return self._published_on @property - def retrievable(self) -> Optional[bool]: + def retrievable(self) -> bool | None: return self._retrievable @property - def retrieve_integrity_check(self) -> Optional[bool]: + def retrieve_integrity_check(self) -> bool | None: return self._retrieve_integrity_check @property - def retrieve_return_message(self) -> Optional[Dict[str, Any]]: + def retrieve_return_message(self) -> dict[str, Any] | None: return self._retrieve_return_message @property - def retrieve_status_message(self) -> Optional[str]: + def retrieve_status_message(self) -> str | None: return self._retrieve_status_message @classmethod @@ -722,7 +722,7 @@ def 
from_download_model(cls, download_model: DownloadLifecycle) -> Lifecycle: return cls(**cls._download_model_dict(download_model)) @classmethod - def download_model_type(cls) -> Type[DownloadLifecycle]: + def download_model_type(cls) -> type[DownloadLifecycle]: return DownloadLifecycle @@ -741,11 +741,11 @@ def make_upload_model(self) -> UploadTechnique: return UploadTechnique(**self._upload_model_dict()) @classmethod - def upload_model_type(cls) -> Type[UploadTechnique]: + def upload_model_type(cls) -> type[UploadTechnique]: return UploadTechnique @classmethod - def download_model_type(cls) -> Type[DownloadTechnique]: + def download_model_type(cls) -> type[DownloadTechnique]: return DownloadTechnique @@ -764,30 +764,30 @@ def make_upload_model(self) -> UploadRelationship: return UploadRelationship(**self._upload_model_dict()) @classmethod - def upload_model_type(cls) -> Type[UploadRelationship]: + def upload_model_type(cls) -> type[UploadRelationship]: return UploadRelationship @classmethod - def download_model_type(cls) -> Type[DownloadRelationship]: + def download_model_type(cls) -> type[DownloadRelationship]: return DownloadRelationship @dataclass_optional_args(kw_only=True, slots=True) class History(BaseUserModel): - __id: Optional[str] = None - _updated_at: Optional[datetime] = None - _updated_by: Optional[datetime] = None + __id: str | None = None + _updated_at: datetime | None = None + _updated_by: datetime | None = None @property - def _id(self) -> Optional[str]: + def _id(self) -> str | None: return self.__id @property - def updated_at(self) -> Optional[datetime]: + def updated_at(self) -> datetime | None: return self._updated_at @property - def updated_by(self) -> Optional[datetime]: + def updated_by(self) -> datetime | None: return self._updated_by @classmethod @@ -796,31 +796,31 @@ def from_download_model(cls, download_model: DownloadHistory) -> History: return cls(**cls._download_model_dict(download_model)) @classmethod - def download_model_type(cls) -> Type[DownloadHistory]: + def download_model_type(cls) -> type[DownloadHistory]: return DownloadHistory @dataclass_optional_args(kw_only=True, slots=True) class Instrument(BaseUserModel): - _custom_metadata: Optional[Dict[str, Any]] = None - _name: Optional[str] = None - _pid: Optional[str] = None - _unique_name: Optional[str] = None + _custom_metadata: dict[str, Any] | None = None + _name: str | None = None + _pid: str | None = None + _unique_name: str | None = None @property - def custom_metadata(self) -> Optional[Dict[str, Any]]: + def custom_metadata(self) -> dict[str, Any] | None: return self._custom_metadata @property - def name(self) -> Optional[str]: + def name(self) -> str | None: return self._name @property - def pid(self) -> Optional[str]: + def pid(self) -> str | None: return self._pid @property - def unique_name(self) -> Optional[str]: + def unique_name(self) -> str | None: return self._unique_name @classmethod @@ -829,39 +829,39 @@ def from_download_model(cls, download_model: DownloadInstrument) -> Instrument: return cls(**cls._download_model_dict(download_model)) @classmethod - def download_model_type(cls) -> Type[DownloadInstrument]: + def download_model_type(cls) -> type[DownloadInstrument]: return DownloadInstrument @dataclass_optional_args(kw_only=True, slots=True) class Sample(BaseUserModel): owner_group: str - access_groups: Optional[List[str]] = None - description: Optional[str] = None - instrument_group: Optional[str] = None - is_published: Optional[bool] = None - owner: Optional[str] = None - 
sample_characteristics: Optional[Dict[str, Any]] = None - sample_id: Optional[str] = None - _created_at: Optional[datetime] = None - _created_by: Optional[str] = None - _updated_at: Optional[datetime] = None - _updated_by: Optional[str] = None + access_groups: list[str] | None = None + description: str | None = None + instrument_group: str | None = None + is_published: bool | None = None + owner: str | None = None + sample_characteristics: dict[str, Any] | None = None + sample_id: str | None = None + _created_at: datetime | None = None + _created_by: str | None = None + _updated_at: datetime | None = None + _updated_by: str | None = None @property - def created_at(self) -> Optional[datetime]: + def created_at(self) -> datetime | None: return self._created_at @property - def created_by(self) -> Optional[str]: + def created_by(self) -> str | None: return self._created_by @property - def updated_at(self) -> Optional[datetime]: + def updated_at(self) -> datetime | None: return self._updated_at @property - def updated_by(self) -> Optional[str]: + def updated_by(self) -> str | None: return self._updated_by @classmethod @@ -874,11 +874,11 @@ def make_upload_model(self) -> UploadSample: return UploadSample(**self._upload_model_dict()) @classmethod - def upload_model_type(cls) -> Type[UploadSample]: + def upload_model_type(cls) -> type[UploadSample]: return UploadSample @classmethod - def download_model_type(cls) -> Type[DownloadSample]: + def download_model_type(cls) -> type[DownloadSample]: return DownloadSample diff --git a/src/scitacean/pid.py b/src/scitacean/pid.py index db8dff95..a845ffb9 100644 --- a/src/scitacean/pid.py +++ b/src/scitacean/pid.py @@ -5,7 +5,7 @@ from __future__ import annotations import uuid -from typing import Any, Optional, Union +from typing import Any from pydantic import GetCoreSchemaHandler, ValidationError from pydantic_core import core_schema @@ -32,7 +32,7 @@ class PID: __slots__ = ("_pid", "_prefix") - def __init__(self, *, pid: str, prefix: Optional[str] = None): + def __init__(self, *, pid: str, prefix: str | None = None): """Initialize an instance from individual components. Parameters @@ -46,7 +46,7 @@ def __init__(self, *, pid: str, prefix: Optional[str] = None): self._prefix = prefix @classmethod - def parse(cls, x: Union[str, PID]) -> PID: + def parse(cls, x: str | PID) -> PID: """Build a PID from a string. The string is split at the first "/" to determine @@ -73,7 +73,7 @@ def parse(cls, x: Union[str, PID]) -> PID: return PID(prefix=pieces[0], pid=pieces[1]) @classmethod - def generate(cls, *, prefix: Optional[str] = None) -> PID: + def generate(cls, *, prefix: str | None = None) -> PID: """Create a new unique PID. Uses UUID4 to generate the ID. 
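Aside, as a minimal sketch and not part of the patch: the Optional[X] -> X | None rewrite in these hunks is what ties the series to dropping Python 3.9. PEP 604 unions are real runtime expressions, and pydantic evaluates model annotations at runtime, so str | None raises TypeError on 3.9, whereas built-in generics like list[str] (PEP 585) already worked there.

    # Sketch (assumes Python 3.10+): both spellings validate identically.
    from typing import Optional

    import pydantic

    class Example(pydantic.BaseModel):
        name: str | None = None        # PEP 604 union; runtime TypeError on 3.9
        tags: list[str] | None = None  # PEP 585 generic; fine on 3.9 already

    assert Example(name="a").tags is None
    assert (str | None) == Optional[str]  # union objects compare equal on 3.10+
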
@@ -96,7 +96,7 @@ def pid(self) -> str: return self._pid @property - def prefix(self) -> Optional[str]: + def prefix(self) -> str | None: """Prefix part of the ID if there is one.""" return self._prefix @@ -122,7 +122,7 @@ def __eq__(self, other: object) -> bool: return self.prefix == other.prefix and self.pid == other.pid @classmethod - def validate(cls, value: Union[str, PID]) -> PID: + def validate(cls, value: str | PID) -> PID: """Pydantic validator for PID fields.""" if isinstance(value, str): return PID.parse(value) diff --git a/src/scitacean/testing/_pytest_helpers.py b/src/scitacean/testing/_pytest_helpers.py index 8951b9e5..d3a0ac05 100644 --- a/src/scitacean/testing/_pytest_helpers.py +++ b/src/scitacean/testing/_pytest_helpers.py @@ -2,7 +2,6 @@ # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) from pathlib import Path -from typing import Optional, Tuple, Union import pytest @@ -30,20 +29,20 @@ def root_tmp_dir( def init_pytest_work_dir( request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, - name: Optional[str], -) -> Tuple[Path, Union[FileCounter, NullCounter]]: + name: str | None, +) -> tuple[Path, FileCounter | NullCounter]: """Create a working directory and initialize an atomic counter and lock for it.""" return init_work_dir(request, root_tmp_dir(request, tmp_path_factory), name) def init_work_dir( - request: pytest.FixtureRequest, base_path: Path, name: Optional[str] -) -> Tuple[Path, Union[FileCounter, NullCounter]]: + request: pytest.FixtureRequest, base_path: Path, name: str | None +) -> tuple[Path, FileCounter | NullCounter]: """Create a working directory and initialize an atomic counter and lock for it.""" target_dir = base_path / name if name else base_path target_dir.mkdir(exist_ok=True) - counter: Union[FileCounter, NullCounter] + counter: FileCounter | NullCounter if using_xdist(request): counter = FileCounter(target_dir / "counter") else: diff --git a/src/scitacean/testing/backend/_backend.py b/src/scitacean/testing/backend/_backend.py index d9daf0f5..b0c6b8a1 100644 --- a/src/scitacean/testing/backend/_backend.py +++ b/src/scitacean/testing/backend/_backend.py @@ -5,7 +5,7 @@ import time from copy import deepcopy from pathlib import Path -from typing import Any, Dict, Union +from typing import Any, Union from urllib.parse import urljoin import requests @@ -25,14 +25,14 @@ def _read_yaml(filename: str) -> Any: ) -def _docker_compose_template() -> Dict[str, Any]: +def _docker_compose_template() -> dict[str, Any]: template = _read_yaml("docker-compose-backend-template.yaml") return template # type: ignore[no-any-return] def _apply_config( - template: Dict[str, Any], account_config_path: Path -) -> Dict[str, Any]: + template: dict[str, Any], account_config_path: Path +) -> dict[str, Any]: res = deepcopy(template) scicat = res["services"]["scicat"] ports = scicat["ports"][0].split(":") diff --git a/src/scitacean/testing/backend/_pytest_helpers.py b/src/scitacean/testing/backend/_pytest_helpers.py index 736c0d49..475af964 100644 --- a/src/scitacean/testing/backend/_pytest_helpers.py +++ b/src/scitacean/testing/backend/_pytest_helpers.py @@ -1,11 +1,10 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) -from typing import Optional import pytest -_COMMAND_LINE_OPTION: Optional[str] = None +_COMMAND_LINE_OPTION: str | None = None def add_pytest_option(parser: pytest.Parser, option: str = "--backend-tests") -> None: diff --git 
a/src/scitacean/testing/backend/config.py b/src/scitacean/testing/backend/config.py index f7216d1d..c7ed8583 100644 --- a/src/scitacean/testing/backend/config.py +++ b/src/scitacean/testing/backend/config.py @@ -5,7 +5,6 @@ import json from dataclasses import dataclass from pathlib import Path -from typing import Union @dataclass @@ -39,7 +38,7 @@ def credentials(self) -> dict[str, str]: "password": self.password, } - def dump(self) -> dict[str, Union[str, bool]]: + def dump(self) -> dict[str, str | bool]: """Return a dict that can be serialized to functionalAccounts.json.""" return { "username": self.username, diff --git a/src/scitacean/testing/backend/fixtures.py b/src/scitacean/testing/backend/fixtures.py index ff6912bf..631c30c5 100644 --- a/src/scitacean/testing/backend/fixtures.py +++ b/src/scitacean/testing/backend/fixtures.py @@ -4,9 +4,9 @@ """Pytest fixtures to manage and access a local SciCat backend.""" import logging +from collections.abc import Generator from contextlib import contextmanager from pathlib import Path -from typing import Generator, Optional, Type, Union import pytest @@ -73,7 +73,7 @@ def fake_client(scicat_access: SciCatAccess) -> FakeClient: @pytest.fixture() -def real_client(scicat_access: SciCatAccess, scicat_backend: bool) -> Optional[Client]: +def real_client(scicat_access: SciCatAccess, scicat_backend: bool) -> Client | None: """Fixture that returns a real client if backend tests are enabled. Returns @@ -92,7 +92,7 @@ def real_client(scicat_access: SciCatAccess, scicat_backend: bool) -> Optional[C @pytest.fixture(params=["real", "fake"]) -def client(request, scicat_backend) -> Union[Client, FakeClient]: +def client(request, scicat_backend) -> Client | FakeClient: """Fixture that returns a real and a fake client. Using this fixture makes tests run twice, once with a real client @@ -156,7 +156,7 @@ def scicat_backend(request, tmp_path_factory, scicat_access): @contextmanager def _prepare_without_backend( scicat_access: SciCatAccess, - counter: Union[FileCounter, NullCounter], + counter: FileCounter | NullCounter, target_dir: Path, ) -> Generator[None, None, None]: with counter.increment() as count: @@ -172,7 +172,7 @@ def _prepare_without_backend( @contextmanager def _prepare_with_backend( scicat_access: SciCatAccess, - counter: Union[FileCounter, NullCounter], + counter: FileCounter | NullCounter, target_dir: Path, ) -> Generator[None, None, None]: try: @@ -194,7 +194,7 @@ def _prepare_with_backend( def _seed_database( - client_class: Union[Type[Client], Type[FakeClient]], + client_class: type[Client] | type[FakeClient], scicat_access: SciCatAccess, target_dir: Path, ) -> None: diff --git a/src/scitacean/testing/backend/seed.py b/src/scitacean/testing/backend/seed.py index 3e2ddfce..c5b0ceeb 100644 --- a/src/scitacean/testing/backend/seed.py +++ b/src/scitacean/testing/backend/seed.py @@ -10,7 +10,6 @@ import pickle from copy import deepcopy from pathlib import Path -from typing import Dict, List, Union from dateutil.parser import parse as parse_datetime @@ -34,7 +33,7 @@ from .config import SITE, SciCatAccess, SciCatUser # Dataset models to upload to the database. -_DATASETS: Dict[str, Union[UploadRawDataset, UploadDerivedDataset]] = { +_DATASETS: dict[str, UploadRawDataset | UploadDerivedDataset] = { "raw": UploadRawDataset( ownerGroup="PLACEHOLDER", accessGroups=["uu", "faculty"], @@ -131,7 +130,7 @@ } # Orig datablocks to upload to the database. 
-_ORIG_DATABLOCKS: Dict[str, List[UploadOrigDatablock]] = { +_ORIG_DATABLOCKS: dict[str, list[UploadOrigDatablock]] = { "raw": [ UploadOrigDatablock( datasetId=PID(pid="PLACEHOLDER"), @@ -203,7 +202,7 @@ ], } -_ATTACHMENTS: Dict[str, List[UploadAttachment]] = { +_ATTACHMENTS: dict[str, list[UploadAttachment]] = { "derived": [ UploadAttachment( caption="Process Overview", @@ -223,16 +222,16 @@ ] } -INITIAL_DATASETS: Dict[str, DownloadDataset] = {} +INITIAL_DATASETS: dict[str, DownloadDataset] = {} """Initial datasets in the testing database.""" -INITIAL_ORIG_DATABLOCKS: Dict[str, List[DownloadOrigDatablock]] = {} +INITIAL_ORIG_DATABLOCKS: dict[str, list[DownloadOrigDatablock]] = {} """Initial orig datablocks in the testing database.""" -INITIAL_ATTACHMENTS: Dict[str, List[DownloadAttachment]] = {} +INITIAL_ATTACHMENTS: dict[str, list[DownloadAttachment]] = {} def _apply_config_dataset( - dset: Union[UploadRawDataset, UploadDerivedDataset], user: SciCatUser -) -> Union[UploadRawDataset, UploadDerivedDataset]: + dset: UploadRawDataset | UploadDerivedDataset, user: SciCatUser +) -> UploadRawDataset | UploadDerivedDataset: dset = deepcopy(dset) dset.owner = user.username dset.ownerGroup = user.group @@ -258,7 +257,7 @@ def _apply_config_attachment( def _create_dataset_model( - client: Client, dset: Union[UploadRawDataset, UploadDerivedDataset] + client: Client, dset: UploadRawDataset | UploadDerivedDataset ) -> DownloadDataset: uploaded = client.scicat.create_dataset_model(dset) # pid is a str if validation fails but we need a PID for fake clients. diff --git a/src/scitacean/testing/client.py b/src/scitacean/testing/client.py index 88765e1a..dadb5faf 100644 --- a/src/scitacean/testing/client.py +++ b/src/scitacean/testing/client.py @@ -7,8 +7,9 @@ import datetime import functools import uuid +from collections.abc import Callable from copy import deepcopy -from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from typing import Any from .. import model from ..client import Client, ScicatClient @@ -105,8 +106,8 @@ class FakeClient(Client): def __init__( self, *, - file_transfer: Optional[FileTransfer] = None, - disable: Optional[Dict[str, Exception]] = None, + file_transfer: FileTransfer | None = None, + disable: dict[str, Exception] | None = None, ) -> None: """Initialize a fake client with empty dataset storage. @@ -123,18 +124,18 @@ def __init__( super().__init__(client=FakeScicatClient(self), file_transfer=file_transfer) self.disabled = {} if disable is None else dict(disable) - self.datasets: Dict[PID, model.DownloadDataset] = {} - self.orig_datablocks: Dict[PID, List[model.DownloadOrigDatablock]] = {} - self.attachments: Dict[PID, List[model.DownloadAttachment]] = {} - self.samples: Dict[str, model.DownloadSample] = {} + self.datasets: dict[PID, model.DownloadDataset] = {} + self.orig_datablocks: dict[PID, list[model.DownloadOrigDatablock]] = {} + self.attachments: dict[PID, list[model.DownloadAttachment]] = {} + self.samples: dict[str, model.DownloadSample] = {} @classmethod def from_token( cls, *, url: str, - token: Union[str, StrStorage], - file_transfer: Optional[FileTransfer] = None, + token: str | StrStorage, + file_transfer: FileTransfer | None = None, ) -> FakeClient: """Create a new fake client. 
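Also visible throughout these hunks, sketched here for reference (not part of the patch): Callable, Iterator, Generator, and friends now come from collections.abc rather than typing, where the re-exported aliases have been deprecated since Python 3.9.

    # Sketch: the collections.abc import convention used by this series.
    from collections.abc import Callable, Generator

    def countdown(n: int) -> Generator[int, None, None]:
        # Yield n, n-1, ..., 1.
        while n > 0:
            yield n
            n -= 1

    def apply(fn: Callable[[int], int], values: list[int]) -> list[int]:
        return [fn(v) for v in values]

    assert apply(lambda x: x * 2, list(countdown(3))) == [6, 4, 2]
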
@@ -147,9 +148,9 @@ def from_credentials( cls, *, url: str, - username: Union[str, StrStorage], - password: Union[str, StrStorage], - file_transfer: Optional[FileTransfer] = None, + username: str | StrStorage, + password: str | StrStorage, + file_transfer: FileTransfer | None = None, ) -> FakeClient: """Create a new fake client. @@ -159,7 +160,7 @@ def from_credentials( @classmethod def without_login( - cls, *, url: str, file_transfer: Optional[FileTransfer] = None + cls, *, url: str, file_transfer: FileTransfer | None = None ) -> FakeClient: """Create a new fake client. @@ -189,7 +190,7 @@ def get_dataset_model( @_conditionally_disabled def get_orig_datablocks( self, pid: PID, strict_validation: bool = False - ) -> List[model.DownloadOrigDatablock]: + ) -> list[model.DownloadOrigDatablock]: """Fetch an orig datablock from SciCat.""" _ = strict_validation # unused by fake try: @@ -202,7 +203,7 @@ def get_orig_datablocks( @_conditionally_disabled def get_attachments_for_dataset( self, pid: PID, strict_validation: bool = False - ) -> List[model.DownloadAttachment]: + ) -> list[model.DownloadAttachment]: """Fetch all attachments from SciCat for a given dataset.""" _ = strict_validation # unused by fake return self.main.attachments.get(pid) or [] @@ -220,7 +221,7 @@ def get_sample_model( @_conditionally_disabled def create_dataset_model( - self, dset: Union[model.UploadDerivedDataset, model.UploadRawDataset] + self, dset: model.UploadDerivedDataset | model.UploadRawDataset ) -> model.DownloadDataset: """Create a new dataset in SciCat.""" ingested = _process_dataset(dset) @@ -249,7 +250,7 @@ def create_orig_datablock( def create_attachment_for_dataset( self, attachment: model.UploadAttachment, - dataset_id: Optional[PID] = None, + dataset_id: PID | None = None, ) -> model.DownloadAttachment: """Create a new attachment for a dataset in SciCat.""" if dataset_id is None: @@ -281,14 +282,14 @@ def create_sample_model(self, sample: model.UploadSample) -> model.DownloadSampl @_conditionally_disabled def validate_dataset_model( - self, dset: Union[model.UploadDerivedDataset, model.UploadRawDataset] + self, dset: model.UploadDerivedDataset | model.UploadRawDataset ) -> None: """Validate model remotely in SciCat.""" # Models were locally validated on construction, assume they are valid. 
pass -def _model_dict(mod: model.BaseModel) -> Dict[str, Any]: +def _model_dict(mod: model.BaseModel) -> dict[str, Any]: return { key: deepcopy(val) for key in mod.model_fields.keys() @@ -311,7 +312,7 @@ def _process_data_file(file: model.UploadDataFile) -> model.DownloadDataFile: def _process_dataset( - dset: Union[model.UploadDerivedDataset, model.UploadRawDataset], + dset: model.UploadDerivedDataset | model.UploadRawDataset, ) -> model.DownloadDataset: created_at = datetime.datetime.now(tz=datetime.timezone.utc) # TODO use user login if possible @@ -362,7 +363,7 @@ def _process_orig_datablock( def _process_attachment( - attachment: model.UploadAttachment, dataset_id: Optional[PID] = None + attachment: model.UploadAttachment, dataset_id: PID | None = None ) -> model.DownloadAttachment: created_at = datetime.datetime.now(tz=datetime.timezone.utc) fields = _model_dict(attachment) @@ -402,13 +403,13 @@ def _process_sample(sample: model.UploadSample) -> model.DownloadSample: def process_uploaded_dataset( - dataset: Union[model.UploadDerivedDataset, model.UploadRawDataset], - orig_datablocks: Optional[List[model.UploadOrigDatablock]], - attachments: Optional[List[model.UploadAttachment]], -) -> Tuple[ + dataset: model.UploadDerivedDataset | model.UploadRawDataset, + orig_datablocks: list[model.UploadOrigDatablock] | None, + attachments: list[model.UploadAttachment] | None, +) -> tuple[ model.DownloadDataset, - Optional[List[model.DownloadOrigDatablock]], - Optional[List[model.DownloadAttachment]], + list[model.DownloadOrigDatablock] | None, + list[model.DownloadAttachment] | None, ]: """Process a dataset as if it was uploaded to SciCat. diff --git a/src/scitacean/testing/sftp/_pytest_helpers.py b/src/scitacean/testing/sftp/_pytest_helpers.py index a9c2b5fe..25c25aa6 100644 --- a/src/scitacean/testing/sftp/_pytest_helpers.py +++ b/src/scitacean/testing/sftp/_pytest_helpers.py @@ -1,11 +1,10 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) -from typing import Optional import pytest -_COMMAND_LINE_OPTION: Optional[str] = None +_COMMAND_LINE_OPTION: str | None = None def add_pytest_option(parser: pytest.Parser, option: str = "--sftp-tests") -> None: diff --git a/src/scitacean/testing/sftp/_sftp.py b/src/scitacean/testing/sftp/_sftp.py index 824739ab..ee128390 100644 --- a/src/scitacean/testing/sftp/_sftp.py +++ b/src/scitacean/testing/sftp/_sftp.py @@ -2,10 +2,11 @@ # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) import importlib.resources import time +from collections.abc import Iterable from dataclasses import dataclass from functools import lru_cache from pathlib import Path -from typing import Any, Dict, Iterable, Tuple, Union +from typing import Any import paramiko import yaml @@ -37,7 +38,7 @@ def _read_resource_yaml(filename: str) -> Any: @lru_cache(maxsize=1) -def _docker_compose_file() -> Dict[str, Any]: +def _docker_compose_file() -> dict[str, Any]: return _read_resource_yaml( # type: ignore[no-any-return] "docker-compose-sftp-server.yaml" ) @@ -48,7 +49,7 @@ def _docker_file() -> str: return _read_resource_text("Dockerfile-sftp-server") -def _seed_files() -> Iterable[Tuple[str, str]]: +def _seed_files() -> Iterable[tuple[str, str]]: yield from ( (file.name, file.read_text()) for file in importlib.resources.files("scitacean.testing.sftp") @@ -76,7 +77,7 @@ def _copy_seed(target_seed_dir: Path) -> None: target_seed_dir.joinpath(name).write_text(content) -def configure(target_dir: 
Union[Path, str]) -> Path: +def configure(target_dir: Path | str) -> Path: """Generate a config file for docker compose and copy seed data.""" target_dir = Path(target_dir) target_seed_dir = target_dir / "data" / "seed" diff --git a/src/scitacean/testing/sftp/fixtures.py b/src/scitacean/testing/sftp/fixtures.py index 0b6e479d..a9796d1f 100644 --- a/src/scitacean/testing/sftp/fixtures.py +++ b/src/scitacean/testing/sftp/fixtures.py @@ -4,8 +4,8 @@ """Pytest fixtures to manage and access a local SFTP server.""" import logging +from collections.abc import Callable, Generator from pathlib import Path -from typing import Callable, Generator, Optional import pytest from paramiko import SFTPClient, SSHClient @@ -40,7 +40,7 @@ def sftp_access(request: pytest.FixtureRequest) -> SFTPAccess: @pytest.fixture(scope="session") def sftp_base_dir( request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory -) -> Optional[Path]: +) -> Path | None: """Fixture that returns the base working directory for the SFTP server setup. Returns @@ -59,7 +59,7 @@ def sftp_base_dir( @pytest.fixture(scope="session") -def sftp_data_dir(sftp_base_dir: Optional[Path]) -> Optional[Path]: +def sftp_data_dir(sftp_base_dir: Path | None) -> Path | None: """Fixture that returns the data directory for the SFTP server setup. Returns @@ -89,8 +89,8 @@ def require_sftp_fileserver(request, sftp_fileserver) -> None: def sftp_fileserver( request: pytest.FixtureRequest, sftp_access: SFTPAccess, - sftp_base_dir: Optional[Path], - sftp_data_dir: Optional[Path], + sftp_base_dir: Path | None, + sftp_data_dir: Path | None, sftp_connect_with_username_password, ) -> Generator[bool, None, None]: """Fixture to declare that a test needs a local SFTP server. diff --git a/src/scitacean/testing/strategies.py b/src/scitacean/testing/strategies.py index 9bce82d3..54958677 100644 --- a/src/scitacean/testing/strategies.py +++ b/src/scitacean/testing/strategies.py @@ -31,7 +31,7 @@ import string from functools import partial -from typing import Any, Dict, Optional +from typing import Any, Optional from email_validator import EmailNotValidError, ValidatedEmail, validate_email from hypothesis import strategies as st @@ -42,14 +42,14 @@ # email_validator and by extension pydantic is more picky than hypothesis # so make sure that generated emails actually pass model validation. 
-def _validate_email(email: str) -> Optional[ValidatedEmail]: +def _validate_email(email: str) -> ValidatedEmail | None: try: return validate_email(email, check_deliverability=False) except EmailNotValidError: return None -def _is_valid_email(validated_email: Optional[ValidatedEmail]) -> bool: +def _is_valid_email(validated_email: ValidatedEmail | None) -> bool: return validated_email is not None @@ -107,7 +107,7 @@ def multi_emails(max_emails: int = 2) -> st.SearchStrategy[str]: ).map(lambda email: ";".join(email)) -def _email_field_strategy(field: Dataset.Field) -> st.SearchStrategy[Optional[str]]: +def _email_field_strategy(field: Dataset.Field) -> st.SearchStrategy[str | None]: if field.required: return multi_emails() return st.none() | multi_emails() @@ -133,7 +133,7 @@ def make_orcid(digits: str) -> str: return st.text(alphabet="0123456789", min_size=16, max_size=16).map(make_orcid) -def _orcid_field_strategy(field: Dataset.Field) -> st.SearchStrategy[Optional[str]]: +def _orcid_field_strategy(field: Dataset.Field) -> st.SearchStrategy[str | None]: if field.required: return orcids() return st.none() | orcids() @@ -141,7 +141,7 @@ def _orcid_field_strategy(field: Dataset.Field) -> st.SearchStrategy[Optional[st def _scientific_metadata_strategy( field: Dataset.Field, -) -> st.SearchStrategy[Dict[str, Any]]: +) -> st.SearchStrategy[dict[str, Any]]: return st.dictionaries( keys=st.text(), values=st.text() | st.dictionaries(keys=st.text(), values=st.text()), @@ -150,13 +150,13 @@ def _scientific_metadata_strategy( def _job_parameters_strategy( field: Dataset.Field, -) -> st.SearchStrategy[Optional[Dict[str, str]]]: - return st.from_type(Optional[Dict[str, str]]) # type: ignore[arg-type] +) -> st.SearchStrategy[dict[str, str] | None]: + return st.from_type(Optional[dict[str, str]]) # type: ignore[arg-type] def _lifecycle_strategy( field: Dataset.Field, -) -> st.SearchStrategy[Optional[model.Lifecycle]]: +) -> st.SearchStrategy[model.Lifecycle | None]: # Lifecycle contains fields that have `Any` types which `st.from_type` can't handle. return st.sampled_from((None, model.Lifecycle())) @@ -197,7 +197,7 @@ def _field_strategy(field: Dataset.Field) -> st.SearchStrategy[Any]: def _make_dataset( - *, type: DatasetType, args: Dict[str, Any], read_only: Dict[str, Any] + *, type: DatasetType, args: dict[str, Any], read_only: dict[str, Any] ) -> Dataset: dset = Dataset(type=type, **args) for key, val in read_only.items(): @@ -206,7 +206,7 @@ def _make_dataset( def datasets( - type: Optional[DatasetType] = None, for_upload: bool = False, **fields: Any + type: DatasetType | None = None, for_upload: bool = False, **fields: Any ) -> st.SearchStrategy[Dataset]: """A strategy for generating datasets. 
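Note the one deliberate holdout above: _job_parameters_strategy still passes typing.Optional[dict[str, str]] to st.from_type. The two spellings are equivalent for type checkers but are distinct objects at runtime, which can matter when the annotation is handed to an introspection API. A minimal sketch of the distinction, not part of the patch:

    # Sketch (Python 3.10+): equal, but different runtime types.
    import types
    import typing

    old_style = typing.Optional[dict[str, str]]  # a typing.Union object
    new_style = dict[str, str] | None            # a types.UnionType object

    assert old_style == new_style
    assert type(old_style) is not type(new_style)
    assert isinstance(new_style, types.UnionType)
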
@@ -294,7 +294,7 @@ def make_arg(field: Dataset.Field) -> st.SearchStrategy[Any]: return make_fixed_arg(field.name) return _field_strategy(field) - def make_args(read_only: bool) -> Dict[str, st.SearchStrategy[Any]]: + def make_args(read_only: bool) -> dict[str, st.SearchStrategy[Any]]: return { field.name: make_arg(field) for field in Dataset.fields(read_only=read_only, dataset_type=type) diff --git a/src/scitacean/testing/transfer.py b/src/scitacean/testing/transfer.py index ba120c0f..b319c650 100644 --- a/src/scitacean/testing/transfer.py +++ b/src/scitacean/testing/transfer.py @@ -2,9 +2,10 @@ # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) """Fake file transfer.""" +from collections.abc import Iterator from contextlib import contextmanager from pathlib import Path -from typing import Any, Dict, Iterator, List, Optional, Union +from typing import Any try: from pyfakefs.fake_filesystem import FakeFilesystem @@ -22,7 +23,7 @@ class FakeDownloadConnection: """'Download' files from a fake file transfer.""" - def __init__(self, fs: Optional[FakeFilesystem], files: Dict[RemotePath, bytes]): + def __init__(self, fs: FakeFilesystem | None, files: dict[RemotePath, bytes]): self.files = files self.fs = fs @@ -34,7 +35,7 @@ def download_file(self, *, remote: RemotePath, local: Path) -> None: with open(local, "wb") as f: f.write(self.files[remote]) - def download_files(self, *, remote: List[RemotePath], local: List[Path]) -> None: + def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None: """Download multiple files.""" for r, l in zip(remote, local): self.download_file(remote=r, local=l) @@ -45,8 +46,8 @@ class FakeUploadConnection: def __init__( self, - files: Dict[RemotePath, bytes], - reverted: Dict[RemotePath, bytes], + files: dict[RemotePath, bytes], + reverted: dict[RemotePath, bytes], source_folder: RemotePath, ): self.files = files @@ -56,7 +57,7 @@ def __init__( def _remote_path(self, filename: RemotePath) -> RemotePath: return self._source_folder / filename - def _upload_file(self, *, remote: RemotePath, local: Optional[Path]) -> RemotePath: + def _upload_file(self, *, remote: RemotePath, local: Path | None) -> RemotePath: if local is None: raise ValueError(f"No local path for file {remote}") remote = self._remote_path(remote) @@ -64,7 +65,7 @@ def _upload_file(self, *, remote: RemotePath, local: Optional[Path]) -> RemotePa self.files[remote] = f.read() return remote - def upload_files(self, *files: File) -> List[File]: + def upload_files(self, *files: File) -> list[File]: """Upload files.""" for file in files: self._upload_file(remote=file.remote_path, local=file.local_path) @@ -108,10 +109,10 @@ def test_upload(fs): def __init__( self, *, - fs: Optional[FakeFilesystem] = None, - files: Optional[Dict[Union[str, RemotePath], bytes]] = None, - reverted: Optional[Dict[Union[str, RemotePath], bytes]] = None, - source_folder: Optional[Union[str, RemotePath]] = None, + fs: FakeFilesystem | None = None, + files: dict[str | RemotePath, bytes] | None = None, + reverted: dict[str | RemotePath, bytes] | None = None, + source_folder: str | RemotePath | None = None, ): """Initialize a file transfer. 
@@ -156,8 +157,8 @@ def connect_for_upload(self, dataset: Dataset) -> Iterator[FakeUploadConnection] def _remote_path_dict( - d: Optional[Dict[Union[str, RemotePath], bytes]], -) -> Dict[RemotePath, bytes]: + d: dict[str | RemotePath, bytes] | None, +) -> dict[RemotePath, bytes]: if d is None: return {} return {RemotePath(path): contents for path, contents in d.items()} diff --git a/src/scitacean/thumbnail.py b/src/scitacean/thumbnail.py index 6bf239d5..d79d31e8 100644 --- a/src/scitacean/thumbnail.py +++ b/src/scitacean/thumbnail.py @@ -8,7 +8,8 @@ import mimetypes import os import re -from typing import Any, Callable, Dict, Optional, Union +from collections.abc import Callable +from typing import Any from pydantic import GetCoreSchemaHandler from pydantic_core import core_schema @@ -54,15 +55,15 @@ class Thumbnail: data: bytes = thumbnail.decoded_data() """ - mime: Optional[str] + mime: str | None """Complete MIME type in the form ``type/subtype``.""" _encoded_data: str def __init__( self, - mime: Optional[str], - data: Optional[bytes] = None, - _encoded_data: Optional[str] = None, + mime: str | None, + data: bytes | None = None, + _encoded_data: str | None = None, ) -> None: """Create a new thumbnail object. @@ -93,7 +94,7 @@ def __init__( self.mime = mime @classmethod - def load_file(cls, path: Union[os.PathLike[str], str]) -> Thumbnail: + def load_file(cls, path: os.PathLike[str] | str) -> Thumbnail: """Construct a thumbnail from data loaded from a file. Parameters @@ -113,7 +114,7 @@ def load_file(cls, path: Union[os.PathLike[str], str]) -> Thumbnail: return Thumbnail(mime=mimetypes.guess_type(path)[0], _encoded_data=encoded_data) @classmethod - def parse(cls, encoded: Union[str, Thumbnail], /) -> Thumbnail: + def parse(cls, encoded: str | Thumbnail, /) -> Thumbnail: """Construct a thumbnail from a string as used by SciCat. Parameters @@ -163,14 +164,14 @@ def serialize(self) -> str: return mime_str + self.encoded_data() @property - def mime_type(self) -> Optional[str]: + def mime_type(self) -> str | None: """The MIME type, i.e., the first part of ``type/subtype``.""" if self.mime is None: return None return self.mime.split("/", 1)[0] @property - def mime_subtype(self) -> Optional[str]: + def mime_subtype(self) -> str | None: """The MIME subtype, i.e., the second part of ``type/subtype``.""" if self.mime is None: return None @@ -192,11 +193,11 @@ def __repr__(self) -> str: def _repr_mimebundle_( self, include: Any = None, exclude: Any = None - ) -> Dict[str, Union[bytes, str]]: + ) -> dict[str, bytes | str]: def decoded() -> bytes: return self.decoded_data() - repr_fns: dict[str, Callable[[], Union[bytes, str]]] = { + repr_fns: dict[str, Callable[[], bytes | str]] = { "image/png": decoded, "image/jpeg": decoded, "image/svg+xml": decoded, diff --git a/src/scitacean/transfer/link.py b/src/scitacean/transfer/link.py index 40c52a5c..5fb461b7 100644 --- a/src/scitacean/transfer/link.py +++ b/src/scitacean/transfer/link.py @@ -2,9 +2,9 @@ # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) """File transfer that creates symlinks.""" +from collections.abc import Iterator from contextlib import contextmanager from pathlib import Path -from typing import Iterator, List, Optional, Union from ..dataset import Dataset from ..file import File @@ -20,7 +20,7 @@ class LinkDownloadConnection: :meth:`scitacean.transfer.link.LinkFileTransfer.connect_for_download`. 
""" - def download_files(self, *, remote: List[RemotePath], local: List[Path]) -> None: + def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None: """Download files from the given remote path.""" for r, l in zip(remote, local): self.download_file(remote=r, local=l) @@ -59,11 +59,11 @@ def source_folder(self) -> RemotePath: """The source folder this connection uploads to.""" return self._source_folder - def remote_path(self, filename: Union[str, RemotePath]) -> RemotePath: + def remote_path(self, filename: str | RemotePath) -> RemotePath: """Return the complete remote path for a given path.""" return self.source_folder / filename - def upload_files(self, *files: File) -> List[File]: + def upload_files(self, *files: File) -> list[File]: """Upload files to the remote folder.""" raise NotImplementedError() @@ -136,7 +136,7 @@ class LinkFileTransfer: def __init__( self, *, - source_folder: Optional[Union[str, RemotePath]] = None, + source_folder: str | RemotePath | None = None, ) -> None: """Construct a new Link file transfer. diff --git a/src/scitacean/transfer/sftp.py b/src/scitacean/transfer/sftp.py index 4b984e82..f4935e85 100644 --- a/src/scitacean/transfer/sftp.py +++ b/src/scitacean/transfer/sftp.py @@ -3,10 +3,10 @@ """SFTP file transfer.""" import os +from collections.abc import Callable, Iterator from contextlib import contextmanager from datetime import datetime, timezone from pathlib import Path -from typing import Callable, Iterator, List, Optional, Union from paramiko import SFTPAttributes, SFTPClient, SSHClient @@ -29,7 +29,7 @@ def __init__(self, *, sftp_client: SFTPClient, host: str) -> None: self._sftp_client = sftp_client self._host = host - def download_files(self, *, remote: List[RemotePath], local: List[Path]) -> None: + def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None: """Download files from the given remote path.""" for r, l in zip(remote, local): self.download_file(remote=r, local=l) @@ -64,7 +64,7 @@ def source_folder(self) -> RemotePath: """The source folder this connection uploads to.""" return self._source_folder - def remote_path(self, filename: Union[str, RemotePath]) -> RemotePath: + def remote_path(self, filename: str | RemotePath) -> RemotePath: """Return the complete remote path for a given path.""" return self.source_folder / filename @@ -76,7 +76,7 @@ def _make_source_folder(self) -> None: f"Failed to create source folder {self.source_folder}: {exc.args}" ) from None - def upload_files(self, *files: File) -> List[File]: + def upload_files(self, *files: File) -> list[File]: """Upload files to the remote folder.""" self._make_source_folder() uploaded: list[File] = [] @@ -124,7 +124,7 @@ def revert_upload(self, *files: File) -> None: self._host, ) self._sftp_client.rmdir(self.source_folder.posix) - except IOError as exc: + except OSError as exc: get_logger().warning( "Failed to remove empty remote directory %s on host %s:\n%s", self.source_folder, @@ -132,9 +132,7 @@ def revert_upload(self, *files: File) -> None: exc, ) - def _revert_upload_single( - self, *, remote: RemotePath, local: Optional[Path] - ) -> None: + def _revert_upload_single(self, *, remote: RemotePath, local: Path | None) -> None: remote_path = self.remote_path(remote) get_logger().info( "Reverting upload of file %s to %s on host %s", @@ -145,7 +143,7 @@ def _revert_upload_single( try: self._sftp_client.remove(remote_path.posix) - except IOError as exc: + except OSError as exc: get_logger().warning("Error reverting file %s:\n%s", 
remote_path, exc) return @@ -247,8 +245,8 @@ def __init__( *, host: str, port: int = 22, - source_folder: Optional[Union[str, RemotePath]] = None, - connect: Optional[Callable[[str, Optional[int]], SFTPClient]] = None, + source_folder: str | RemotePath | None = None, + connect: Callable[[str, int | None], SFTPClient] | None = None, ) -> None: """Construct a new SFTP file transfer. @@ -308,7 +306,7 @@ def connect_for_upload(self, dataset: Dataset) -> Iterator[SFTPUploadConnection] sftp_client.close() -def _default_connect(host: str, port: Optional[int]) -> SFTPClient: +def _default_connect(host: str, port: int | None) -> SFTPClient: client = SSHClient() client.load_system_host_keys() if port is not None: @@ -321,7 +319,7 @@ def _default_connect(host: str, port: Optional[int]) -> SFTPClient: def _connect( host: str, port: int, - connect: Optional[Callable[[str, Optional[int]], SFTPClient]], + connect: Callable[[str, int | None], SFTPClient] | None, ) -> SFTPClient: try: if connect is None: @@ -351,7 +349,7 @@ def _mkdir_remote(sftp: SFTPClient, path: RemotePath) -> None: ) -def _try_remote_stat(sftp: SFTPClient, path: RemotePath) -> Optional[SFTPAttributes]: +def _try_remote_stat(sftp: SFTPClient, path: RemotePath) -> SFTPAttributes | None: try: return sftp.stat(path.posix) except FileNotFoundError: diff --git a/src/scitacean/transfer/util.py b/src/scitacean/transfer/util.py index 9fd0244f..dd72e0a4 100644 --- a/src/scitacean/transfer/util.py +++ b/src/scitacean/transfer/util.py @@ -2,7 +2,6 @@ # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) """Common utilities for file transfers.""" -from typing import Optional, Union from uuid import uuid4 from ..dataset import Dataset @@ -10,9 +9,7 @@ from ..util.formatter import DatasetPathFormatter -def source_folder_for( - dataset: Dataset, pattern: Optional[Union[str, RemotePath]] -) -> RemotePath: +def source_folder_for(dataset: Dataset, pattern: str | RemotePath | None) -> RemotePath: """Get or build the source folder for a dataset. Parameters diff --git a/src/scitacean/typing.py b/src/scitacean/typing.py index fae06546..b608d618 100644 --- a/src/scitacean/typing.py +++ b/src/scitacean/typing.py @@ -4,7 +4,7 @@ """Definitions for type checking.""" from pathlib import Path -from typing import ContextManager, List, Protocol +from typing import ContextManager, Protocol from .dataset import Dataset from .file import File @@ -14,7 +14,7 @@ class DownloadConnection(Protocol): """An open connection to the file server for downloads.""" - def download_files(self, *, remote: List[RemotePath], local: List[Path]) -> None: + def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None: """Download files from the file server. Parameters @@ -42,7 +42,7 @@ def connect_for_download(self) -> ContextManager[DownloadConnection]: class UploadConnection(Protocol): """An open connection to the file server for uploads.""" - def upload_files(self, *files: File) -> List[File]: + def upload_files(self, *files: File) -> list[File]: """Upload files to the file server. 
Parameters diff --git a/src/scitacean/util/credentials.py b/src/scitacean/util/credentials.py index 10ae8bc4..70c33129 100644 --- a/src/scitacean/util/credentials.py +++ b/src/scitacean/util/credentials.py @@ -5,7 +5,7 @@ from __future__ import annotations import datetime -from typing import NoReturn, Optional, Union +from typing import NoReturn class StrStorage: @@ -14,7 +14,7 @@ class StrStorage: Instances can be nested to combine different specialized features. """ - def __init__(self, value: Optional[Union[str, StrStorage]]): + def __init__(self, value: str | StrStorage | None): self._value = value def get_str(self) -> str: @@ -83,7 +83,7 @@ class SecretStr(StrStorage): still be leaked through introspection methods. """ - def __init__(self, value: Union[str, StrStorage]): + def __init__(self, value: str | StrStorage): super().__init__(value) def __str__(self) -> str: @@ -103,9 +103,9 @@ class TimeLimitedStr(StrStorage): def __init__( self, *, - value: Union[str, StrStorage], + value: str | StrStorage, expires_at: datetime.datetime, - tolerance: Optional[datetime.timedelta] = None, + tolerance: datetime.timedelta | None = None, ): super().__init__(value) if tolerance is None: diff --git a/src/scitacean/util/formatter.py b/src/scitacean/util/formatter.py index 15f57d8c..4eb219a1 100644 --- a/src/scitacean/util/formatter.py +++ b/src/scitacean/util/formatter.py @@ -2,8 +2,9 @@ # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) """String-formatting tools.""" +from collections.abc import Iterable from string import Formatter -from typing import Any, Iterable, Optional, Tuple +from typing import Any from ..filesystem import escape_path @@ -52,16 +53,16 @@ class DatasetPathFormatter(Formatter): def parse( self, format_string: str ) -> Iterable[ - Tuple[ + tuple[ StrOrLiteralStr, - Optional[StrOrLiteralStr], - Optional[StrOrLiteralStr], - Optional[StrOrLiteralStr], + StrOrLiteralStr | None, + StrOrLiteralStr | None, + StrOrLiteralStr | None, ] ]: """Parse a format string.""" - def add0(field_name: Optional[str]) -> Optional[str]: + def add0(field_name: str | None) -> str | None: if field_name == "uid": return field_name if isinstance(field_name, str): diff --git a/tests/common/files.py b/tests/common/files.py index 2c1bf899..2c532889 100644 --- a/tests/common/files.py +++ b/tests/common/files.py @@ -4,14 +4,14 @@ import hashlib from datetime import datetime, timezone from pathlib import Path -from typing import Any, Dict, Optional, Union +from typing import Any from pyfakefs.fake_filesystem import FakeFilesystem def make_file( - fs: FakeFilesystem, path: Union[str, Path], contents: Optional[bytes] = None -) -> Dict[str, Any]: + fs: FakeFilesystem, path: str | Path, contents: bytes | None = None +) -> dict[str, Any]: if contents is None: contents = b"a bunch of file contents" * len(str(path)) path = Path(path) diff --git a/tests/dataset_fields_test.py b/tests/dataset_fields_test.py index 7b5af500..0e705197 100644 --- a/tests/dataset_fields_test.py +++ b/tests/dataset_fields_test.py @@ -6,7 +6,6 @@ # public interface and make sure that Dataset does not break any behavior. 
from datetime import datetime, timedelta, timezone -from typing import Union import dateutil.parser import pydantic @@ -62,7 +61,7 @@ def test_init_dataset_sets_creation_time(): def test_init_dataset_can_set_creation_time(): - dt: Union[str, datetime] + dt: str | datetime dt = dateutil.parser.parse("2022-01-10T11:14:52.623Z") dset = Dataset(type="derived", creation_time=dt) @@ -229,13 +228,13 @@ def test_init_from_models_sets_files(): assert dset.packed_size == 0 assert dset.size == 6123 + 551 - (f0,) = [f for f in dset.files if f.remote_path.suffix == ".dat"] + (f0,) = (f for f in dset.files if f.remote_path.suffix == ".dat") assert f0.remote_access_path(dset.source_folder) == "/hex/source91/file1.dat" assert f0.local_path is None assert f0.size == 6123 assert f0.make_model().path == "file1.dat" - (f1,) = [f for f in dset.files if f.remote_path.suffix == ".png"] + (f1,) = (f for f in dset.files if f.remote_path.suffix == ".png") assert f1.remote_access_path(dset.source_folder) == "/hex/source91/sub/file2.png" assert f1.local_path is None assert f1.size == 551 @@ -296,13 +295,13 @@ def test_init_from_models_sets_files_multi_datablocks(): assert dset.packed_size == 0 assert dset.size == 6123 + 992 - (f0,) = [f for f in dset.files if f.remote_path.suffix == ".dat"] + (f0,) = (f for f in dset.files if f.remote_path.suffix == ".dat") assert f0.remote_access_path(dset.source_folder) == "/hex/source91/file1.dat" assert f0.local_path is None assert f0.size == 6123 assert f0.make_model().path == "file1.dat" - (f1,) = [f for f in dset.files if f.remote_path.suffix == ".png"] + (f1,) = (f for f in dset.files if f.remote_path.suffix == ".png") assert f1.remote_access_path(dset.source_folder) == "/hex/source91/sub/file2.png" assert f1.local_path is None assert f1.size == 992 diff --git a/tests/download_test.py b/tests/download_test.py index 87d11208..b25760e5 100644 --- a/tests/download_test.py +++ b/tests/download_test.py @@ -5,7 +5,6 @@ from contextlib import contextmanager from copy import deepcopy from pathlib import Path -from typing import Union import pytest from dateutil.parser import parse as parse_date @@ -78,7 +77,7 @@ def dataset_and_files(data_files): } -def load(name: Union[str, Path]) -> bytes: +def load(name: str | Path) -> bytes: with open(name, "rb") as f: return f.read() diff --git a/tests/model_test.py b/tests/model_test.py index b5110974..6dbcf27a 100644 --- a/tests/model_test.py +++ b/tests/model_test.py @@ -1,7 +1,7 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2024 SciCat Project (https://github.com/SciCatProject/scitacean) import dataclasses -from typing import Type, TypeVar +from typing import TypeVar import pytest from dateutil.parser import parse as parse_date @@ -20,7 +20,7 @@ T = TypeVar("T") -def build_user_model_for_upload(cls: Type[T]) -> st.SearchStrategy[T]: +def build_user_model_for_upload(cls: type[T]) -> st.SearchStrategy[T]: private_fields = { field.name: st.none() for field in dataclasses.fields(cls) # type: ignore[arg-type] diff --git a/tests/transfer/sftp_test.py b/tests/transfer/sftp_test.py index c61b22e0..0ed83701 100644 --- a/tests/transfer/sftp_test.py +++ b/tests/transfer/sftp_test.py @@ -4,10 +4,10 @@ import dataclasses import tempfile +from collections.abc import Iterator from contextlib import contextmanager from datetime import datetime, timedelta, timezone from pathlib import Path -from typing import Iterator import paramiko import pytest @@ -279,7 +279,7 @@ class CorruptingSFTP(paramiko.SFTPClient): """Appends bytes to uploaded files 
to simulate a broken transfer.""" def put(self, localpath, remotepath, callback=None, confirm=True): - with open(localpath, "r") as f: + with open(localpath) as f: content = f.read() with tempfile.TemporaryDirectory() as tempdir: corrupted_path = Path(tempdir) / "corrupted" diff --git a/tools/model-generation/generate_models.py b/tools/model-generation/generate_models.py index 2bf29cf6..2b87a4aa 100644 --- a/tools/model-generation/generate_models.py +++ b/tools/model-generation/generate_models.py @@ -6,9 +6,10 @@ import argparse import subprocess import tempfile +from collections.abc import Generator from contextlib import contextmanager from pathlib import Path -from typing import Any, Dict, Generator +from typing import Any from jinja2 import Environment, FileSystemLoader, Template from spec import DatasetSpec, Spec, load_specs @@ -61,7 +62,7 @@ def _dataset_fields_template() -> Template: return _template("dataset_fields") -def generate_models(specs: Dict[str, Spec]) -> str: +def generate_models(specs: dict[str, Spec]) -> str: specs = dict(specs) dset_spec = specs.pop("Dataset") return _model_template().render(banner=BANNER, specs=specs, dset_spec=dset_spec) @@ -95,7 +96,7 @@ def _scicat_backend() -> Generator[None, None, None]: backend.stop_backend(docker_file) -def load(real_backend: bool) -> Dict[str, Any]: +def load(real_backend: bool) -> dict[str, Any]: if not real_backend: return load_specs(SCHEMA_URL) diff --git a/tools/model-generation/spec/__init__.py b/tools/model-generation/spec/__init__.py index 99f6ce5f..e0436b1b 100644 --- a/tools/model-generation/spec/__init__.py +++ b/tools/model-generation/spec/__init__.py @@ -8,7 +8,7 @@ from copy import deepcopy from functools import lru_cache from pathlib import Path -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, Literal import yaml @@ -18,7 +18,7 @@ @dataclasses.dataclass class _UpDownSchemas: download: Schema - upload: Optional[Schema] + upload: Schema | None @dataclasses.dataclass @@ -35,10 +35,10 @@ class SpecField: description: str type: str required: bool # Required in upload. 
- default: Optional[str] = None + default: str | None = None upload: bool = False download: bool = False - validation: Optional[str] = None + validation: str | None = None def full_type_for(self, kind: Literal["download", "upload", "user"]) -> str: return ( @@ -66,10 +66,10 @@ def type_for(self, kind: Literal["download", "upload", "user"]) -> str: class Spec: name: str download_name: str = dataclasses.field(init=False) - upload_name: Optional[str] = dataclasses.field(init=False) - fields: Dict[str, SpecField] - masked_fields_download: Dict[str, SpecField] = dataclasses.field(init=False) - masked_fields_upload: Dict[str, SpecField] = dataclasses.field(init=False) + upload_name: str | None = dataclasses.field(init=False) + fields: dict[str, SpecField] + masked_fields_download: dict[str, SpecField] = dataclasses.field(init=False) + masked_fields_upload: dict[str, SpecField] = dataclasses.field(init=False) def __post_init__(self) -> None: if _SCHEMA_GROUPS.get(self.name, (None, None))[0]: @@ -82,7 +82,7 @@ def __post_init__(self) -> None: def fields_for( self, kind: Literal["download", "upload", "user"] - ) -> List[SpecField]: + ) -> list[SpecField]: return sorted( sorted( filter( @@ -105,8 +105,8 @@ class DatasetFieldConversion: @dataclasses.dataclass class DatasetField(SpecField): - conversion: Optional[DatasetFieldConversion] = None - default: Optional[Any] = None + conversion: DatasetFieldConversion | None = None + default: Any | None = None manual: bool = False # These are only used for upload models. @@ -118,14 +118,14 @@ class DatasetField(SpecField): @dataclasses.dataclass class DatasetSpec(Spec): download_name: str = dataclasses.field(default="DownloadDataset", init=False) - upload_name: Optional[str] = dataclasses.field(default="UploadDataset", init=False) - fields: Dict[str, DatasetField] + upload_name: str | None = dataclasses.field(default="UploadDataset", init=False) + fields: dict[str, DatasetField] def dset_fields_for( self, kind: Literal["download", "upload", "user"], dset_type: Literal["derived", "raw"], - ) -> List[DatasetField]: + ) -> list[DatasetField]: return list( filter( lambda field: field.used_by_derived @@ -135,7 +135,7 @@ def dset_fields_for( ) ) - def user_dset_fields(self, manual: Optional[bool] = None) -> List[DatasetField]: + def user_dset_fields(self, manual: bool | None = None) -> list[DatasetField]: if manual is None: return list(self.fields.values()) return [field for field in self.fields.values() if field.manual == manual] @@ -156,8 +156,8 @@ def user_dset_fields(self, manual: Optional[bool] = None) -> List[DatasetField]: def _collect_schemas( - schemas: Dict[str, Schema], -) -> Dict[str, Union[_UpDownSchemas, _DatasetSchemas]]: + schemas: dict[str, Schema], +) -> dict[str, _UpDownSchemas | _DatasetSchemas]: return { "Dataset": _DatasetSchemas( upload_derived=schemas["CreateDerivedDatasetDto"], @@ -204,7 +204,7 @@ def _get_common_field_attr( def _merge_field( - name: str, download: Optional[SchemaField], upload: Optional[SchemaField] + name: str, download: SchemaField | None, upload: SchemaField | None ) -> SpecField: fields = {"download": download, "upload": upload} return SpecField( @@ -242,9 +242,9 @@ def _build_spec(name: str, schemas: _UpDownSchemas) -> Spec: def _merge_dataset_field( name: str, - download: Optional[SchemaField], - raw_upload: Optional[SchemaField], - derived_upload: Optional[SchemaField], + download: SchemaField | None, + raw_upload: SchemaField | None, + derived_upload: SchemaField | None, ) -> DatasetField: fields = { "download": 
download, @@ -291,8 +291,8 @@ def _build_dataset_spec(name: str, schemas: _DatasetSchemas) -> DatasetSpec: @lru_cache -def _masked_fields() -> Dict[str, List[str]]: - with open(Path(__file__).resolve().parent / "masked-fields.yml", "r") as f: +def _masked_fields() -> dict[str, list[str]]: + with open(Path(__file__).resolve().parent / "masked-fields.yml") as f: return yaml.safe_load(f) @@ -316,8 +316,8 @@ def _mask_fields(spec: Spec) -> Spec: @lru_cache -def _field_name_overrides() -> Dict[str, Dict[str, str]]: - with open(Path(__file__).resolve().parent / "field-name-overrides.yml", "r") as f: +def _field_name_overrides() -> dict[str, dict[str, str]]: + with open(Path(__file__).resolve().parent / "field-name-overrides.yml") as f: return yaml.safe_load(f) @@ -330,8 +330,8 @@ def _postprocess_field_names(spec: Spec) -> Spec: @lru_cache -def _field_type_overrides() -> Dict[str, Dict[str, str]]: - with open(Path(__file__).resolve().parent / "field-type-overrides.yml", "r") as f: +def _field_type_overrides() -> dict[str, dict[str, str]]: + with open(Path(__file__).resolve().parent / "field-type-overrides.yml") as f: return yaml.safe_load(f) @@ -352,8 +352,8 @@ def _postprocess_field_types(spec: Spec) -> Spec: @lru_cache -def _field_validations() -> Dict[str, Dict[str, str]]: - with open(Path(__file__).resolve().parent / "field-validations.yml", "r") as f: +def _field_validations() -> dict[str, dict[str, str]]: + with open(Path(__file__).resolve().parent / "field-validations.yml") as f: return yaml.safe_load(f) @@ -373,8 +373,8 @@ def _assign_validations(spec: Spec) -> Spec: @lru_cache -def _dataset_field_customizations() -> Dict[str, Any]: - with open(Path(__file__).resolve().parent / "dataset-fields.yml", "r") as f: +def _dataset_field_customizations() -> dict[str, Any]: + with open(Path(__file__).resolve().parent / "dataset-fields.yml") as f: return yaml.safe_load(f) @@ -392,7 +392,7 @@ def _extend_dataset_fields(spec: DatasetSpec) -> DatasetSpec: return spec -def load_specs(schema_url: str) -> Dict[str, Any]: +def load_specs(schema_url: str) -> dict[str, Any]: schemas = _collect_schemas(load_schemas(schema_url)) dataset_schema = schemas.pop("Dataset") specs = { diff --git a/tools/model-generation/spec/schema.py b/tools/model-generation/spec/schema.py index a96cc78d..39833b47 100644 --- a/tools/model-generation/spec/schema.py +++ b/tools/model-generation/spec/schema.py @@ -3,7 +3,7 @@ """Load schemas from a SciCat API.""" import dataclasses -from typing import Any, Dict, Optional +from typing import Any import requests @@ -14,16 +14,16 @@ class SchemaField: description: str type: str required: bool - default: Optional[str] + default: str | None @dataclasses.dataclass class Schema: name: str - fields: Dict[str, SchemaField] + fields: dict[str, SchemaField] -def parse_field_type(spec: Dict[str, Any]): +def parse_field_type(spec: dict[str, Any]): if "allOf" in spec: if len(spec["allOf"]) != 1: raise ValueError("More than one alternative type in 'allOf'") @@ -49,7 +49,7 @@ def parse_field_type(spec: Dict[str, Any]): raise ValueError(f"Unknown field type: {spec['type']}") -def parse_schema_fields(spec: Dict[str, Any]) -> Dict[str, SchemaField]: +def parse_schema_fields(spec: dict[str, Any]) -> dict[str, SchemaField]: return { name: SchemaField( name=name, @@ -62,7 +62,7 @@ def parse_schema_fields(spec: Dict[str, Any]) -> Dict[str, SchemaField]: } -def parse_schemas(spec: Dict[str, Any]) -> Dict[str, Schema]: +def parse_schemas(spec: dict[str, Any]) -> dict[str, Schema]: schemas = {} for name, 
field_spec in spec.items(): print(f"Parsing schema {name}") # noqa: T201 @@ -70,7 +70,7 @@ def parse_schemas(spec: Dict[str, Any]) -> Dict[str, Schema]: return schemas -def fetch_specs(url: str) -> Dict[str, Any]: +def fetch_specs(url: str) -> dict[str, Any]: """Download the raw schema JSON from the API. ``url`` needs to point to a 'json-explorer' of a SciCat backend with version >= 4. @@ -84,6 +84,6 @@ def fetch_specs(url: str) -> Dict[str, Any]: return response.json() -def load_schemas(url: str) -> Dict[str, Schema]: +def load_schemas(url: str) -> dict[str, Schema]: spec_json = fetch_specs(url) return parse_schemas(spec_json["components"]["schemas"]) diff --git a/tools/model-generation/templates/__init__.py b/tools/model-generation/templates/__init__.py index 3a6b2958..4b593b31 100644 --- a/tools/model-generation/templates/__init__.py +++ b/tools/model-generation/templates/__init__.py @@ -15,5 +15,5 @@ def load_template(name: str) -> Template: - with open(TEMPLATE_DIR / f"{name}.py.template", "r") as f: + with open(TEMPLATE_DIR / f"{name}.py.template") as f: return Template(f.read()) From 1b79348c5a3b1b1f27b70fed0aa039881e41d400 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Fri, 19 Apr 2024 15:11:49 +0200 Subject: [PATCH 4/7] Enable UP rules in ruff --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f936180d..6033589a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,7 @@ extend-include = ["*.ipynb"] extend-exclude = [".*", "__pycache__", "build", "dist", "venv"] [tool.ruff.lint] -select = ["B", "D", "E", "F", "G", "I", "S", "T20", "PGH", "FBT003", "RUF"] +select = ["B", "D", "E", "F", "G", "I", "S", "T20", "UP", "PGH", "FBT003", "RUF"] ignore = [ "B905", # `zip()` without an explicit `strict=` parameter "S324", # insecure hsh function; we don't use hashing for security From e7e5ccdf9453283d5169582a77840b08c9c3c771 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Fri, 19 Apr 2024 15:12:08 +0200 Subject: [PATCH 5/7] Update ruff --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8bab3321..68b2c953 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: args: ["--drop-empty-cells", "--extra-keys 'metadata.language_info.version cell.metadata.jp-MarkdownHeadingCollapsed cell.metadata.pycharm'"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.7 + rev: v0.4.0 hooks: - id: ruff-format types_or: [ python, pyi ] From d16d8642a15de98c17760ec2fc56118f77a6467f Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Fri, 19 Apr 2024 15:31:43 +0200 Subject: [PATCH 6/7] Run updated ruff --- pyproject.toml | 1 + src/scitacean/_internal/docker.py | 4 ++-- src/scitacean/client.py | 8 ++++---- src/scitacean/model.py | 2 +- src/scitacean/testing/backend/_backend.py | 4 ++-- src/scitacean/testing/strategies.py | 6 +++--- src/scitacean/typing.py | 9 ++++++--- 7 files changed, 19 insertions(+), 15 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6033589a..4e0ea23b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,6 +107,7 @@ ignore = [ "B905", # `zip()` without an explicit `strict=` parameter "S324", # insecure hsh function; we don't use hashing for security "E741", "E742", "E743", # do not use names ‘l’, ‘O’, or ‘I’; they are not a problem with a proper font + "UP038", # does not seem to work and leads to slower code "E111", "E114", "E117", 
"D206", "D300", # conflict with ruff format "D105", ] diff --git a/src/scitacean/_internal/docker.py b/src/scitacean/_internal/docker.py index 009ba5c9..dc4e47a8 100644 --- a/src/scitacean/_internal/docker.py +++ b/src/scitacean/_internal/docker.py @@ -9,9 +9,9 @@ import json import os import subprocess -from typing import Any, Union +from typing import Any -_PathLike = Union[str, os.PathLike[str]] +_PathLike = str | os.PathLike[str] def docker_compose_up(config_file: _PathLike, *services: str) -> None: diff --git a/src/scitacean/client.py b/src/scitacean/client.py index 1465fd80..4e9506cd 100644 --- a/src/scitacean/client.py +++ b/src/scitacean/client.py @@ -11,7 +11,7 @@ from collections.abc import Callable, Iterable, Iterator from contextlib import contextmanager from pathlib import Path -from typing import Any, Union +from typing import Any from urllib.parse import quote_plus import requests @@ -1162,9 +1162,9 @@ def _get_token( raise ScicatLoginError(response.content) -FileSelector = Union[ - bool, str, list[str], tuple[str], re.Pattern[str], Callable[[File], bool] -] +FileSelector = ( + bool | str | list[str] | tuple[str] | re.Pattern[str] | Callable[[File], bool] +) def _file_selector(select: FileSelector) -> Callable[[File], bool]: diff --git a/src/scitacean/model.py b/src/scitacean/model.py index d500972c..f170ff7e 100644 --- a/src/scitacean/model.py +++ b/src/scitacean/model.py @@ -127,7 +127,7 @@ class DownloadDataset( dataQualityMetrics: int | None = None description: str | None = None endTime: datetime | None = None - history: None | None = None + history: None = None instrumentGroup: str | None = None instrumentId: str | None = None isPublished: bool | None = None diff --git a/src/scitacean/testing/backend/_backend.py b/src/scitacean/testing/backend/_backend.py index b0c6b8a1..c76798e1 100644 --- a/src/scitacean/testing/backend/_backend.py +++ b/src/scitacean/testing/backend/_backend.py @@ -5,7 +5,7 @@ import time from copy import deepcopy from pathlib import Path -from typing import Any, Union +from typing import Any from urllib.parse import urljoin import requests @@ -14,7 +14,7 @@ from ..._internal.docker import docker_compose_down, docker_compose_up from . 
import config -_PathLike = Union[str, os.PathLike[str]] +_PathLike = str | os.PathLike[str] def _read_yaml(filename: str) -> Any: diff --git a/src/scitacean/testing/strategies.py b/src/scitacean/testing/strategies.py index 54958677..b6d73681 100644 --- a/src/scitacean/testing/strategies.py +++ b/src/scitacean/testing/strategies.py @@ -31,7 +31,7 @@ import string from functools import partial -from typing import Any, Optional +from typing import Any from email_validator import EmailNotValidError, ValidatedEmail, validate_email from hypothesis import strategies as st @@ -151,7 +151,7 @@ def _scientific_metadata_strategy( def _job_parameters_strategy( field: Dataset.Field, ) -> st.SearchStrategy[dict[str, str] | None]: - return st.from_type(Optional[dict[str, str]]) # type: ignore[arg-type] + return st.from_type(dict[str, str] | None) # type: ignore[arg-type] def _lifecycle_strategy( @@ -192,7 +192,7 @@ def _field_strategy(field: Dataset.Field) -> st.SearchStrategy[Any]: if (strategy := _SPECIAL_FIELDS.get(field.name)) is not None: return strategy(field) - typ = field.type if field.required else Optional[field.type] + typ = field.type if field.required else field.type | None return st.from_type(typ) # type:ignore[arg-type] diff --git a/src/scitacean/typing.py b/src/scitacean/typing.py index b608d618..10598d20 100644 --- a/src/scitacean/typing.py +++ b/src/scitacean/typing.py @@ -3,8 +3,9 @@ """Definitions for type checking.""" +from contextlib import AbstractContextManager from pathlib import Path -from typing import ContextManager, Protocol +from typing import Protocol from .dataset import Dataset from .file import File @@ -29,7 +30,7 @@ def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None class Downloader(Protocol): """Handler for file downloads.""" - def connect_for_download(self) -> ContextManager[DownloadConnection]: + def connect_for_download(self) -> AbstractContextManager[DownloadConnection]: """Open a connection to the file server. Returns @@ -90,7 +91,9 @@ def source_folder_for(self, dataset: Dataset) -> RemotePath: The source folder for ``dataset``. """ - def connect_for_upload(self, dataset: Dataset) -> ContextManager[UploadConnection]: + def connect_for_upload( + self, dataset: Dataset + ) -> AbstractContextManager[UploadConnection]: """Open a connection to the file server. Parameters From 0196d5f03f954035dc9d24ea4fbb2a6c42c64b06 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Fri, 19 Apr 2024 15:33:25 +0200 Subject: [PATCH 7/7] Remove unused warning filters --- pyproject.toml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4e0ea23b..c631e1b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,16 +60,8 @@ addopts = """ """ filterwarnings = [ "error", - # From dateutil. This needs to be first because it is triggered by pytest itself. - 'ignore:datetime.datetime.utcfromtimestamp:DeprecationWarning', # Many tests don't set a checksum, so File raises this warning. "ignore:Cannot check if local file:UserWarning", - # Internal deprecations. - "ignore:SSHFileTransfer is deprecated:scitacean.VisibleDeprecationWarning", - "ignore:Support for Pydantic v1 is deprecated:scitacean.VisibleDeprecationWarning", - # From fabric / invoke - "ignore:_SixMetaPathImporter:ImportWarning", - "ignore:the imp module is deprecated in favour of importlib:DeprecationWarning", ] [tool.mypy]
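
Note on the pattern applied above: patches 3 and 6 perform the mechanical
rewrite that ruff's UP (pyupgrade) rules enforce once requires-python is
">=3.10": PEP 585 builtin generics (list[str] instead of typing.List[str]),
PEP 604 unions (str | None instead of Optional[str] or Union[str, None]),
ABCs imported from collections.abc instead of typing, OSError instead of the
IOError alias, and open(path) instead of open(path, "r"). A minimal sketch of
the before and after follows; the pick function is invented for illustration
and is not part of scitacean:

    # Before: Python 3.9-compatible spelling.
    #
    # from typing import Callable, List, Optional, Union
    #
    # def pick(
    #     paths: List[str],
    #     key: Optional[Callable[[str], bool]] = None,
    #     default: Union[str, None] = None,
    # ) -> Optional[str]: ...

    # After: the spelling `ruff check --fix` produces with the UP rules
    # enabled and a Python >= 3.10 target.
    from collections.abc import Callable

    def pick(
        paths: list[str],                          # PEP 585 builtin generic
        key: Callable[[str], bool] | None = None,  # collections.abc + PEP 604
        default: str | None = None,                # PEP 604 instead of Optional
    ) -> str | None:
        """Return the first entry of paths accepted by key, else default."""
        return next((p for p in paths if key is None or key(p)), default)

    # Example: picks the first path matching the predicate.
    assert pick(["a.dat", "b.png"], key=lambda p: p.endswith(".png")) == "b.png"

The two spellings are equivalent at runtime on Python >= 3.10; the new one
only drops the typing imports. That is why patch 6 touches many files yet
changes no behavior: per its subject line, it was presumably produced by
running the updated ruff over the code base.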