syntactic auto-upgrades (Python 3.9->3.10) TODO: add to .git-blame-ignore-revs
neutrinoceros committed Sep 3, 2024
1 parent 32d1f7b commit b9fe508
Showing 91 changed files with 217 additions and 509 deletions.
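Note: the changes below are purely syntactic upgrades enabled by the new Python 3.10 floor, chiefly PEP 604 union syntax replacing typing.Optional/typing.Union, plus removal of a zip backport. The sketch below is illustrative only; the function name load_config is made up, and the commit message does not say which upgrade tool (e.g. pyupgrade or Ruff's UP rules) produced the rewrites.

# Illustration only -- not part of the diff.
# Python 3.9 spelling:
from typing import Optional, Union

def load_config(path: str, fallback: Optional[str] = None) -> Union[dict, None]:
    ...

# Python 3.10+ spelling (PEP 604), as applied throughout this commit:
def load_config(path: str, fallback: str | None = None) -> dict | None:
    ...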
15 changes: 0 additions & 15 deletions yt/_maintenance/backports.py
@@ -63,18 +63,3 @@ def _generate_next_value_(name, start, count, last_values):  # noqa B902
        Return the lower-cased version of the member name.
        """
        return name.lower()


builtin_zip = zip
if sys.version_info >= (3, 10):
    zip = builtin_zip
else:
    # this function is deprecated in more_itertools
    # because it is superseded by the standard library
    from more_itertools import zip_equal

    def zip(*args, strict=False):
        if strict:
            return zip_equal(*args)
        else:
            return builtin_zip(*args)
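Context for the deletion above: the backport emulated zip(strict=...), which the builtin only gained in Python 3.10 (PEP 618), so with a 3.10 floor the shim is dead code. A minimal sketch of the builtin behavior that replaces it:

# Python 3.10+: the builtin zip accepts strict= and raises on length mismatch.
coords = [1, 2, 3]
labels = ["x", "y"]

print(list(zip(coords, labels)))  # [(1, 'x'), (2, 'y')] -- silent truncation

try:
    list(zip(coords, labels, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is shorter than argument 1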
3 changes: 1 addition & 2 deletions yt/_maintenance/deprecation.py
@@ -1,15 +1,14 @@
import warnings
from functools import wraps
from types import FunctionType
from typing import Optional


def issue_deprecation_warning(
    msg: str,
    *,
    stacklevel: int,
    since: str,
    removal: Optional[str] = None,
    removal: str | None = None,
):
"""
Parameters
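The change above only touches the annotation of removal; call sites are unaffected. A hypothetical call matching the signature shown (the message and version strings are made up):

# Hypothetical usage; works identically before and after the annotation change.
from yt._maintenance.deprecation import issue_deprecation_warning

issue_deprecation_warning(
    "some_old_helper is deprecated, use some_new_helper instead",
    stacklevel=3,
    since="4.2",
    removal="4.4",
)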
14 changes: 7 additions & 7 deletions yt/_typing.py
@@ -1,34 +1,34 @@
from typing import Any, Optional, Union
from typing import Any, Optional

import numpy as np
import unyt as un

FieldDescT = tuple[str, tuple[str, list[str], Optional[str]]]
FieldDescT = tuple[str, tuple[str, list[str], str | None]]
KnownFieldsT = tuple[FieldDescT, ...]

ParticleType = str
FieldType = str
FieldName = str
FieldKey = tuple[FieldType, FieldName]
ImplicitFieldKey = FieldName
AnyFieldKey = Union[FieldKey, ImplicitFieldKey]
DomainDimensions = Union[tuple[int, ...], list[int], np.ndarray]
AnyFieldKey = FieldKey | ImplicitFieldKey
DomainDimensions = tuple[int, ...] | list[int] | np.ndarray

ParticleCoordinateTuple = tuple[
    str,  # particle type
    tuple[np.ndarray, np.ndarray, np.ndarray],  # xyz
    Union[float, np.ndarray],  # hsml
    float | np.ndarray,  # hsml
]

# Geometry specific types
AxisName = str
AxisOrder = tuple[AxisName, AxisName, AxisName]

# types that can be converted to un.Unit
Unit = Union[un.Unit, str]
Unit = un.Unit | str

# types that can be converted to un.unyt_quantity
Quantity = Union[un.unyt_quantity, tuple[float, Unit]]
Quantity = un.unyt_quantity | tuple[float, Unit]

# np.ndarray[...] syntax is runtime-valid from numpy 1.22, we quote it until our minimal
# runtime requirement is bumped to, or beyond this version
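The aliases in yt/_typing.py are evaluated at import time, so the X | Y spelling relies on PEP 604 union objects existing at runtime (Python 3.10+), not merely on type-checker support. A generic sketch with standard-library types only (the alias Number is made up, not one of yt's):

import types

# On Python 3.10+, X | Y builds a real runtime object: a types.UnionType.
Number = int | float
assert isinstance(Number, types.UnionType)

# PEP 604 unions also work directly with isinstance()/issubclass().
assert isinstance(3.14, Number)
assert not isinstance("3.14", Number)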
4 changes: 0 additions & 4 deletions yt/data_objects/analyzer_objects.py
@@ -1,11 +1,7 @@
import inspect
import sys

from yt.utilities.object_registries import analysis_task_registry

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip


class AnalysisTask:
    def __init_subclass__(cls, *args, **kwargs):
4 changes: 0 additions & 4 deletions yt/data_objects/construction_data_containers.py
@@ -1,7 +1,6 @@
import fileinput
import io
import os
import sys
import warnings
import zipfile
from functools import partial, wraps
@@ -64,9 +63,6 @@
)
from yt.visualization.color_maps import get_colormap_lut

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip


class YTStreamline(YTSelectionContainer1D):
"""
4 changes: 0 additions & 4 deletions yt/data_objects/data_containers.py
@@ -1,5 +1,4 @@
import abc
import sys
import weakref
from collections import defaultdict
from contextlib import contextmanager
@@ -29,9 +28,6 @@
from yt.utilities.on_demand_imports import _firefly as firefly
from yt.utilities.parameter_file_storage import ParameterFileStore

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip

if TYPE_CHECKING:
    from yt.data_objects.static_output import Dataset

5 changes: 0 additions & 5 deletions yt/data_objects/derived_quantities.py
@@ -1,5 +1,3 @@
import sys

import numpy as np

from yt.funcs import camelcase_to_underscore, iter_fields
@@ -13,9 +11,6 @@
from yt.utilities.physical_constants import gravitational_constant_cgs
from yt.utilities.physical_ratios import HUGE

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip


def get_position_fields(field, data):
    axis_names = [data.ds.coordinates.axis_name[num] for num in [0, 1, 2]]
4 changes: 0 additions & 4 deletions yt/data_objects/level_sets/tests/test_clump_finding.py
@@ -1,6 +1,5 @@
import os
import shutil
import sys
import tempfile

import numpy as np
@@ -13,9 +12,6 @@
from yt.testing import requires_file, requires_module
from yt.utilities.answer_testing.framework import data_dir_load

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip


def test_clump_finding():
    n_c = 8
5 changes: 0 additions & 5 deletions yt/data_objects/profiles.py
@@ -1,5 +1,3 @@
import sys

import numpy as np
from more_itertools import collapse

@@ -25,9 +23,6 @@
    parallel_objects,
)

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip


def _sanitize_min_max_units(amin, amax, finfo, registry):
    # returns a copy of amin and amax, converted to finfo's output units
42 changes: 20 additions & 22 deletions yt/data_objects/static_output.py
@@ -13,7 +13,7 @@
from functools import cached_property
from importlib.util import find_spec
from stat import ST_CTIME
from typing import Any, Literal, Optional, Union
from typing import Any, Literal, Optional

import numpy as np
import unyt as un
@@ -86,15 +86,13 @@
# to here, and then have it instantiate EnzoDatasets as appropriate.


_cached_datasets: MutableMapping[Union[int, str], "Dataset"] = (
    weakref.WeakValueDictionary()
)
_cached_datasets: MutableMapping[int | str, "Dataset"] = weakref.WeakValueDictionary()

# we set this global to None as a place holder
# its actual instantiation is delayed until after yt.__init__
# is completed because we need yt.config.ytcfg to be instantiated first

_ds_store: Optional[ParameterFileStore] = None
_ds_store: ParameterFileStore | None = None


def _setup_ds_store(ytcfg: YTConfig) -> None:
@@ -161,24 +159,24 @@ class Dataset(abc.ABC):
    default_field = ("gas", "density")
    fluid_types: tuple[FieldType, ...] = ("gas", "deposit", "index")
    particle_types: tuple[ParticleType, ...] = ("io",)  # By default we have an 'all'
    particle_types_raw: Optional[tuple[ParticleType, ...]] = ("io",)
    particle_types_raw: tuple[ParticleType, ...] | None = ("io",)
    geometry: Geometry = Geometry.CARTESIAN
    coordinates = None
    storage_filename = None
    particle_unions: Optional[dict[ParticleType, ParticleUnion]] = None
    known_filters: Optional[dict[ParticleType, ParticleFilter]] = None
    particle_unions: dict[ParticleType, ParticleUnion] | None = None
    known_filters: dict[ParticleType, ParticleFilter] | None = None
    _index_class: type[Index]
    field_units: Optional[dict[AnyFieldKey, Unit]] = None
    field_units: dict[AnyFieldKey, Unit] | None = None
    derived_field_list = requires_index("derived_field_list")
    fields = requires_index("fields")
    conversion_factors: Optional[dict[str, float]] = None
    conversion_factors: dict[str, float] | None = None
    # _instantiated represents an instantiation time (since Epoch)
    # the default is a place holder sentinel, falsy value
    _instantiated: float = 0
    _particle_type_counts = None
    _proj_type = "quad_proj"
    _ionization_label_format = "roman_numeral"
    _determined_fields: Optional[dict[str, list[FieldKey]]] = None
    _determined_fields: dict[str, list[FieldKey]] | None = None
    fields_detected = False

# these are set in self._parse_parameter_file()
@@ -233,8 +231,8 @@ def __init_subclass__(cls, *args, **kwargs):
    def __init__(
        self,
        filename: str,
        dataset_type: Optional[str] = None,
        units_override: Optional[dict[str, str]] = None,
        dataset_type: str | None = None,
        units_override: dict[str, str] | None = None,
        # valid unit_system values include all keys from unyt.unit_systems.unit_systems_registry + "code"
        unit_system: Literal[
            "cgs",
@@ -250,7 +248,7 @@ def __init__(
"Any"
] = None, # Any used as a placeholder here
*,
axis_order: Optional[AxisOrder] = None,
axis_order: AxisOrder | None = None,
) -> None:
"""
Base class for generating new output types. Principally consists of
@@ -732,7 +730,7 @@ def setup_deprecated_fields(self):
            added.append(("gas", old_name))
        self.field_info.find_dependencies(added)

def _setup_coordinate_handler(self, axis_order: Optional[AxisOrder]) -> None:
def _setup_coordinate_handler(self, axis_order: AxisOrder | None) -> None:
# backward compatibility layer:
# turning off type-checker on a per-line basis
cls: type[CoordinateHandler]
Expand Down Expand Up @@ -947,7 +945,7 @@ def _setup_particle_types(self, ptypes=None):

    def _get_field_info(
        self,
        field: Union[FieldKey, ImplicitFieldKey, DerivedField],
        field: FieldKey | ImplicitFieldKey | DerivedField,
        /,
    ) -> DerivedField:
        field_info, candidates = self._get_field_info_helper(field)
@@ -1008,7 +1006,7 @@ def _are_ambiguous(candidates: list[FieldKey]) -> bool:

    def _get_field_info_helper(
        self,
        field: Union[FieldKey, ImplicitFieldKey, DerivedField],
        field: FieldKey | ImplicitFieldKey | DerivedField,
        /,
    ) -> tuple[DerivedField, list[FieldKey]]:
        self.index
@@ -1280,8 +1278,8 @@ def _assign_unit_system(
        # is mks-like: i.e., it has a current with the same
        # dimensions as amperes.
        mks_system = False
        mag_unit: Optional[unyt_quantity] = getattr(self, "magnetic_unit", None)
        mag_dims: Optional[set[Symbol]]
        mag_unit: unyt_quantity | None = getattr(self, "magnetic_unit", None)
        mag_dims: set[Symbol] | None
        if mag_unit is not None:
            mag_dims = mag_unit.units.dimensions.free_symbols
        else:
@@ -2057,9 +2055,9 @@ class ParticleFile:
    filename: str
    file_id: int

    start: Optional[int] = None
    end: Optional[int] = None
    total_particles: Optional[defaultdict[str, int]] = None
    start: int | None = None
    end: int | None = None
    total_particles: defaultdict[str, int] | None = None

    def __init__(self, ds, io, filename, file_id, range=None):
        self.ds = ds
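One hunk above re-annotates the module-level dataset cache as MutableMapping[int | str, "Dataset"] backed by weakref.WeakValueDictionary. A generic sketch of that weak-value caching pattern, using a made-up Thing class rather than yt's Dataset:

import weakref
from collections.abc import MutableMapping


class Thing:
    """Stand-in for a heavyweight object such as a Dataset."""

    def __init__(self, name: str) -> None:
        self.name = name


# Values are held weakly: entries disappear once nothing else references them.
_cache: MutableMapping[int | str, Thing] = weakref.WeakValueDictionary()

t = Thing("demo")
_cache["demo"] = t
_cache[42] = t
print(len(_cache))  # 2

del t  # on CPython the last strong reference is gone, so both entries vanish
print(len(_cache))  # 0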
6 changes: 0 additions & 6 deletions yt/data_objects/tests/test_covering_grid.py
@@ -1,5 +1,3 @@
import sys

import numpy as np
from numpy.testing import assert_almost_equal, assert_array_equal, assert_equal

@@ -13,10 +11,6 @@
)
from yt.units import kpc

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip


# cylindrical data for covering_grid test
cyl_2d = "WDMerger_hdf5_chk_1000/WDMerger_hdf5_chk_1000.hdf5"
cyl_3d = "MHD_Cyl3d_hdf5_plt_cnt_0100/MHD_Cyl3d_hdf5_plt_cnt_0100.hdf5"
5 changes: 0 additions & 5 deletions yt/data_objects/tests/test_derived_quantities.py
@@ -1,5 +1,3 @@
import sys

import numpy as np
from numpy.testing import assert_almost_equal, assert_equal

@@ -13,9 +11,6 @@
    requires_file,
)

if sys.version_info < (3, 10):
    from yt._maintenance.backports import zip


def setup_module():
    from yt.config import ytcfg
5 changes: 2 additions & 3 deletions yt/data_objects/time_series.py
@@ -5,7 +5,6 @@
import weakref
from abc import ABC, abstractmethod
from functools import wraps
from typing import Optional

import numpy as np
from more_itertools import always_iterable
@@ -147,7 +146,7 @@ class DatasetSeries:
    # this annotation should really be Optional[Type[Dataset]]
    # but we cannot import the yt.data_objects.static_output.Dataset
    # class here without creating a circular import for now
    _dataset_cls: Optional[type] = None
    _dataset_cls: type | None = None

    def __init_subclass__(cls, *args, **kwargs):
        super().__init_subclass__(*args, **kwargs)
@@ -371,7 +370,7 @@ def from_output_log(cls, output_log, line_prefix="DATASET WRITTEN", parallel=Tru
        obj = cls(filenames, parallel=parallel)
        return obj

    def _load(self, output_fn, *, hint: Optional[str] = None, **kwargs):
    def _load(self, output_fn, *, hint: str | None = None, **kwargs):
        from yt.loaders import load

        if self._dataset_cls is not None:
(Diffs for the remaining changed files are not shown.)
