Skip to content

Commit

Permalink
Aggregates, patch broadcasting, and refactoring (#375)
Browse files Browse the repository at this point in the history
  • Loading branch information
d-chambers authored Jun 8, 2024
1 parent e206fc2 commit dea01ab
Show file tree
Hide file tree
Showing 68 changed files with 5,336 additions and 2,267 deletions.
2 changes: 2 additions & 0 deletions dascore/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
from dascore.core.patch import Patch
from dascore.core.attrs import PatchAttrs
from dascore.core.spool import BaseSpool, spool
from dascore.core.coordmanager import get_coord_manager, CoordManager
from dascore.core.coords import get_coord
from dascore.examples import get_example_patch, get_example_spool
from dascore.io.core import get_format, read, scan, scan_to_df, write
from dascore.units import get_quantity, get_unit
Expand Down
14 changes: 12 additions & 2 deletions dascore/compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

from contextlib import suppress

import numpy as np
from numpy import floor, interp # NOQA
from scipy.interpolate import interp1d # NOQA
from scipy.ndimage import zoom # NOQA
Expand All @@ -24,5 +25,14 @@ class DataArray:

def array(array):
    """
    Wrapper function for creating 'immutable' arrays.

    Parameters
    ----------
    array
        An array-like object (list, tuple, ndarray, ...). It is converted
        with ``np.asarray``, so non-ndarray inputs are copied into a new
        array first.

    Returns
    -------
    A read-only ``np.ndarray``.

    Notes
    -----
    ``np.asarray`` returns ndarray inputs unchanged, so if *array* is
    already an ndarray the write flag is cleared on the caller's object
    as well.
    """
    out = np.asarray(array)
    # Setting the write flag to false makes the array immutable unless
    # the flag is switched back.
    out.setflags(write=False)
    return out


def is_array(maybe_array):
    """Determine if an object is array like."""
    # Kept as a tuple so additional array types can be registered later
    # without changing callers.
    array_types = (np.ndarray,)
    return isinstance(maybe_array, array_types)
26 changes: 26 additions & 0 deletions dascore/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,10 @@ def map(self, func, iterables, **kwargs):
# Methods FileFormatter needs to support
FILE_FORMATTER_METHODS = ("read", "write", "get_format", "scan")

# These attributes are ignored by default when determining whether
# patches can be merged or broadcast together.
DEFAULT_ATTRS_TO_IGNORE = ("history", "dims")

# Large and small np.datetime64[ns] (used when defaults are needed)
SMALLDT64 = np.datetime64(MININT64 + 5_000_000_000, "ns")
LARGEDT64 = np.datetime64(MAXINT64 - 5_000_000_000, "ns")
Expand Down Expand Up @@ -135,6 +139,27 @@ def map(self, func, iterables, **kwargs):
"""


select_values_description = """
Any dimension name can be passed as key, and the values can be:
- a Slice or a tuple of (min, max) for that dimension.
`None` and ... both indicate open intervals.
- an array of values to select, which must be a subset of the
coordinate array.
- an array of booleans of the same length as the coordinate where
`True` indicates values to keep.
"""

check_behavior_description = """
check_behavior
Indicates what to do when an incompatible patch is found in the
spool. `None` will silently skip any incompatible patches,
'warn' will issue a warning and then skip incompatible patches,
'raise' will raise an
[`IncompatiblePatchError`](`dascore.exceptions.IncompatiblePatchError`)
if any incompatible patches are found.
"""


# Rich styles for various object displays.
dascore_styles = dict(
np_array_threshold=100, # max number of elements to show in array
Expand All @@ -147,6 +172,7 @@ def map(self, func, iterables, **kwargs):
coord_monotonic="bold grey",
coord_array="bold orange",
coord_degenerate="bold red",
coord_non="bold red",
units="bright blue",
dtypes="bright black",
keys="grey50",
Expand Down
Loading

0 comments on commit dea01ab

Please sign in to comment.