Skip to content

Commit

Permalink
Merge pull request #1050 from lsst/tickets/DM-36457
Browse files Browse the repository at this point in the history
DM-36457: Remove some deprecated code
  • Loading branch information
timj authored Aug 9, 2024
2 parents 427f673 + fdfc207 commit 8e57637
Show file tree
Hide file tree
Showing 12 changed files with 28 additions and 140 deletions.
2 changes: 2 additions & 0 deletions doc/changes/DM-36457.removal.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
* Removed the ``components`` parameter from registry APIs.
* Dropped support for regular expressions (`re.Pattern`) in dataset type expressions. Wildcard globs are still supported.
3 changes: 1 addition & 2 deletions doc/lsst.daf.butler/queries.rst
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,10 @@ Arguments that specify one or more dataset types can generally take any of the f
- `DatasetType` instances;
- `str` values (corresponding to `DatasetType.name`);
- `str` values using glob wildcard syntax which will be converted to `re.Pattern`;
- `re.Pattern` values (matched to `DatasetType.name` strings, via `~re.Pattern.fullmatch`);
- iterables of any of the above;
- the special value "``...``", which matches all dataset types.

Wildcards (`re.Pattern` and ``...``) are not allowed in certain contexts, such as `Registry.queryDataIds` and `Registry.queryDimensionRecords`, particularly when datasets are used only as a constraint on what is returned.
Wildcards (globs and ``...``) are not allowed in certain contexts, such as `Registry.queryDataIds` and `Registry.queryDimensionRecords`, particularly when datasets are used only as a constraint on what is returned.
`Registry.queryDatasetTypes` can be used to resolve patterns before calling these methods when desired.
In these contexts, passing a dataset type or name that is not registered with the repository will result in `MissingDatasetTypeError` being raised, while contexts that do accept wildcards will typically ignore unregistered dataset types (for example, `Registry.queryDatasets` will return no datasets for these).

Expand Down
10 changes: 1 addition & 9 deletions python/lsst/daf/butler/_registry_shim.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@
from .registry._collection_type import CollectionType
from .registry._defaults import RegistryDefaults
from .registry.queries import DataCoordinateQueryResults, DatasetQueryResults, DimensionRecordQueryResults
from .utils import _DefaultMarker, _Marker

if TYPE_CHECKING:
from .direct_butler import DirectButler
Expand Down Expand Up @@ -282,11 +281,10 @@ def queryDatasetTypes(
self,
expression: Any = ...,
*,
components: bool | _Marker = _DefaultMarker,
missing: list[str] | None = None,
) -> Iterable[DatasetType]:
# Docstring inherited from a base class.
return self._registry.queryDatasetTypes(expression, components=components, missing=missing)
return self._registry.queryDatasetTypes(expression, missing=missing)

def queryCollections(
self,
Expand All @@ -310,7 +308,6 @@ def queryDatasets(
dataId: DataId | None = None,
where: str = "",
findFirst: bool = False,
components: bool | _Marker = _DefaultMarker,
bind: Mapping[str, Any] | None = None,
check: bool = True,
**kwargs: Any,
Expand All @@ -323,7 +320,6 @@ def queryDatasets(
dataId=dataId,
where=where,
findFirst=findFirst,
components=components,
bind=bind,
check=check,
**kwargs,
Expand All @@ -337,7 +333,6 @@ def queryDataIds(
datasets: Any = None,
collections: CollectionArgType | None = None,
where: str = "",
components: bool | _Marker = _DefaultMarker,
bind: Mapping[str, Any] | None = None,
check: bool = True,
**kwargs: Any,
Expand All @@ -349,7 +344,6 @@ def queryDataIds(
datasets=datasets,
collections=collections,
where=where,
components=components,
bind=bind,
check=check,
**kwargs,
Expand All @@ -363,7 +357,6 @@ def queryDimensionRecords(
datasets: Any = None,
collections: CollectionArgType | None = None,
where: str = "",
components: bool | _Marker = _DefaultMarker,
bind: Mapping[str, Any] | None = None,
check: bool = True,
**kwargs: Any,
Expand All @@ -375,7 +368,6 @@ def queryDimensionRecords(
datasets=datasets,
collections=collections,
where=where,
components=components,
bind=bind,
check=check,
**kwargs,
Expand Down
7 changes: 0 additions & 7 deletions python/lsst/daf/butler/cli/cmd/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@
collection_type_option,
collections_argument,
collections_option,
components_option,
confirm_option,
dataset_type_option,
datasets_option,
Expand Down Expand Up @@ -440,15 +439,9 @@ def query_collections(*args: Any, **kwargs: Any) -> None:
"dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args: Any, **kwargs: Any) -> None:
"""Get the dataset types in a repository."""
# Drop the components option.
components = kwargs.pop("components")
if components is not None:
comp_opt_str = "" if components else "no-"
click.echo(f"WARNING: --{comp_opt_str}components option is deprecated and will be removed after v27.")
table = script.queryDatasetTypes(*args, **kwargs)
if table:
table.pprint_all()
Expand Down
13 changes: 0 additions & 13 deletions python/lsst/daf/butler/cli/opt/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
"CollectionTypeCallback",
"collection_type_option",
"collections_option",
"components_option",
"config_option",
"config_file_option",
"confirm_option",
Expand Down Expand Up @@ -110,18 +109,6 @@ def makeCollectionTypes(
)


components_option = MWOptionDecorator(
"--components/--no-components",
default=None,
help=unwrap(
"""For --components, apply all expression patterns to
component dataset type names as well. For --no-components,
never apply patterns to components. Only --no-components
is now supported. Option will be removed after v27."""
),
)


def _config_split(*args: Any) -> dict[str | None, str]:
# Config values might include commas so disable comma-splitting.
result = split_kv(*args, multiple=False)
Expand Down
22 changes: 2 additions & 20 deletions python/lsst/daf/butler/registry/_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -1015,7 +1015,6 @@ def queryDatasetTypes(
self,
expression: Any = ...,
*,
components: bool = False,
missing: list[str] | None = None,
) -> Iterable[DatasetType]:
"""Iterate over the dataset types whose names match an expression.
Expand All @@ -1028,9 +1027,6 @@ def queryDatasetTypes(
``...`` can be used to return all dataset types, and is the
default. See :ref:`daf_butler_dataset_type_expressions` for more
information.
components : `bool`, optional
Must be `False`. Provided only for backwards compatibility. After
v27 this argument will be removed entirely.
missing : `list` of `str`, optional
String dataset type names that were explicitly given (i.e. not
regular expression patterns) but not found will be appended to this
Expand Down Expand Up @@ -1114,7 +1110,6 @@ def queryDatasets(
dataId: DataId | None = None,
where: str = "",
findFirst: bool = False,
components: bool = False,
bind: Mapping[str, Any] | None = None,
check: bool = True,
**kwargs: Any,
Expand Down Expand Up @@ -1159,9 +1154,6 @@ def queryDatasets(
(according to the order of ``collections`` passed in). If `True`,
``collections`` must not contain regular expressions and may not
be ``...``.
components : `bool`, optional
Must be `False`. Provided only for backwards compatibility. After
v27 this argument will be removed entirely.
bind : `~collections.abc.Mapping`, optional
Mapping containing literal values that should be injected into the
``where`` expression, keyed by the identifiers they replace.
Expand Down Expand Up @@ -1227,7 +1219,6 @@ def queryDataIds(
datasets: Any = None,
collections: CollectionArgType | None = None,
where: str = "",
components: bool = False,
bind: Mapping[str, Any] | None = None,
check: bool = True,
**kwargs: Any,
Expand All @@ -1250,10 +1241,8 @@ def queryDataIds(
``exposure``, ``detector``, and ``physical_filter`` values to only
those for which at least one "raw" dataset exists in
``collections``. Allowed types include `DatasetType`, `str`,
and iterables thereof. Regular expression objects (i.e.
`re.Pattern`) are deprecated and will be removed after the v26
release. See :ref:`daf_butler_dataset_type_expressions` for more
information.
and iterables thereof. See
:ref:`daf_butler_dataset_type_expressions` for more information.
collections : collection expression, optional
An expression that identifies the collections to search for
datasets, such as a `str` (for full matches or partial matches
Expand All @@ -1269,9 +1258,6 @@ def queryDataIds(
any column of a dimension table or (as a shortcut for the primary
key column of a dimension table) dimension name. See
:ref:`daf_butler_dimension_expressions` for more information.
components : `bool`, optional
Must be `False`. Provided only for backwards compatibility. After
v27 this argument will be removed entirely.
bind : `~collections.abc.Mapping`, optional
Mapping containing literal values that should be injected into the
``where`` expression, keyed by the identifiers they replace.
Expand Down Expand Up @@ -1329,7 +1315,6 @@ def queryDimensionRecords(
datasets: Any = None,
collections: CollectionArgType | None = None,
where: str = "",
components: bool = False,
bind: Mapping[str, Any] | None = None,
check: bool = True,
**kwargs: Any,
Expand Down Expand Up @@ -1361,9 +1346,6 @@ def queryDimensionRecords(
A string expression similar to a SQL WHERE clause. See
`queryDataIds` and :ref:`daf_butler_dimension_expressions` for more
information.
components : `bool`, optional
Must be `False`. Provided only for backwards compatibility. After
v27 this argument will be removed entirely.
bind : `~collections.abc.Mapping`, optional
Mapping containing literal values that should be injected into the
``where`` expression, keyed by the identifiers they replace.
Expand Down
4 changes: 0 additions & 4 deletions python/lsst/daf/butler/registry/queries/_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,6 @@ def findDatasets(
collections: Any,
*,
findFirst: bool = True,
components: bool = False,
) -> ParentDatasetQueryResults:
"""Find datasets using the data IDs identified by this query.
Expand All @@ -341,9 +340,6 @@ def findDatasets(
dataset type appears (according to the order of ``collections``
passed in). If `True`, ``collections`` must not contain regular
expressions and may not be ``...``.
components : `bool`, optional
Must be `False`. Provided only for backwards compatibility. After
v27 this argument will be removed entirely.
Returns
-------
Expand Down
Loading

0 comments on commit 8e57637

Please sign in to comment.