Skip to content

Commit

Permalink
Merge pull request #794 from lsst/tickets/DM-38065-v24
Browse files Browse the repository at this point in the history
DM-38065: V24.1 release notes
  • Loading branch information
timj authored Feb 27, 2023
2 parents 01021c5 + 6b6d35d commit 703018b
Show file tree
Hide file tree
Showing 46 changed files with 37 additions and 89 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: 3.8
python-version: "3.10"
cache: "pip"
cache-dependency-path: "setup.cfg"

Expand Down
10 changes: 7 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.3.0
rev: v4.4.0
hooks:
- id: check-yaml
args:
- "--unsafe"
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/psf/black
rev: 22.3.0
rev: 23.1.0
hooks:
- id: black
# It is recommended to specify the latest version of Python
Expand All @@ -17,7 +17,11 @@ repos:
# https://pre-commit.com/#top_level-default_language_version
language_version: python3.8
- repo: https://github.com/pycqa/isort
rev: 5.10.1
rev: 5.12.0
hooks:
- id: isort
name: isort (python)
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8
2 changes: 0 additions & 2 deletions doc/changes/DM-35082.api.rst

This file was deleted.

3 changes: 0 additions & 3 deletions doc/changes/DM-37249.misc.md

This file was deleted.

18 changes: 18 additions & 0 deletions doc/lsst.daf.butler/CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,3 +1,21 @@
Butler v24.1.0 2023-01-13
=========================

API Changes
-----------

- ``DimensionUniverse`` now has an ``isCompatibleWith()`` method to check if two universes are compatible with each other.
The initial test is very basic but can be improved later. (`DM-35082 <https://jira.lsstcorp.org/browse/DM-35082>`_)


Other Changes and Additions
---------------------------

- Rework transaction and connection management for compatibility with transaction-level connection pooling on the server.

Butler clients still hold long-lived connections, via delegation to SQLAlchemy's connection pooling, which can handle disconnections transparently most of the time. But we now wrap all temporary table usage and cursor iteration in transactions. (`DM-37249 <https://jira.lsstcorp.org/browse/DM-37249>`_)


Butler v24.0.0 2022-08-26
=========================

Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/_butler.py
Original file line number Diff line number Diff line change
Expand Up @@ -1906,7 +1906,6 @@ def ingest(
groupedData[ref.datasetType][ref.dataId] = (dataset, resolvedRefs)

if existingRefs:

if len(dataset.refs) != len(existingRefs):
# Keeping track of partially pre-existing datasets is hard
# and should generally never happen. For now don't allow
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/_butlerConfig.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,6 @@ def __init__(
other: Optional[Union[ResourcePathExpression, Config]] = None,
searchPaths: Sequence[ResourcePathExpression] = None,
):

self.configDir: Optional[ResourcePath] = None

# If this is already a ButlerConfig we assume that defaults
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/cli/butler.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,7 +302,6 @@ def _raiseIfDuplicateCommands(commands):


class ButlerCLI(LoaderCLI):

localCmdPkg = "lsst.daf.butler.cli.cmd"

pluginEnvVar = "DAF_BUTLER_PLUGINS"
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/cli/cliLog.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,6 @@ def initLog(
if not log_tty:
logging.basicConfig(force=True, handlers=[logging.NullHandler()])
elif longlog:

# Want to create our own Formatter so that we can get high
# precision timestamps. This requires we attach our own
# default stream handler.
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/cli/opt/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,6 @@


class CollectionTypeCallback:

collectionTypes = tuple(collectionType.name for collectionType in CollectionType.all())

@staticmethod
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/cli/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -881,7 +881,6 @@ class MWCtxObj:
"""

def __init__(self):

self.args = None

@staticmethod
Expand Down
3 changes: 0 additions & 3 deletions python/lsst/daf/butler/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,6 @@ def _processExplicitIncludes(self):
names = self.nameTuples()
for path in names:
if path[-1] == self.includeKey:

log.debug("Processing file include directive at %s", self._D + self._D.join(path))
basePath = path[:-1]

Expand Down Expand Up @@ -1120,7 +1119,6 @@ class ConfigSubset(Config):
"""

def __init__(self, other=None, validate=True, mergeDefaults=True, searchPaths=None):

# Create a blank object to receive the defaults
# Once we have the defaults we then update with the external values
super().__init__()
Expand Down Expand Up @@ -1148,7 +1146,6 @@ def __init__(self, other=None, validate=True, mergeDefaults=True, searchPaths=No

# Sometimes we do not want to merge with defaults.
if mergeDefaults:

# Supplied search paths have highest priority
fullSearchPath = []
if searchPaths:
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/core/configSupport.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,6 @@ def __init__(
self._name = None

if name is not None:

if not isinstance(name, str):
raise ValueError(f"Supplied name must be str not: '{name}'")

Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/core/datastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,6 @@ def __init__(
primaryURI: Optional[ResourcePath] = None,
componentURIs: Optional[Dict[str, ResourcePath]] = None,
):

self.primaryURI = primaryURI
"""The URI to the primary artifact associated with this dataset. If the
dataset was disassembled within the datastore this may be `None`.
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/core/datastoreRecordData.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@
from .storedFileInfo import StoredDatastoreItemInfo

if TYPE_CHECKING:

from ..registry import Registry

_Record = Dict[str, Any]
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/core/fileTemplates.py
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,6 @@ def format(self, ref: DatasetRef) -> str:
output = ""

for literal, field_name, format_spec, conversion in parts:

if field_name == "component":
usedComponent = True

Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/core/storedFileInfo.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,6 @@ def __init__(
file_size: int,
dataset_id: DatasetId,
):

# Use these shenanigans to allow us to use a frozen dataclass
object.__setattr__(self, "path", path)
object.__setattr__(self, "storageClass", storageClass)
Expand Down
8 changes: 0 additions & 8 deletions python/lsst/daf/butler/datastores/fileDatastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -627,7 +627,6 @@ def _prepare_for_get(

fileGetInfo = []
for location, storedFileInfo in fileLocations:

# The storage class used to write the file
writeStorageClass = storedFileInfo.storageClass

Expand Down Expand Up @@ -1294,7 +1293,6 @@ def _read_artifact_into_memory(
location_updated = True

with uri.as_local() as local_uri:

can_be_cached = False
if uri != local_uri:
# URI was remote and file was downloaded
Expand Down Expand Up @@ -1733,7 +1731,6 @@ def _predict_URIs(
uris = DatasetRefURIs()

if self.composites.shouldBeDisassembled(ref):

for component, _ in ref.datasetType.storageClass.components.items():
comp_ref = ref.makeComponentRef(component)
comp_location, _ = self._determine_put_formatter_location(comp_ref)
Expand All @@ -1743,7 +1740,6 @@ def _predict_URIs(
uris.componentURIs[component] = ResourcePath(comp_location.uri.geturl() + "#predicted")

else:

location, _ = self._determine_put_formatter_location(ref)

# Add the "#predicted" URI fragment to indicate this is a guess
Expand All @@ -1768,7 +1764,6 @@ def getManyURIs(
missing_refs = (ref for ref in refs if ref.id not in records_keys)

for ref in missing_refs:

# if this has never been written then we have to guess
if not predict:
if not allow_missing:
Expand Down Expand Up @@ -2025,7 +2020,6 @@ def get(self, ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None) -
)

elif isDisassembledReadOnlyComponent:

compositeStorageClass = ref.datasetType.parentStorageClass
if compositeStorageClass is None:
raise RuntimeError(
Expand Down Expand Up @@ -2334,7 +2328,6 @@ def emptyTrash(self, ignore_errors: bool = True) -> None:
)

for ref, info in trashed:

# Mypy needs to know this is not the base class
assert isinstance(info, StoredFileInfo), f"Unexpectedly got info of class {type(info)}"

Expand All @@ -2348,7 +2341,6 @@ def emptyTrash(self, ignore_errors: bool = True) -> None:
artifacts_to_keep = set(path_map)

for ref, info in trashed:

# Should not happen for this implementation but need
# to keep mypy happy.
assert info is not None, f"Internal logic error in emptyTrash with ref {ref}."
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/datastores/inMemoryDatastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,7 +338,6 @@ def get(self, ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None) -

# Different storage classes implies a component request
if readStorageClass != writeStorageClass:

if component is None:
raise ValueError(
"Storage class inconsistency ({} vs {}) but no"
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/registry/_dbAuth.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,6 @@ def getAuth(
raise DbAuthError("Missing database parameter")

for authDict in self.authList:

# Check for mandatory entries
if "url" not in authDict:
raise DbAuthError("Missing URL in DbAuth configuration")
Expand Down
1 change: 0 additions & 1 deletion python/lsst/daf/butler/registry/queries/_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -682,7 +682,6 @@ def _order_by_columns(self) -> Iterable[OrderByColumn]:
return order_by_columns

for order_by_column in self.summary.order_by.order_by_columns:

column: sqlalchemy.sql.ColumnElement
if order_by_column.column is None:
# dimension name, it has to be in SELECT list already, only
Expand Down
2 changes: 0 additions & 2 deletions python/lsst/daf/butler/registry/queries/_structs.py
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,6 @@ class OrderByClause:
"""

def __init__(self, order_by: Iterable[str], graph: DimensionGraph):

self.order_by_columns = []
for name in order_by:
if not name or name == "-":
Expand Down Expand Up @@ -309,7 +308,6 @@ class ElementOrderByClause:
"""

def __init__(self, order_by: Iterable[str], element: DimensionElement):

self.order_by_columns = []
for name in order_by:
if not name or name == "-":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,6 @@ class ParserYacc:
"""

def __init__(self, idMap=None, **kwargs):

kw = dict(write_tables=0, debug=False)
kw.update(kwargs)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
# This regular expression is used to match valid token names
_is_identifier = re.compile(r"^[a-zA-Z0-9_]+$")


# Exception thrown when invalid token encountered and no default error
# handler is defined.
class LexError(Exception):
Expand Down Expand Up @@ -449,6 +450,7 @@ def next(self):
# and build a Lexer object from it.
# -----------------------------------------------------------------------------


# -----------------------------------------------------------------------------
# _get_regex(func)
#
Expand Down Expand Up @@ -912,7 +914,6 @@ def lex(
debuglog=None,
errorlog=None,
):

if lextab is None:
lextab = "lextab"

Expand Down
Loading

0 comments on commit 703018b

Please sign in to comment.