From 5fed161669843304525b6b2b14264a4018a8a3c1 Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Mon, 27 Feb 2023 15:06:33 -0700
Subject: [PATCH 1/8] v24.1 release notes

---
 doc/changes/DM-35082.api.rst    |  2 --
 doc/changes/DM-37249.misc.md    |  3 ---
 doc/lsst.daf.butler/CHANGES.rst | 18 ++++++++++++++++++
 3 files changed, 18 insertions(+), 5 deletions(-)
 delete mode 100644 doc/changes/DM-35082.api.rst
 delete mode 100644 doc/changes/DM-37249.misc.md

diff --git a/doc/changes/DM-35082.api.rst b/doc/changes/DM-35082.api.rst
deleted file mode 100644
index dc5561b9ce..0000000000
--- a/doc/changes/DM-35082.api.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-``DimensionUniverse`` now has a ``isCompatibleWith()`` method to check if two universes are compatible with each other.
-The initial test is very basic but can be improved later.
diff --git a/doc/changes/DM-37249.misc.md b/doc/changes/DM-37249.misc.md
deleted file mode 100644
index e19ece6704..0000000000
--- a/doc/changes/DM-37249.misc.md
+++ /dev/null
@@ -1,3 +0,0 @@
-Rework transaction and connection management for compatibility with transaction-level connection pooling on the server.
-
-Butler clients still hold long-lived connections, via delegation to SQLAlchemy's connection pooling, which can handle disconnections transparently most of the time. But we now wrap all temporary table usage and cursor iteration in transactions.
diff --git a/doc/lsst.daf.butler/CHANGES.rst b/doc/lsst.daf.butler/CHANGES.rst
index bdce5f83e8..1f80f2bb5a 100644
--- a/doc/lsst.daf.butler/CHANGES.rst
+++ b/doc/lsst.daf.butler/CHANGES.rst
@@ -1,3 +1,21 @@
+Butler v24.1.0 2023-01-13
+=========================
+
+API Changes
+-----------
+
+- ``DimensionUniverse`` now has a ``isCompatibleWith()`` method to check if two universes are compatible with each other.
+  The initial test is very basic but can be improved later. (`DM-35082 `_)
+
+
+Other Changes and Additions
+---------------------------
+
+- Rework transaction and connection management for compatibility with transaction-level connection pooling on the server.
+
+  Butler clients still hold long-lived connections, via delegation to SQLAlchemy's connection pooling, which can handle disconnections transparently most of the time. But we now wrap all temporary table usage and cursor iteration in transactions. (`DM-37249 `_)
+
+
 Butler v24.0.0 2022-08-26
 =========================
 
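Two of the release notes above lend themselves to short illustrations. For the new ``DimensionUniverse.isCompatibleWith()`` API, here is a minimal usage sketch; the repository path is a placeholder, and reaching the second universe through ``Butler.registry.dimensions`` is an assumption made for illustration, not part of the patch::

    from lsst.daf.butler import Butler, DimensionUniverse

    # Default universe, built from the dimension configuration
    # shipped with daf_butler.
    universe = DimensionUniverse()

    # Universe in use by some existing repository (path is hypothetical).
    butler = Butler("/path/to/repo")
    if not universe.isCompatibleWith(butler.registry.dimensions):
        raise RuntimeError("Repository dimension universe is not compatible")

For the DM-37249 transaction rework, the general pattern — not the Butler internals — is to keep cursor iteration inside an explicit transaction so that a transaction-level pooler only has to dedicate a server connection for the duration of the block. A SQLAlchemy 1.4 sketch with a placeholder engine URL and table::

    import sqlalchemy

    engine = sqlalchemy.create_engine("postgresql+psycopg2://server/butler")
    metadata = sqlalchemy.MetaData()
    dataset = sqlalchemy.Table(
        "dataset", metadata, sqlalchemy.Column("id", sqlalchemy.BigInteger, primary_key=True)
    )

    # Iterate the cursor inside one transaction; the connection is
    # checked out at entry and released back to the pool on exit.
    with engine.begin() as connection:
        for row in connection.execute(dataset.select()):
            print(row.id)
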
From 205aed218ee5b161a6e9f1e98868599bf336224b Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Wed, 1 Feb 2023 12:39:09 -0700
Subject: [PATCH 2/8] Update version in pre-commit

---
 .pre-commit-config.yaml | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 56ae570dc1..0f18e7d3a6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v2.3.0
+    rev: v4.4.0
     hooks:
       - id: check-yaml
         args:
@@ -8,7 +8,7 @@ repos:
     - id: end-of-file-fixer
     - id: trailing-whitespace
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 23.1.0
    hooks:
      - id: black
      # It is recommended to specify the latest version of Python
@@ -17,7 +17,11 @@ repos:
      # https://pre-commit.com/#top_level-default_language_version
      language_version: python3.8
   - repo: https://github.com/pycqa/isort
-    rev: 5.10.1
+    rev: 5.12.0
    hooks:
      - id: isort
        name: isort (python)
+  - repo: https://github.com/PyCQA/flake8
+    rev: 6.0.0
+    hooks:
+      - id: flake8

From 1a8f64c9f567e09c09b548766aae51b298e9f243 Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Mon, 27 Feb 2023 15:08:20 -0700
Subject: [PATCH 3/8] Updates for new black

---
 python/lsst/daf/butler/_butler.py                  |  1 -
 python/lsst/daf/butler/_butlerConfig.py            |  1 -
 python/lsst/daf/butler/cli/butler.py               |  1 -
 python/lsst/daf/butler/cli/cliLog.py               |  1 -
 python/lsst/daf/butler/cli/opt/options.py          |  1 -
 python/lsst/daf/butler/cli/utils.py                |  1 -
 python/lsst/daf/butler/core/config.py              |  3 ---
 python/lsst/daf/butler/core/configSupport.py       |  1 -
 python/lsst/daf/butler/core/datastore.py           |  1 -
 python/lsst/daf/butler/core/datastoreRecordData.py |  1 -
 python/lsst/daf/butler/core/fileTemplates.py       |  1 -
 python/lsst/daf/butler/core/storedFileInfo.py      |  1 -
 python/lsst/daf/butler/datastores/fileDatastore.py |  8 --------
 .../daf/butler/datastores/inMemoryDatastore.py     |  1 -
 python/lsst/daf/butler/registry/_dbAuth.py         |  1 -
 .../lsst/daf/butler/registry/queries/_builder.py   |  1 -
 .../lsst/daf/butler/registry/queries/_structs.py   |  2 --
 .../queries/expressions/parser/parserYacc.py       |  1 -
 .../registry/queries/expressions/parser/ply/lex.py |  3 ++-
 .../queries/expressions/parser/ply/yacc.py         | 14 ++------------
 python/lsst/daf/butler/registry/tests/_registry.py |  3 ---
 python/lsst/daf/butler/registry/versions.py        |  1 -
 python/lsst/daf/butler/script/ingest_files.py      |  1 -
 python/lsst/daf/butler/tests/cliLogTestBase.py     |  1 -
 tests/test_butler.py                               |  2 --
 tests/test_cliCmdConfigDump.py                     |  1 -
 tests/test_cliCmdConfigValidate.py                 |  1 -
 tests/test_cliCmdCreate.py                         |  1 -
 tests/test_cliCmdImport.py                         |  2 --
 tests/test_cliCmdIngestFiles.py                    |  2 --
 tests/test_cliCmdQueryCollections.py               |  2 --
 tests/test_cliCmdQueryDataIds.py                   |  1 -
 tests/test_cliCmdQueryDatasetTypes.py              |  1 -
 tests/test_cliCmdQueryDatasets.py                  |  1 -
 tests/test_cliCmdQueryDimensionRecords.py          |  2 --
 tests/test_cliCmdRetrieveArtifacts.py              |  2 --
 tests/test_datastore.py                            |  3 ---
 tests/test_dimensions.py                           |  1 -
 tests/test_logging.py                              |  2 --
 39 files changed, 4 insertions(+), 71 deletions(-)

diff --git a/python/lsst/daf/butler/_butler.py b/python/lsst/daf/butler/_butler.py
index 5420909186..452c2f0905 100644
--- a/python/lsst/daf/butler/_butler.py
+++ b/python/lsst/daf/butler/_butler.py
@@ -1906,7 +1906,6 @@ def ingest(
             groupedData[ref.datasetType][ref.dataId] = (dataset, resolvedRefs)

             if existingRefs:
-
                 if len(dataset.refs) != len(existingRefs):
                     # Keeping track of partially pre-existing datasets is hard
                     # and should generally never happen. For now don't allow
diff --git a/python/lsst/daf/butler/_butlerConfig.py b/python/lsst/daf/butler/_butlerConfig.py
index e46768d5e8..fb031ca4b5 100644
--- a/python/lsst/daf/butler/_butlerConfig.py
+++ b/python/lsst/daf/butler/_butlerConfig.py
@@ -68,7 +68,6 @@ def __init__(
         other: Optional[Union[ResourcePathExpression, Config]] = None,
         searchPaths: Sequence[ResourcePathExpression] = None,
     ):
-
         self.configDir: Optional[ResourcePath] = None

         # If this is already a ButlerConfig we assume that defaults
diff --git a/python/lsst/daf/butler/cli/butler.py b/python/lsst/daf/butler/cli/butler.py
index cf10085b02..c8d69f4197 100755
--- a/python/lsst/daf/butler/cli/butler.py
+++ b/python/lsst/daf/butler/cli/butler.py
@@ -302,7 +302,6 @@ def _raiseIfDuplicateCommands(commands):


 class ButlerCLI(LoaderCLI):
-
     localCmdPkg = "lsst.daf.butler.cli.cmd"

     pluginEnvVar = "DAF_BUTLER_PLUGINS"
diff --git a/python/lsst/daf/butler/cli/cliLog.py b/python/lsst/daf/butler/cli/cliLog.py
index 3bf0e3ef89..9302c6f47d 100644
--- a/python/lsst/daf/butler/cli/cliLog.py
+++ b/python/lsst/daf/butler/cli/cliLog.py
@@ -172,7 +172,6 @@ def initLog(
         if not log_tty:
             logging.basicConfig(force=True, handlers=[logging.NullHandler()])
         elif longlog:
-
             # Want to create our own Formatter so that we can get high
             # precision timestamps. This requires we attach our own
             # default stream handler.
diff --git a/python/lsst/daf/butler/cli/opt/options.py b/python/lsst/daf/butler/cli/opt/options.py
index 6f84b8fd1c..4f70580c66 100644
--- a/python/lsst/daf/butler/cli/opt/options.py
+++ b/python/lsst/daf/butler/cli/opt/options.py
@@ -59,7 +59,6 @@


 class CollectionTypeCallback:
-
     collectionTypes = tuple(collectionType.name for collectionType in CollectionType.all())

     @staticmethod
diff --git a/python/lsst/daf/butler/cli/utils.py b/python/lsst/daf/butler/cli/utils.py
index deed42a0d0..73bc77ae6e 100644
--- a/python/lsst/daf/butler/cli/utils.py
+++ b/python/lsst/daf/butler/cli/utils.py
@@ -881,7 +881,6 @@ class MWCtxObj:
     """

     def __init__(self):
-
         self.args = None

     @staticmethod
diff --git a/python/lsst/daf/butler/core/config.py b/python/lsst/daf/butler/core/config.py
index 942569ec47..edf38695b5 100644
--- a/python/lsst/daf/butler/core/config.py
+++ b/python/lsst/daf/butler/core/config.py
@@ -426,7 +426,6 @@ def _processExplicitIncludes(self):
         names = self.nameTuples()
         for path in names:
             if path[-1] == self.includeKey:
-
                 log.debug("Processing file include directive at %s", self._D + self._D.join(path))

                 basePath = path[:-1]
@@ -1120,7 +1119,6 @@ class ConfigSubset(Config):
     """

     def __init__(self, other=None, validate=True, mergeDefaults=True, searchPaths=None):
-
         # Create a blank object to receive the defaults
         # Once we have the defaults we then update with the external values
         super().__init__()
@@ -1148,7 +1146,6 @@ def __init__(self, other=None, validate=True, mergeDefaults=True, searchPaths=None):

         # Sometimes we do not want to merge with defaults.
         if mergeDefaults:
-
             # Supplied search paths have highest priority
             fullSearchPath = []
             if searchPaths:
diff --git a/python/lsst/daf/butler/core/configSupport.py b/python/lsst/daf/butler/core/configSupport.py
index e8d72761e7..b7a63a73f4 100644
--- a/python/lsst/daf/butler/core/configSupport.py
+++ b/python/lsst/daf/butler/core/configSupport.py
@@ -83,7 +83,6 @@ def __init__(

         self._name = None
         if name is not None:
-
             if not isinstance(name, str):
                 raise ValueError(f"Supplied name must be str not: '{name}'")

diff --git a/python/lsst/daf/butler/core/datastore.py b/python/lsst/daf/butler/core/datastore.py
index f7193ad3dc..0561fed574 100644
--- a/python/lsst/daf/butler/core/datastore.py
+++ b/python/lsst/daf/butler/core/datastore.py
@@ -210,7 +210,6 @@ def __init__(
         primaryURI: Optional[ResourcePath] = None,
         componentURIs: Optional[Dict[str, ResourcePath]] = None,
     ):
-
         self.primaryURI = primaryURI
         """The URI to the primary artifact associated with this dataset. If
         the dataset was disassembled within the datastore this may be `None`.
diff --git a/python/lsst/daf/butler/core/datastoreRecordData.py b/python/lsst/daf/butler/core/datastoreRecordData.py
index 609a4d5f54..a1c3278646 100644
--- a/python/lsst/daf/butler/core/datastoreRecordData.py
+++ b/python/lsst/daf/butler/core/datastoreRecordData.py
@@ -39,7 +39,6 @@
 from .storedFileInfo import StoredDatastoreItemInfo

 if TYPE_CHECKING:
-
     from ..registry import Registry

 _Record = Dict[str, Any]
diff --git a/python/lsst/daf/butler/core/fileTemplates.py b/python/lsst/daf/butler/core/fileTemplates.py
index 83e927507c..959e8147ee 100644
--- a/python/lsst/daf/butler/core/fileTemplates.py
+++ b/python/lsst/daf/butler/core/fileTemplates.py
@@ -470,7 +470,6 @@ def format(self, ref: DatasetRef) -> str:

         output = ""
         for literal, field_name, format_spec, conversion in parts:
-
             if field_name == "component":
                 usedComponent = True

diff --git a/python/lsst/daf/butler/core/storedFileInfo.py b/python/lsst/daf/butler/core/storedFileInfo.py
index 166bb6e326..2dce0c1445 100644
--- a/python/lsst/daf/butler/core/storedFileInfo.py
+++ b/python/lsst/daf/butler/core/storedFileInfo.py
@@ -108,7 +108,6 @@ def __init__(
         file_size: int,
         dataset_id: DatasetId,
     ):
-
         # Use these shenanigans to allow us to use a frozen dataclass
         object.__setattr__(self, "path", path)
         object.__setattr__(self, "storageClass", storageClass)
diff --git a/python/lsst/daf/butler/datastores/fileDatastore.py b/python/lsst/daf/butler/datastores/fileDatastore.py
index ee4eb34a88..b83bcbf19a 100644
--- a/python/lsst/daf/butler/datastores/fileDatastore.py
+++ b/python/lsst/daf/butler/datastores/fileDatastore.py
@@ -627,7 +627,6 @@ def _prepare_for_get(

         fileGetInfo = []
         for location, storedFileInfo in fileLocations:
-
             # The storage class used to write the file
             writeStorageClass = storedFileInfo.storageClass

@@ -1294,7 +1293,6 @@ def _read_artifact_into_memory(
             location_updated = True

         with uri.as_local() as local_uri:
-
             can_be_cached = False
             if uri != local_uri:
                 # URI was remote and file was downloaded
@@ -1733,7 +1731,6 @@ def _predict_URIs(
         uris = DatasetRefURIs()

         if self.composites.shouldBeDisassembled(ref):
-
             for component, _ in ref.datasetType.storageClass.components.items():
                 comp_ref = ref.makeComponentRef(component)
                 comp_location, _ = self._determine_put_formatter_location(comp_ref)
@@ -1743,7 +1740,6 @@ def _predict_URIs(
                 uris.componentURIs[component] = ResourcePath(comp_location.uri.geturl() + "#predicted")

         else:
-
             location, _ = self._determine_put_formatter_location(ref)

             # Add the "#predicted" URI fragment to indicate this is a guess
@@ -1768,7 +1764,6 @@ def getManyURIs(
         missing_refs = (ref for ref in refs if ref.id not in records_keys)

         for ref in missing_refs:
-
             # if this has never been written then we have to guess
             if not predict:
                 if not allow_missing:
@@ -2025,7 +2020,6 @@ def get(self, ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None) -
             )

         elif isDisassembledReadOnlyComponent:
-
             compositeStorageClass = ref.datasetType.parentStorageClass
             if compositeStorageClass is None:
                 raise RuntimeError(
@@ -2334,7 +2328,6 @@ def emptyTrash(self, ignore_errors: bool = True) -> None:
             )

             for ref, info in trashed:
-
                 # Mypy needs to know this is not the base class
                 assert isinstance(info, StoredFileInfo), f"Unexpectedly got info of class {type(info)}"

@@ -2348,7 +2341,6 @@ def emptyTrash(self, ignore_errors: bool = True) -> None:
             artifacts_to_keep = set(path_map)

             for ref, info in trashed:
-
                 # Should not happen for this implementation but need
                 # to keep mypy happy.
                 assert info is not None, f"Internal logic error in emptyTrash with ref {ref}."
diff --git a/python/lsst/daf/butler/datastores/inMemoryDatastore.py b/python/lsst/daf/butler/datastores/inMemoryDatastore.py
index 3b247f856b..e91ac75e26 100644
--- a/python/lsst/daf/butler/datastores/inMemoryDatastore.py
+++ b/python/lsst/daf/butler/datastores/inMemoryDatastore.py
@@ -338,7 +338,6 @@ def get(self, ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None) -

         # Different storage classes implies a component request
         if readStorageClass != writeStorageClass:
-
             if component is None:
                 raise ValueError(
                     "Storage class inconsistency ({} vs {}) but no"
diff --git a/python/lsst/daf/butler/registry/_dbAuth.py b/python/lsst/daf/butler/registry/_dbAuth.py
index 9bc664e522..5086e2eecd 100644
--- a/python/lsst/daf/butler/registry/_dbAuth.py
+++ b/python/lsst/daf/butler/registry/_dbAuth.py
@@ -191,7 +191,6 @@ def getAuth(
             raise DbAuthError("Missing database parameter")

         for authDict in self.authList:
-
             # Check for mandatory entries
             if "url" not in authDict:
                 raise DbAuthError("Missing URL in DbAuth configuration")
diff --git a/python/lsst/daf/butler/registry/queries/_builder.py b/python/lsst/daf/butler/registry/queries/_builder.py
index f88f9a8f69..c980eb9485 100644
--- a/python/lsst/daf/butler/registry/queries/_builder.py
+++ b/python/lsst/daf/butler/registry/queries/_builder.py
@@ -682,7 +682,6 @@ def _order_by_columns(self) -> Iterable[OrderByColumn]:
             return order_by_columns

         for order_by_column in self.summary.order_by.order_by_columns:
-
             column: sqlalchemy.sql.ColumnElement
             if order_by_column.column is None:
                 # dimension name, it has to be in SELECT list already, only
diff --git a/python/lsst/daf/butler/registry/queries/_structs.py b/python/lsst/daf/butler/registry/queries/_structs.py
index 02535cd70d..19a6ddd0cb 100644
--- a/python/lsst/daf/butler/registry/queries/_structs.py
+++ b/python/lsst/daf/butler/registry/queries/_structs.py
@@ -267,7 +267,6 @@ class OrderByClause:
     """

     def __init__(self, order_by: Iterable[str], graph: DimensionGraph):
-
         self.order_by_columns = []
         for name in order_by:
             if not name or name == "-":
@@ -309,7 +308,6 @@ class ElementOrderByClause:
     """

     def __init__(self, order_by: Iterable[str], element: DimensionElement):
-
         self.order_by_columns = []
         for name in order_by:
             if not name or name == "-":
diff --git a/python/lsst/daf/butler/registry/queries/expressions/parser/parserYacc.py b/python/lsst/daf/butler/registry/queries/expressions/parser/parserYacc.py
index ffa875eca7..43c7f361c4 100644
--- a/python/lsst/daf/butler/registry/queries/expressions/parser/parserYacc.py
+++ b/python/lsst/daf/butler/registry/queries/expressions/parser/parserYacc.py
@@ -234,7 +234,6 @@ class ParserYacc:
     """

     def __init__(self, idMap=None, **kwargs):
-
         kw = dict(write_tables=0, debug=False)
         kw.update(kwargs)

diff --git a/python/lsst/daf/butler/registry/queries/expressions/parser/ply/lex.py b/python/lsst/daf/butler/registry/queries/expressions/parser/ply/lex.py
index 9a89570836..7a3c24ef05 100644
--- a/python/lsst/daf/butler/registry/queries/expressions/parser/ply/lex.py
+++ b/python/lsst/daf/butler/registry/queries/expressions/parser/ply/lex.py
@@ -52,6 +52,7 @@
 # This regular expression is used to match valid token names
 _is_identifier = re.compile(r"^[a-zA-Z0-9_]+$")

+
 # Exception thrown when invalid token encountered and no default error
 # handler is defined.
 class LexError(Exception):
@@ -449,6 +450,7 @@ def next(self):
 # and build a Lexer object from it.
 # -----------------------------------------------------------------------------

+
 # -----------------------------------------------------------------------------
 # _get_regex(func)
 #
@@ -912,7 +914,6 @@ def lex(
     debuglog=None,
     errorlog=None,
 ):
-
     if lextab is None:
         lextab = "lextab"

diff --git a/python/lsst/daf/butler/registry/queries/expressions/parser/ply/yacc.py b/python/lsst/daf/butler/registry/queries/expressions/parser/ply/yacc.py
index a9a98de385..5014030b8d 100644
--- a/python/lsst/daf/butler/registry/queries/expressions/parser/ply/yacc.py
+++ b/python/lsst/daf/butler/registry/queries/expressions/parser/ply/yacc.py
@@ -549,7 +549,6 @@ def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenf
                 # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                 else:
-
                     # --! TRACKING
                     if tracking:
                         sym.lineno = lexer.lineno
@@ -599,7 +598,6 @@ def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenf
                     return result

         if t is None:
-
             # --! DEBUG
             debug.error(
                 "Error : %s",
@@ -866,7 +864,6 @@ def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfun
                 # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                 else:
-
                     # --! TRACKING
                     if tracking:
                         sym.lineno = lexer.lineno
@@ -909,7 +906,6 @@ def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfun
                     return result

         if t is None:
-
             # We have some kind of parsing error here. To handle
             # this, we are going to push the current token onto
             # the tokenstack and replace it with an 'error' token.
@@ -1159,7 +1155,6 @@ def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False,
                 # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                 else:
-
                     targ = [sym]

                     # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -1196,7 +1191,6 @@ def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False,
                     return result

         if t is None:
-
             # We have some kind of parsing error here. To handle
             # this, we are going to push the current token onto
             # the tokenstack and replace it with an 'error' token.
@@ -1578,7 +1572,6 @@ def set_precedence(self, term, assoc, level):
     # -----------------------------------------------------------------------------

     def add_production(self, prodname, syms, func=None, file="", line=0):
-
         if prodname in self.Terminals:
             raise GrammarError(
                 "%s:%d: Illegal rule name %r. Already defined as a token" % (file, line, prodname)
@@ -1687,7 +1680,6 @@ def set_start(self, start=None):
     # -----------------------------------------------------------------------------

     def find_unreachable(self):
-
         # Mark all symbols that are reachable from a symbol s
         def mark_reachable_from(s):
             if s in reachable:
@@ -1727,7 +1719,7 @@ def infinite_cycles(self):
         # Then propagate termination until no change:
         while True:
             some_change = False
-            for (n, pl) in self.Prodnames.items():
+            for n, pl in self.Prodnames.items():
                 # Nonterminal n terminates iff any of its productions terminates.
                 for p in pl:
                     # Production p terminates iff all of its rhs symbols terminate.
@@ -1755,7 +1747,7 @@ def infinite_cycles(self):
                 break

         infinite = []
-        for (s, term) in terminates.items():
+        for s, term in terminates.items():
             if not term:
                 if s not in self.Prodnames and s not in self.Terminals and s != "error":
                     # s is used-but-not-defined, and we've already warned of that,
@@ -1839,7 +1831,6 @@ def unused_precedence(self):
     # Afterward (e.g., when called from compute_follow()), it will be complete.
     # -------------------------------------------------------------------------
     def _first(self, beta):
-
         # We are computing First(x1,x2,x3,...,xn)
         result = []
         for x in beta:
@@ -3303,7 +3294,6 @@ def yacc(
     errorlog=None,
     picklefile=None,
 ):
-
     if tabmodule is None:
         tabmodule = tab_module
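
Besides the blank-line removals, the two ``infinite_cycles`` hunks above drop redundant parentheses around the loop targets. The parenthesized and bare forms unpack identically, so the change is behavior-neutral; a quick self-contained check (the grammar dictionary here is a made-up stand-in for ``self.Prodnames``)::

    prodnames = {"expr": ["expr PLUS term"], "term": ["NUMBER"]}

    # Old spelling, with parentheses around the target tuple.
    old = [(n, pl) for (n, pl) in prodnames.items()]
    # New spelling preferred by current formatters.
    new = [(n, pl) for n, pl in prodnames.items()]
    assert old == new
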
diff --git a/python/lsst/daf/butler/registry/tests/_registry.py b/python/lsst/daf/butler/registry/tests/_registry.py
index ec7835ff3b..6097b7c93d 100644
--- a/python/lsst/daf/butler/registry/tests/_registry.py
+++ b/python/lsst/daf/butler/registry/tests/_registry.py
@@ -485,7 +485,6 @@ def testImportDatasetsUUID(self):
         # Test for non-unique IDs, they can be re-imported multiple times.
         for run, idGenMode in ((2, DatasetIdGenEnum.DATAID_TYPE), (4, DatasetIdGenEnum.DATAID_TYPE_RUN)):
             with self.subTest(idGenMode=idGenMode):
-
                 # Use integer dataset ID to force UUID calculation in _import
                 ref = DatasetRef(datasetTypeBias, dataIdBias1, id=0, run=f"run{run}")
                 (ref1,) = registry._importDatasets([ref], idGenerationMode=idGenMode)
@@ -2232,7 +2231,6 @@ def testSkipCalibs(self):
         self.assertGreater(len(datasets), 0)

     def testIngestTimeQuery(self):
-
         registry = self.makeRegistry()
         self.loadData(registry, "base.yaml")
         dt0 = datetime.utcnow()
@@ -2515,7 +2513,6 @@ def testQueryResultSummaries(self):
                 ["nonexistent"],
             ),
         ]:
-
             self.assertFalse(query.any(execute=False, exact=False))
             self.assertFalse(query.any(execute=True, exact=False))
             self.assertFalse(query.any(execute=True, exact=True))
diff --git a/python/lsst/daf/butler/registry/versions.py b/python/lsst/daf/butler/registry/versions.py
index 1fb81e4065..b4a548e2c6 100644
--- a/python/lsst/daf/butler/registry/versions.py
+++ b/python/lsst/daf/butler/registry/versions.py
@@ -225,7 +225,6 @@ def storeManagersVersions(self) -> None:
         schema digest as a value.
         """
         for extension in self._managers.values():
-
             version = extension.currentVersion()
             if version:
                 key = self._managerVersionKey(extension)
diff --git a/python/lsst/daf/butler/script/ingest_files.py b/python/lsst/daf/butler/script/ingest_files.py
index 80a49ccbd5..c5535d7139 100644
--- a/python/lsst/daf/butler/script/ingest_files.py
+++ b/python/lsst/daf/butler/script/ingest_files.py
@@ -159,7 +159,6 @@ def extract_datasets_from_table(
     refs_by_file = defaultdict(list)
     n_dataset_refs = 0
     for row in table:
-
         # Convert the row to a dataId, remembering to extract the
         # path column.
         dataId = dict(row)
diff --git a/python/lsst/daf/butler/tests/cliLogTestBase.py b/python/lsst/daf/butler/tests/cliLogTestBase.py
index 0f2b6645fd..42bf533491 100644
--- a/python/lsst/daf/butler/tests/cliLogTestBase.py
+++ b/python/lsst/daf/butler/tests/cliLogTestBase.py
@@ -70,7 +70,6 @@ def command_log_settings_test(
     expected_lsstbutler_level,
     expected_lsstx_level,
 ):
-
     LogLevel = namedtuple("LogLevel", ("expected", "actual", "name"))

     logLevels = [
diff --git a/tests/test_butler.py b/tests/test_butler.py
index bac8985d3f..3a15414667 100644
--- a/tests/test_butler.py
+++ b/tests/test_butler.py
@@ -593,7 +593,6 @@ def testBasicPutGet(self):
         self.runPutGetTest(storageClass, "test_metric")

     def testCompositePutGetConcrete(self):
-
         storageClass = self.storageClassFactory.getStorageClass("StructuredCompositeReadCompNoDisassembly")
         butler = self.runPutGetTest(storageClass, "test_metric")

@@ -2142,7 +2141,6 @@ def assertButlerTransfers(self, id_gen_map=None, purge=False, storageClassName="
             if purge:
                 # Remove records for a fraction.
                 if index == 1:
-
                     # For one of these delete the file as well.
                     # This allows the "missing" code to filter the
                     # file out.
diff --git a/tests/test_cliCmdConfigDump.py b/tests/test_cliCmdConfigDump.py
index 841b4472b9..9578cd27fd 100644
--- a/tests/test_cliCmdConfigDump.py
+++ b/tests/test_cliCmdConfigDump.py
@@ -38,7 +38,6 @@


 class ConfigDumpTest(CliCmdTestBase, unittest.TestCase):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.configDump"

     @staticmethod
diff --git a/tests/test_cliCmdConfigValidate.py b/tests/test_cliCmdConfigValidate.py
index a3e80d1101..66e6eba0a2 100644
--- a/tests/test_cliCmdConfigValidate.py
+++ b/tests/test_cliCmdConfigValidate.py
@@ -31,7 +31,6 @@


 class ValidateTest(CliCmdTestBase, unittest.TestCase):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.configValidate"

     @staticmethod
diff --git a/tests/test_cliCmdCreate.py b/tests/test_cliCmdCreate.py
index 34fa938dbf..8cb1158188 100644
--- a/tests/test_cliCmdCreate.py
+++ b/tests/test_cliCmdCreate.py
@@ -26,7 +26,6 @@


 class CreateTest(CliCmdTestBase, unittest.TestCase):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.createRepo"

     @staticmethod
diff --git a/tests/test_cliCmdImport.py b/tests/test_cliCmdImport.py
index 92e5347b67..d3adcba80b 100644
--- a/tests/test_cliCmdImport.py
+++ b/tests/test_cliCmdImport.py
@@ -31,7 +31,6 @@


 class ImportTestCase(CliCmdTestBase, unittest.TestCase):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.butlerImport"

     @staticmethod
@@ -64,7 +63,6 @@ def test_missingArgument(self):


 class ExportFileCase(CliCmdTestBase, unittest.TestCase):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.butlerImport"

     @property
diff --git a/tests/test_cliCmdIngestFiles.py b/tests/test_cliCmdIngestFiles.py
index 4dbc6c566e..bd0bed20a9 100644
--- a/tests/test_cliCmdIngestFiles.py
+++ b/tests/test_cliCmdIngestFiles.py
@@ -37,7 +37,6 @@


 class CliIngestFilesTest(unittest.TestCase, ButlerTestHelper):
-
     configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

     def setUp(self):
@@ -88,7 +87,6 @@ def testIngestRelativeWithDataId(self):
     def assertIngest(self, table, options):
         runner = LogCliRunner()
         with runner.isolated_filesystem():
-
             table_file = os.path.join(self.root2, f"table_{self.id()}.csv")
             table.write(table_file)

diff --git a/tests/test_cliCmdQueryCollections.py b/tests/test_cliCmdQueryCollections.py
index aa389f63e9..7bb7cedbf6 100644
--- a/tests/test_cliCmdQueryCollections.py
+++ b/tests/test_cliCmdQueryCollections.py
@@ -40,7 +40,6 @@


 class QueryCollectionsCmdTest(CliCmdTestBase, unittest.TestCase):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.queryCollections"

     @staticmethod
@@ -127,7 +126,6 @@ def assertChain(self, args: List[str], expected: str):

     def testChained(self):
         with self.runner.isolated_filesystem():
-
             # Create a butler and add some chained collections:
             butlerCfg = Butler.makeRepo("here")

diff --git a/tests/test_cliCmdQueryDataIds.py b/tests/test_cliCmdQueryDataIds.py
index 6d2ac18f76..644a1b37e7 100644
--- a/tests/test_cliCmdQueryDataIds.py
+++ b/tests/test_cliCmdQueryDataIds.py
@@ -34,7 +34,6 @@


 class QueryDataIdsTest(unittest.TestCase, ButlerTestHelper):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.queryDataIds"

     @staticmethod
diff --git a/tests/test_cliCmdQueryDatasetTypes.py b/tests/test_cliCmdQueryDatasetTypes.py
index 8641548c12..0ec20523b8 100644
--- a/tests/test_cliCmdQueryDatasetTypes.py
+++ b/tests/test_cliCmdQueryDatasetTypes.py
@@ -34,7 +34,6 @@


 class QueryDatasetTypesCmdTest(CliCmdTestBase, unittest.TestCase):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.queryDatasetTypes"

     @staticmethod
diff --git a/tests/test_cliCmdQueryDatasets.py b/tests/test_cliCmdQueryDatasets.py
index 0cf8d133db..2d2024d27f 100644
--- a/tests/test_cliCmdQueryDatasets.py
+++ b/tests/test_cliCmdQueryDatasets.py
@@ -134,7 +134,6 @@ def expectedFilesystemDatastoreTables(root: ResourcePath):


 class QueryDatasetsTest(unittest.TestCase, ButlerTestHelper):
-
     configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
     storageClassFactory = StorageClassFactory()

diff --git a/tests/test_cliCmdQueryDimensionRecords.py b/tests/test_cliCmdQueryDimensionRecords.py
index 88345eedcd..e5c43485c3 100644
--- a/tests/test_cliCmdQueryDimensionRecords.py
+++ b/tests/test_cliCmdQueryDimensionRecords.py
@@ -48,7 +48,6 @@


 class QueryDimensionRecordsTest(unittest.TestCase, ButlerTestHelper):
-
     mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.queryDimensionRecords"

     configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
@@ -151,7 +150,6 @@ def testWhere(self):
         self.assertAstropyTablesEqual(readTable(result.output), expected)

     def testCollection(self):
-
         butler = Butler(self.root, run="foo")

         # try replacing the testRepo's butler with the one with the "foo" run.
diff --git a/tests/test_cliCmdRetrieveArtifacts.py b/tests/test_cliCmdRetrieveArtifacts.py
index 4d5e8b9668..dc62d47ef4 100644
--- a/tests/test_cliCmdRetrieveArtifacts.py
+++ b/tests/test_cliCmdRetrieveArtifacts.py
@@ -36,7 +36,6 @@


 class CliRetrieveArtifactsTest(unittest.TestCase, ButlerTestHelper):
-
     configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
     storageClassFactory = StorageClassFactory()

@@ -54,7 +53,6 @@ def find_files(root: str) -> List[ResourcePath]:
     def testRetrieveAll(self):
         runner = LogCliRunner()
         with runner.isolated_filesystem():
-
             # When preserving the path the run will be in the directory along
             # with a . in the component name. When not preserving paths the
             # filename will have an underscore rather than dot.
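
The test-file hunks just above — and the library hunks earlier in this patch — are all the same mechanical change: black 23 deletes a blank line at the top of a newly opened block, which black 22 accepted. A minimal before/after sketch with a hypothetical class (not taken from the patch)::

    # Accepted by black 22.x:
    class Example:

        attribute = 1

    # black 23.x removes the blank line that opens the body:
    class Example:
        attribute = 1
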
diff --git a/tests/test_datastore.py b/tests/test_datastore.py
index 4de90a24ac..0f1ab0996a 100644
--- a/tests/test_datastore.py
+++ b/tests/test_datastore.py
@@ -736,7 +736,6 @@ def runIngestTest(self, func, expectOutput=True):
     def testIngestNoTransfer(self):
         """Test ingesting existing files with no transfer."""
         for mode in (None, "auto"):
-
             # Some datastores have auto but can't do in place transfer
             if mode == "auto" and "auto" in self.ingestTransferModes and not self.canIngestNoTransferAuto:
                 continue
@@ -1080,7 +1079,6 @@ def testCleanup(self):
         # a file behind
         for formatter in (BadWriteFormatter, BadNoWriteFormatter):
             with self.subTest(formatter=formatter):
-
                 # Monkey patch the formatter
                 datastore.formatterFactory.registerFormatter(ref.datasetType, formatter, overwrite=True)

@@ -1256,7 +1254,6 @@ def testConstraints(self):
             ("metric33", dataId2, sc2, (True, True, False), True),
             ("metric5", dataId1, sc2, (False, True, False), True),
         ):
-
             # Choose different temp file depending on StorageClass
             testfile = testfile_j if sc.name.endswith("Json") else testfile_y

diff --git a/tests/test_dimensions.py b/tests/test_dimensions.py
index 5960399f70..a21c0f7420 100644
--- a/tests/test_dimensions.py
+++ b/tests/test_dimensions.py
@@ -401,7 +401,6 @@ def chain(self, n: Optional[int] = None) -> Iterator:


 class DataCoordinateTestCase(unittest.TestCase):
-
     RANDOM_SEED = 10

     @classmethod
diff --git a/tests/test_logging.py b/tests/test_logging.py
index e50708acf3..1d06c2d70a 100644
--- a/tests/test_logging.py
+++ b/tests/test_logging.py
@@ -107,7 +107,6 @@ def testRecordCapture(self):
         self.assertIn("...", str(cm.exception))

     def testRecordsFormatting(self):
-
         self.log.setLevel(logging.DEBUG)
         self.log.debug("debug message")
         self.log.warning("warning message")
@@ -185,7 +184,6 @@ def testButlerLogRecords(self):
             records.append({})

     def testExceptionInfo(self):
-
         self.log.setLevel(logging.DEBUG)
         try:
             raise RuntimeError("A problem has been encountered.")

From bbc060188cc9b44b5b2beea6883a1e9885d1d6f2 Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Mon, 27 Feb 2023 15:16:30 -0700
Subject: [PATCH 4/8] Explicitly point to git release branches for utils and resources

---
 requirements.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 9984f5d65f..1a71a84ddf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,8 +6,8 @@ pydantic
 httpx
 deprecated >=1.2
 git+https://github.com/lsst/sphgeom@24.0.0#egg=lsst-sphgeom
-lsst-utils >= 24.0.0,<25.0.0
-lsst-resources >= 24.0.0,<25.0.0
+git+https://github.com/lsst/utils@v24.0.x#egg=lsst-utils
+git+https://github.com/lsst/resources@v24.0.x#egg=lsst-resources
 # optional
 backoff >= 1.10
 boto3 >= 1.13

From bff8803bc70d4b0987121f020306818dd6e915eb Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Mon, 27 Feb 2023 15:19:00 -0700
Subject: [PATCH 5/8] Pin this release branch dependencies

We want 24.0 to only install with other 24.x releases but not the
weekly 24.2022.x releases.
---
 setup.cfg | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 879eb6e4f2..a921d53ed2 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -33,9 +33,9 @@ install_requires =
     pyyaml >=5.1
     sqlalchemy >= 1.4
     click >= 7.0
-    lsst-sphgeom
-    lsst-utils
-    lsst-resources
+    lsst-sphgeom >=24.0.0, <24.100.0
+    lsst-utils >=24.0.0, <24.100.0
+    lsst-resources >=24.0.0, <24.100.0
     deprecated >= 1.2
     pydantic
 tests_require =

From 109bb88ab00d95b52e71d285f2fe928d07765f77 Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Mon, 27 Feb 2023 16:34:30 -0700
Subject: [PATCH 6/8] Pin sqlalchemy<2

---
 requirements.txt | 2 +-
 setup.cfg        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 1a71a84ddf..ca119a85ad 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
 pyyaml >= 5.1
 astropy >= 4.0
 click >7.0
-sqlalchemy >= 1.4
+sqlalchemy >= 1.4,<2.0
 pydantic
 httpx
 deprecated >=1.2
diff --git a/setup.cfg b/setup.cfg
index a921d53ed2..0934eac5e6 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,7 +31,7 @@ setup_requires =
 install_requires =
     astropy >=4.0
     pyyaml >=5.1
-    sqlalchemy >= 1.4
+    sqlalchemy >= 1.4,<2.0
     click >= 7.0
     lsst-sphgeom >=24.0.0, <24.100.0
     lsst-utils >=24.0.0, <24.100.0

From 6be19acb2bc051718f9c5cad4263ffdfb2dff47f Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Mon, 27 Feb 2023 16:35:04 -0700
Subject: [PATCH 7/8] Use python 3.10 for pypi action

---
 .github/workflows/build.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 9b31cbca14..779b8b5178 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -109,7 +109,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v3
         with:
-          python-version: 3.8
+          python-version: "3.10"
           cache: "pip"
           cache-dependency-path: "setup.cfg"

From 6b6d35d3900d96735fe2fad87c0cb4a14b76921d Mon Sep 17 00:00:00 2001
From: Tim Jenness
Date: Mon, 27 Feb 2023 16:35:25 -0700
Subject: [PATCH 8/8] Remove statement that 3.8 and 3.9 are supported

---
 setup.cfg | 2 --
 1 file changed, 2 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 0934eac5e6..a3eee9e276 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -10,8 +10,6 @@ classifiers =
     License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
     Operating System :: OS Independent
     Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.8
-    Programming Language :: Python :: 3.9
     Programming Language :: Python :: 3.10
     Topic :: Scientific/Engineering :: Astronomy
 long_description = file: README.md
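
A note on the pins introduced in patch 5: under PEP 440 ordering, a weekly tag such as ``24.2022.44`` compares *greater* than ``24.100.0`` (2022 beats 100 in the second release segment), so the ``<24.100.0`` upper bound excludes the weeklies while still admitting ordinary ``24.x`` patch releases. This can be verified with the ``packaging`` library (assumed to be installed; the version strings are illustrative)::

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=24.0.0, <24.100.0")

    # Ordinary release-branch versions satisfy the pin...
    assert "24.0.0" in spec
    assert "24.1.0" in spec

    # ...but weekly releases sort above 24.100.0 and are excluded.
    assert "24.2022.44" not in spec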