Merge pull request #908 from lsst/tickets/DM-41750
DM-41750: Fix some problems with global state in tests
timj committed Nov 22, 2023
2 parents 8ca025c + 869a33c commit 593d116
Showing 15 changed files with 132 additions and 81 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build.yaml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11"]
python-version: ["3.11", "3.12"]

steps:
- uses: actions/checkout@v3
@@ -110,7 +110,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
python-version: "3.11"

- name: Install dependencies
run: |
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "lsst-daf-butler"
requires-python = ">=3.10.0"
requires-python = ">=3.11.0"
description = "An abstraction layer for reading and writing astronomical data to datastores."
license = {text = "BSD 3-Clause License"}
readme = "README.md"
@@ -16,7 +16,7 @@ classifiers = [
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.11",
"Topic :: Scientific/Engineering :: Astronomy",
]
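The bump of the minimum Python from 3.10 to 3.11 is what makes the `datetime.UTC` alias used throughout the rest of this commit available at runtime. A quick sanity check one might run (hypothetical snippet, not part of the commit):

```python
import datetime
import sys

# lsst-daf-butler now declares requires-python >= 3.11.0; datetime.UTC
# (an alias for datetime.timezone.utc) only exists from Python 3.11 on.
assert sys.version_info >= (3, 11), "lsst-daf-butler requires Python 3.11+"
assert datetime.UTC is datetime.timezone.utc
```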
33 changes: 29 additions & 4 deletions python/lsst/daf/butler/_storage_class.py
@@ -724,11 +724,11 @@ def addFromConfig(self, config: StorageClassConfig | Config | str) -> None:
# components or parents before their classes are defined
# we have a helper function that we can call recursively
# to extract definitions from the configuration.
def processStorageClass(name: str, sconfig: StorageClassConfig, msg: str = "") -> None:
def processStorageClass(name: str, _sconfig: StorageClassConfig, msg: str = "") -> None:
# Maybe we've already processed this through recursion
if name not in sconfig:
if name not in _sconfig:
return
info = sconfig.pop(name)
info = _sconfig.pop(name)

# Always create the storage class so we can ensure that
# we are not trying to overwrite with a different definition
@@ -757,9 +757,31 @@ def processStorageClass(name: str, sconfig: StorageClassConfig, msg: str = "") -
baseClass = None
if "inheritsFrom" in info:
baseName = info["inheritsFrom"]
if baseName not in self:

# The inheritsFrom feature requires that the storage class
# being inherited from is itself a subclass of StorageClass
# that was created with makeNewStorageClass. If it was made
# and registered with a simple StorageClass constructor it
# cannot be used here and we try to recreate it.
if baseName in self:
baseClass = type(self.getStorageClass(baseName))
if baseClass is StorageClass:
log.warning(
"Storage class %s is requested to inherit from %s but that storage class "
"has not been defined to be a subclass of StorageClass and so can not "
"be used. Attempting to recreate parent class from current configuration.",
name,
baseName,
)
processStorageClass(baseName, sconfig, msg)
else:
processStorageClass(baseName, sconfig, msg)
baseClass = type(self.getStorageClass(baseName))
if baseClass is StorageClass:
raise TypeError(
f"Configuration for storage class {name} requests to inherit from "
f" storage class {baseName} but that class is not defined correctly."
)

newStorageClassType = self.makeNewStorageClass(name, baseClass, **storageClassKwargs)
newStorageClass = newStorageClassType()
@@ -942,6 +964,9 @@ def registerStorageClass(self, storageClass: StorageClass, msg: str | None = Non
f"New definition for StorageClass {storageClass.name} ({storageClass!r}) "
f"differs from current definition ({existing!r}){errmsg}"
)
if type(existing) is StorageClass and type(storageClass) is not StorageClass:
# Replace generic with specialist subclass equivalent.
self._storageClasses[storageClass.name] = storageClass
else:
self._storageClasses[storageClass.name] = storageClass

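The new `inheritsFrom` handling hinges on the parent storage class being a dynamically created subclass of `StorageClass` rather than a plain instance. A minimal standalone sketch of that pattern (illustrative names only, not the daf_butler API):

```python
# Illustrative sketch: why a parent registered as a bare StorageClass instance
# cannot be inherited from, while one made by a subclass factory can.
class StorageClass:
    """Stand-in for lsst.daf.butler.StorageClass."""

    name = "generic"


def make_new_storage_class(name: str, base: type | None = None) -> type[StorageClass]:
    """Create a named subclass, loosely mirroring makeNewStorageClass."""
    return type(name, (base or StorageClass,), {"name": name})


# Registered as a plain instance: type() gives back StorageClass itself,
# which is exactly the case the new warning/recreation path catches.
plain_parent = StorageClass()
assert type(plain_parent) is StorageClass

# Created through the factory: a child can genuinely inherit behaviour.
ParentType = make_new_storage_class("StructuredDataDict")
ChildType = make_new_storage_class("MetricsConversion", base=ParentType)
assert issubclass(ChildType, ParentType)
```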
2 changes: 1 addition & 1 deletion python/lsst/daf/butler/cli/cliLog.py
@@ -56,7 +56,7 @@ class PrecisionLogFormatter(logging.Formatter):

def formatTime(self, record: logging.LogRecord, datefmt: str | None = None) -> str:
"""Format the time as an aware datetime."""
ct: datetime.datetime = self.converter(record.created, tz=datetime.timezone.utc) # type: ignore
ct: datetime.datetime = self.converter(record.created, tz=datetime.UTC) # type: ignore
if self.use_local:
ct = ct.astimezone()
if datefmt:
6 changes: 3 additions & 3 deletions python/lsst/daf/butler/datastore/cache_manager.py
@@ -167,7 +167,7 @@ def from_file(cls, file: ResourcePath, root: ResourcePath) -> CacheEntry:
size=stat.st_size,
ref=id_,
component=component,
ctime=datetime.datetime.utcfromtimestamp(stat.st_ctime),
ctime=datetime.datetime.fromtimestamp(stat.st_ctime, datetime.UTC),
)


@@ -239,7 +239,7 @@ def items(self) -> ItemsView[str, CacheEntry]:
name="marker",
size=0,
ref=uuid.UUID("{00000000-0000-0000-0000-000000000000}"),
ctime=datetime.datetime.utcfromtimestamp(0),
ctime=datetime.datetime.fromtimestamp(0, datetime.UTC),
)

def pop(self, key: str, default: CacheEntry | None = __marker) -> CacheEntry | None:
@@ -967,7 +967,7 @@ def _expire_cache(self) -> None:
return

if self._expiration_mode == "age":
now = datetime.datetime.utcnow()
now = datetime.datetime.now(datetime.UTC)
for key in self._sort_cache():
delta = now - self._cache_entries[key].ctime
if delta.seconds > self._expiration_threshold:
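All of the `utcnow()`/`utcfromtimestamp()` call sites above move to timezone-aware equivalents. A small illustration of the difference (assumes Python 3.11+ for `datetime.UTC`):

```python
import datetime

# Deprecated in Python 3.12 and returns a *naive* value even though it is UTC:
naive = datetime.datetime.utcnow()
print(naive.tzinfo)  # None

# Timezone-aware replacements used in this commit:
aware_now = datetime.datetime.now(datetime.UTC)
aware_epoch = datetime.datetime.fromtimestamp(0, datetime.UTC)
print(aware_now.tzinfo)  # UTC
print(aware_epoch)       # 1970-01-01 00:00:00+00:00
```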
2 changes: 1 addition & 1 deletion python/lsst/daf/butler/datastore/file_templates.py
@@ -201,7 +201,7 @@ def validateTemplates(
log.critical("Template failure with key '%s': %s", matchKey, e)

if logFailures and unmatchedKeys:
log.warning("Unchecked keys: %s", ", ".join([str(k) for k in unmatchedKeys]))
log.warning("Unchecked keys: '%s'", ", ".join([str(k) for k in unmatchedKeys]))

if failed:
if len(failed) == 1:
2 changes: 1 addition & 1 deletion python/lsst/daf/butler/datastores/chainedDatastore.py
@@ -461,7 +461,7 @@ def put_new(self, inMemoryDataset: Any, ref: DatasetRef) -> Mapping[str, Dataset
npermanent = 0
nephemeral = 0
stored_refs: dict[str, DatasetRef] = {}
for datastore, constraints in zip(self.datastores, self.datastoreConstraints):
for datastore, constraints in zip(self.datastores, self.datastoreConstraints, strict=True):
if (
constraints is not None and not constraints.isAcceptable(ref)
) or not datastore.constraints.isAcceptable(ref):
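`strict=True` (available since Python 3.10) turns a silent truncation into an immediate error if the datastore and constraint sequences ever get out of sync. Roughly:

```python
datastores = ["permanent", "ephemeral"]
constraints = ["accept-all"]  # deliberately one entry short

# Plain zip() would silently stop after the shorter iterable; strict=True
# surfaces the length mismatch as a ValueError instead.
try:
    list(zip(datastores, constraints, strict=True))
except ValueError as exc:
    print(exc)  # zip() argument 2 is shorter than argument 1
```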
7 changes: 6 additions & 1 deletion python/lsst/daf/butler/logging.py
@@ -111,6 +111,11 @@ def MDCRemove(cls, key: str) -> None:
"""
cls._MDC.pop(key, None)

@classmethod
def clear_mdc(cls) -> None:
"""Clear all MDC entries."""
cls._MDC.clear()

@classmethod
@contextmanager
def set_mdc(cls, mdc: dict[str, str]) -> Generator[None, None, None]:
@@ -231,7 +236,7 @@ def from_record(cls, record: LogRecord) -> "ButlerLogRecord":
# Always use UTC because in distributed systems we can't be sure
# what timezone localtime is and it's easier to compare logs if
# every system is using the same time.
record_dict["asctime"] = datetime.datetime.fromtimestamp(record.created, tz=datetime.timezone.utc)
record_dict["asctime"] = datetime.datetime.fromtimestamp(record.created, tz=datetime.UTC)

# Sometimes exception information is included so must be
# extracted.
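The new `clear_mdc()` classmethod exists because the MDC (Mapped Diagnostic Context) is class-level, effectively global, state, which is exactly the kind of leakage between tests this ticket targets. A simplified sketch of the pattern (class body trimmed to the relevant pieces; the real helper in `lsst.daf.butler.logging` has more functionality):

```python
class ButlerMDC:
    """Trimmed stand-in for the logging MDC helper."""

    _MDC: dict[str, str] = {}

    @classmethod
    def MDCRemove(cls, key: str) -> None:
        cls._MDC.pop(key, None)

    @classmethod
    def clear_mdc(cls) -> None:
        """Clear all MDC entries."""
        cls._MDC.clear()


# In a test cleanup, one call resets the shared state instead of removing
# keys one by one:
ButlerMDC._MDC["LABEL"] = "test-1234"
ButlerMDC.clear_mdc()
assert not ButlerMDC._MDC
```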
@@ -32,8 +32,8 @@

__all__ = ("ByDimensionsDatasetRecordStorage",)

import datetime
from collections.abc import Callable, Iterable, Iterator, Sequence, Set
from datetime import datetime
from typing import TYPE_CHECKING

import astropy.time
@@ -586,13 +586,13 @@ def insert(
# Current timestamp, type depends on schema version. Use microsecond
# precision for astropy time to keep things consistent with
# TIMESTAMP(6) SQL type.
timestamp: datetime | astropy.time.Time
timestamp: datetime.datetime | astropy.time.Time
if self._use_astropy:
# Astropy `now()` precision should be the same as `utcnow()` which
# Astropy `now()` precision should be the same as `now()` which
# should mean microsecond.
timestamp = astropy.time.Time.now()
else:
timestamp = datetime.utcnow()
timestamp = datetime.datetime.now(datetime.UTC)

# Iterate over data IDs, transforming a possibly-single-pass iterable
# into a list.
@@ -647,11 +647,11 @@ def import_(

# Current timestamp, type depends on schema version.
if self._use_astropy:
# Astropy `now()` precision should be the same as `utcnow()` which
# Astropy `now()` precision should be the same as `now()` which
# should mean microsecond.
timestamp = sqlalchemy.sql.literal(astropy.time.Time.now(), type_=ddl.AstropyTimeNsecTai)
else:
timestamp = sqlalchemy.sql.literal(datetime.utcnow())
timestamp = sqlalchemy.sql.literal(datetime.datetime.now(datetime.UTC))

# Iterate over data IDs, transforming a possibly-single-pass iterable
# into a list.
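Both timestamp branches now produce UTC values; which representation is used depends on the registry schema version. A rough illustration of the two flavours (assumes astropy is installed; not the actual record-storage code):

```python
import datetime

import astropy.time

# Astropy path: Time.now() has microsecond-level precision, matching the
# TIMESTAMP(6) column the comment above refers to.
astropy_now = astropy.time.Time.now()

# Standard-library path: a timezone-aware datetime instead of the old
# naive datetime.utcnow().
stdlib_now = datetime.datetime.now(datetime.UTC)

print(astropy_now.isot, stdlib_now.isoformat())
```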
@@ -78,7 +78,7 @@
(?P<value>
(?P<number>-?(\d+(\.\d*)|(\.\d+))) # floating point number
|
(?P<iso>\d+-\d+-\d+([ T]\d+:\d+(:\d+([.]\d*)?)?)?) # iso(t)
(?P<iso>\d+-\d+-\d+([ T]\d+:\d+(:\d+([.]\d*)?)?)?) # iso(t) [no timezone]
|
(?P<fits>[+]\d+-\d+-\d+(T\d+:\d+:\d+([.]\d*)?)?) # fits
|
@@ -108,6 +108,11 @@ def _parseTimeString(time_str):
ValueError
Raised if input string has unexpected format
"""
# Check for time zone. Python datetime objects can be timezone-aware
# and if one has been stringified then there will be a +00:00 on the end.
# Special case UTC. Fail for other timezones.
time_str = time_str.replace("+00:00", "")

match = _re_time_str.match(time_str)
if not match:
raise ValueError(f'Time string "{time_str}" does not match known formats')
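The stripped suffix comes from stringifying a timezone-aware datetime, which now happens elsewhere in this commit. A quick demonstration of the input the parser has to cope with:

```python
import datetime

# A timezone-aware UTC datetime stringifies with a "+00:00" suffix, which the
# iso branch of the regular expression above does not accept.
aware = datetime.datetime(2023, 11, 22, 12, 30, tzinfo=datetime.UTC)
time_str = str(aware)
print(time_str)  # 2023-11-22 12:30:00+00:00

# Special-casing UTC by stripping the suffix restores the expected format;
# other offsets are left alone and still fail to parse.
time_str = time_str.replace("+00:00", "")
print(time_str)  # 2023-11-22 12:30:00
```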
7 changes: 4 additions & 3 deletions python/lsst/daf/butler/registry/tests/_registry.py
@@ -30,6 +30,7 @@

__all__ = ["RegistryTests"]

import datetime
import itertools
import logging
import os
@@ -39,7 +40,7 @@
from abc import ABC, abstractmethod
from collections import defaultdict, namedtuple
from collections.abc import Iterator
from datetime import datetime, timedelta
from datetime import timedelta
from typing import TYPE_CHECKING

import astropy.time
@@ -2481,9 +2482,9 @@ def testSkipCalibs(self):
def testIngestTimeQuery(self):
registry = self.makeRegistry()
self.loadData(registry, "base.yaml")
dt0 = datetime.utcnow()
dt0 = datetime.datetime.now(datetime.UTC)
self.loadData(registry, "datasets.yaml")
dt1 = datetime.utcnow()
dt1 = datetime.datetime.now(datetime.UTC)

datasets = list(registry.queryDatasets(..., collections=...))
len0 = len(datasets)
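The test bounds `dt0`/`dt1` switch to timezone-aware values, presumably to keep them comparable with the now timezone-aware ingest timestamps; mixing the two styles fails outright, as this small illustration shows:

```python
import datetime

aware = datetime.datetime.now(datetime.UTC)
naive = datetime.datetime.utcnow()

# Comparing naive and aware datetimes raises, which is why both the registry
# timestamps and the test bounds need to use the same (aware) style.
try:
    naive < aware
except TypeError as exc:
    print(exc)  # can't compare offset-naive and offset-aware datetimes
```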
15 changes: 10 additions & 5 deletions python/lsst/daf/butler/tests/_examplePythonTypes.py
@@ -77,6 +77,9 @@ def registerMetricsExample(butler: Butler) -> None:
can be retrieved as dataset components.
``StructuredDataNoComponents``
A monolithic write of a `MetricsExample`.
These definitions must match the equivalent definitions in the test YAML
files.
"""
yamlDict = _addFullStorageClass(
butler,
@@ -101,6 +104,7 @@ def registerMetricsExample(butler: Butler) -> None:
pytype=MetricsExample,
parameters={"slice"},
delegate="lsst.daf.butler.tests.MetricsDelegate",
converters={"dict": "lsst.daf.butler.tests.MetricsExample.makeFromDict"},
)

_addFullStorageClass(
@@ -117,9 +121,7 @@ def registerMetricsExample(butler: Butler) -> None:
)


def _addFullStorageClass(
butler: Butler, name: str, formatter: str, *args: Any, **kwargs: Any
) -> StorageClass:
def _addFullStorageClass(butler: Butler, name: str, formatter: str, **kwargs: Any) -> StorageClass:
"""Create a storage class-formatter pair in a repository if it does not
already exist.
@@ -132,7 +134,6 @@ def _addFullStorageClass(
formatter : `str`
The formatter to use with the storage class. Ignored if ``butler``
does not use formatters.
*args
**kwargs
Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
constructor.
@@ -145,7 +146,11 @@ class : `lsst.daf.butler.StorageClass`
"""
storageRegistry = butler._datastore.storageClassFactory

storage = StorageClass(name, *args, **kwargs)
# Use the special constructor to allow a subclass of storage class
# to be created. This allows other test storage classes to inherit from
# this one.
storage_type = storageRegistry.makeNewStorageClass(name, None, **kwargs)
storage = storage_type()
try:
storageRegistry.registerStorageClass(storage)
except ValueError:
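Two things change for the test storage classes: they are now built through `makeNewStorageClass` (so other test storage classes can inherit from them, mirroring the `_storage_class.py` change above), and `StructuredDataNoComponents` gains a `dict` converter. A conceptual sketch of what such a converter mapping means, using local stand-in types rather than the Butler machinery:

```python
# Conceptual sketch of the added converters entry: a storage class can accept
# a dict and turn it into the declared python type via a named factory method.
class MetricsExample:
    def __init__(self, summary=None, output=None, data=None):
        self.summary = summary
        self.output = output
        self.data = data

    @classmethod
    def makeFromDict(cls, exportDict: dict) -> "MetricsExample":
        return cls(**exportDict)


converters = {"dict": MetricsExample.makeFromDict}

payload = {"summary": {"n": 1}, "output": {"ok": True}, "data": [1, 2, 3]}
converted = converters["dict"](payload)
assert isinstance(converted, MetricsExample)
```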
