DM-39751: Clarify where a warning comes from and hide it #854

Merged · 2 commits · Jun 22, 2023
python/lsst/daf/butler/registries/sql.py (9 additions, 1 deletion)
@@ -32,6 +32,7 @@
 import sqlalchemy
 from lsst.daf.relation import LeafRelation, Relation
 from lsst.resources import ResourcePathExpression
+from lsst.utils.introspection import find_outside_stacklevel
 from lsst.utils.iteration import ensure_iterable

 from ..core import (
@@ -415,7 +416,13 @@ def removeDatasetType(self, name: str | tuple[str, ...]) -> None:
         # Docstring inherited from lsst.daf.butler.registry.Registry

         for datasetTypeExpression in ensure_iterable(name):
-            datasetTypes = list(self.queryDatasetTypes(datasetTypeExpression))
+            # Catch any warnings from the caller specifying a component
+            # dataset type. This will result in an error later but the
+            # warning could be confusing when the caller is not querying
+            # anything.
+            with warnings.catch_warnings():
+                warnings.simplefilter("ignore", category=FutureWarning)
+                datasetTypes = list(self.queryDatasetTypes(datasetTypeExpression))
             if not datasetTypes:
                 _LOG.info("Dataset type %r not defined", datasetTypeExpression)
             else:
@@ -1103,6 +1110,7 @@ def _standardize_query_dataset_args(
                 warnings.warn(
                     f"Dataset type(s) {missing} are not registered; this will be an error after v26.",
                     FutureWarning,
+                    stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                 )
                 doomed_by.extend(f"Dataset type {name} is not registered." for name in missing)
         elif collections:
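For reference, the suppression in removeDatasetType above is the standard-library pattern: an expected FutureWarning from an internal call is silenced locally, and only for that category, so nothing is hidden from callers globally. A minimal sketch of the same idea, using a hypothetical noisy_lookup() stand-in for queryDatasetTypes():

import warnings


def noisy_lookup(name: str) -> list[str]:
    # Stand-in for a call such as queryDatasetTypes() that emits a
    # FutureWarning for deprecated inputs (e.g. component dataset types).
    warnings.warn(f"{name!r} uses a deprecated form", FutureWarning, stacklevel=2)
    return []


def remove_quietly(name: str) -> None:
    # The filter is scoped to this block and to FutureWarning only, so any
    # unrelated warnings raised by the lookup still propagate normally.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=FutureWarning)
        matches = noisy_lookup(name)
    if not matches:
        print(f"Dataset type {name!r} not defined")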
python/lsst/daf/butler/registry/datasets/byDimensions/_manager.py (15 additions, 3 deletions)
@@ -8,6 +8,7 @@
 from typing import TYPE_CHECKING, Any

 import sqlalchemy
+from lsst.utils.introspection import find_outside_stacklevel

 from ....core import DatasetId, DatasetIdGenEnum, DatasetRef, DatasetType, DimensionUniverse, ddl
 from ..._collection_summary import CollectionSummary
@@ -371,7 +372,9 @@ def resolve_wildcard(
         for name, dataset_type in wildcard.values.items():
             parent_name, component_name = DatasetType.splitDatasetTypeName(name)
             if component_name is not None and components_deprecated:
-                warnings.warn(deprecation_message, FutureWarning)
+                warnings.warn(
+                    deprecation_message, FutureWarning, stacklevel=find_outside_stacklevel("lsst.daf.butler")
+                )
             if (found_storage := self.find(parent_name)) is not None:
                 found_parent = found_storage.datasetType
                 if component_name is not None:
@@ -412,7 +415,11 @@
                        and not already_warned
                        and components_deprecated
                    ):
-                        warnings.warn(deprecation_message, FutureWarning)
+                        warnings.warn(
+                            deprecation_message,
+                            FutureWarning,
+                            stacklevel=find_outside_stacklevel("lsst.daf.butler"),
+                        )
                        already_warned = True
                except KeyError as err:
                    _LOG.warning(
@@ -426,6 +433,7 @@
             warnings.warn(
                 "Passing wildcard patterns here is deprecated and will be prohibited after v26.",
                 FutureWarning,
+                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
             )
             for storage in self._byName.values():
                 if any(p.fullmatch(storage.datasetType.name) for p in wildcard.patterns):
@@ -451,7 +459,11 @@
                    ):
                        result[storage.datasetType].add(component_name)
             if not already_warned and components_deprecated:
-                warnings.warn(deprecation_message, FutureWarning)
+                warnings.warn(
+                    deprecation_message,
+                    FutureWarning,
+                    stacklevel=find_outside_stacklevel("lsst.daf.butler"),
+                )
                 already_warned = True
         return {k: list(v) for k, v in result.items()}

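The recurring stacklevel=find_outside_stacklevel("lsst.daf.butler") argument is what makes these warnings useful to users: find_outside_stacklevel (from lsst.utils.introspection) walks the call stack and returns the stacklevel of the first frame outside the named package, so the file and line reported by warnings.warn belong to the caller's code rather than to butler internals. A rough sketch of the underlying mechanism with a hand-written stacklevel, independent of the lsst.utils helper:

import warnings


def _deprecated_helper() -> None:
    # stacklevel=1 (the default) would attribute the warning to this line,
    # deep inside the library; stacklevel=2 attributes it to whoever called
    # _deprecated_helper().  find_outside_stacklevel() computes the right
    # number automatically, however many internal frames sit in between.
    warnings.warn("use the new API instead", FutureWarning, stacklevel=2)


def caller() -> None:
    _deprecated_helper()  # the reported warning location is this line


caller()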
python/lsst/daf/butler/registry/obscore/_manager.py (2 additions, 0 deletions)
@@ -35,6 +35,7 @@
 from lsst.daf.butler import Config, DataCoordinate, DatasetRef, DimensionRecordColumnTag, DimensionUniverse
 from lsst.daf.relation import Join
 from lsst.sphgeom import Region
+from lsst.utils.introspection import find_outside_stacklevel
 from lsst.utils.iteration import chunk_iterable

 from ..interfaces import ObsCoreTableManager, VersionTuple
@@ -313,6 +314,7 @@ def update_exposure_regions(self, instrument: str, region_data: Iterable[tuple[i
                 warnings.warn(
                     f"Failed to convert region for exposure={exposure} detector={detector}: {exc}",
                     category=RegionTypeWarning,
+                    stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                 )
                 continue

python/lsst/daf/butler/registry/obscore/_records.py (2 additions, 0 deletions)
@@ -32,6 +32,7 @@

 import astropy.time
 from lsst.daf.butler import DataCoordinate, DatasetRef, Dimension, DimensionRecord, DimensionUniverse
+from lsst.utils.introspection import find_outside_stacklevel

 from ._config import ExtraColumnConfig, ExtraColumnType, ObsCoreConfig
 from ._spatial import RegionTypeError, RegionTypeWarning
@@ -197,6 +198,7 @@ def __call__(self, ref: DatasetRef, context: SqlQueryContext) -> Record | None:
             warnings.warn(
                 f"Failed to convert region for obscore dataset {ref.id}: {exc}",
                 category=RegionTypeWarning,
+                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
             )
         else:
             record.update(plugin_records)
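A side benefit of attributing RegionTypeWarning to the caller is that downstream code can decide for itself how strictly to treat failed region conversions, now that the category is explicit and the reported location points at the offending call. A hedged sketch from the caller's side, assuming RegionTypeWarning can be imported from the obscore package as the "from ._spatial import ... RegionTypeWarning" line above suggests, and using a hypothetical helper name:

import warnings

# Assumed import path, based on the relative import shown in the diff;
# adjust if the warning class is re-exported elsewhere.
from lsst.daf.butler.registry.obscore._spatial import RegionTypeWarning


def update_regions_strictly(manager, instrument, region_data):
    # Promote failed region conversions to exceptions for this call only,
    # so bad input regions fail fast instead of being skipped with a warning.
    with warnings.catch_warnings():
        warnings.simplefilter("error", category=RegionTypeWarning)
        return manager.update_exposure_regions(instrument, region_data)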