diff --git a/python/lsst/daf/butler/remote_butler/_remote_butler.py b/python/lsst/daf/butler/remote_butler/_remote_butler.py index df1120cf12..fbb269ea9d 100644 --- a/python/lsst/daf/butler/remote_butler/_remote_butler.py +++ b/python/lsst/daf/butler/remote_butler/_remote_butler.py @@ -560,18 +560,24 @@ def _get_url(self, path: str, version: str = "v1") -> str: return f"{version}/{path}" def _post(self, path: str, model: _BaseModelCompat) -> httpx.Response: + """Send a POST request to the Butler server.""" json = model.model_dump_json(exclude_unset=True).encode("utf-8") url = self._get_url(path) return self._client.post(url, content=json, headers={"content-type": "application/json"}) def _get(self, path: str) -> httpx.Response: + """Send a GET request to the Butler server.""" url = self._get_url(path) return self._client.get(url) def _parse_model(self, response: httpx.Response, model: Type[_AnyPydanticModel]) -> _AnyPydanticModel: + """Deserialize a Pydantic model from the body of an HTTP response.""" return model.model_validate_json(response.content) def _normalize_collections(self, collections: str | Sequence[str] | None) -> CollectionList: + """Convert the ``collections`` parameter in the format used by Butler + methods to a standardized format for the REST API. + """ if collections is None: if not self.collections: raise NoDefaultCollectionError( @@ -585,6 +591,9 @@ def _normalize_collections(self, collections: str | Sequence[str] | None) -> Col return CollectionList(list(wildcards.strings)) def _normalize_dataset_type_name(self, datasetTypeOrName: DatasetType | str) -> DatasetTypeName: + """Convert DatasetType parameters in the format used by Butler methods + to a standardized string name for the REST API.
+ """ if isinstance(datasetTypeOrName, DatasetType): return DatasetTypeName(datasetTypeOrName.name) else: diff --git a/python/lsst/daf/butler/remote_butler/server/handlers/_external.py b/python/lsst/daf/butler/remote_butler/server/handlers/_external.py index 9e2604d128..f84d842b44 100644 --- a/python/lsst/daf/butler/remote_butler/server/handlers/_external.py +++ b/python/lsst/daf/butler/remote_butler/server/handlers/_external.py @@ -150,7 +150,11 @@ def find_dataset( return ref.to_simple() if ref else None -@external_router.get("/v1/get_file/{dataset_id}") +@external_router.get( + "/v1/get_file/{dataset_id}", + summary="Look up via DatasetId (UUID) the information needed to download and use the files associated" + " with a dataset.", +) def get_file( dataset_id: uuid.UUID, factory: Factory = Depends(factory_dependency), @@ -162,7 +166,11 @@ def get_file( return _get_file_by_ref(butler, ref) -@external_router.post("/v1/get_file_by_data_id") +@external_router.post( + "/v1/get_file_by_data_id", + summary="Look up via DataId (metadata key/value pairs) the information needed to download" + " and use the files associated with a dataset.", +) def get_file_by_data_id( request: GetFileByDataIdRequestModel, factory: Factory = Depends(factory_dependency), @@ -181,5 +189,6 @@ def get_file_by_data_id( def _get_file_by_ref(butler: Butler, ref: DatasetRef) -> GetFileResponseModel: + """Return file information associated with ``ref``.""" payload = butler._datastore.prepare_get_for_external_client(ref) return GetFileResponseModel.model_validate(payload) diff --git a/python/lsst/daf/butler/remote_butler/server_models.py b/python/lsst/daf/butler/remote_butler/server_models.py index c6f03555cb..19ffdebcd2 100644 --- a/python/lsst/daf/butler/remote_butler/server_models.py +++ b/python/lsst/daf/butler/remote_butler/server_models.py @@ -50,6 +50,8 @@ class FindDatasetModel(_BaseModelCompat): class GetFileByDataIdRequestModel(_BaseModelCompat): + """Request model for
``get_file_by_data_id``.""" + dataset_type_name: DatasetTypeName data_id: SerializedDataId collections: CollectionList diff --git a/tests/test_server.py b/tests/test_server.py index 290eca2656..966c4f78e9 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -298,26 +298,26 @@ def check_sc_override(converted): check_sc_override(self.butler.get(ref, storageClass=new_sc)) - # Test storage class override via DatasetRef + # Test storage class override via DatasetRef. check_sc_override(self.butler.get(ref.overrideStorageClass("MetricsConversion"))) - # Test storage class override via DatasetType + # Test storage class override via DatasetType. check_sc_override( self.butler.get( ref.datasetType.overrideStorageClass(new_sc), dataId=data_id, collections=collections ) ) - # Test component override via DatasetRef + # Test component override via DatasetRef. component_ref = ref.makeComponentRef("summary") component_data = self.butler.get(component_ref) self.assertEqual(component_data, MetricTestRepo.METRICS_EXAMPLE_SUMMARY) - # Test overriding both storage class and component via DatasetRef + # Test overriding both storage class and component via DatasetRef. converted_component_data = self.butler.get(component_ref, storageClass="DictConvertibleModel") self.assertIsInstance(converted_component_data, DictConvertibleModel) self.assertEqual(converted_component_data.content, MetricTestRepo.METRICS_EXAMPLE_SUMMARY) - # Test component override via DatasetType + # Test component override via DatasetType. dataset_type_component_data = self.butler.get( component_ref.datasetType, component_ref.dataId, collections=collections )