Skip to content

Commit

Permalink
Add missing documentation
Browse files Browse the repository at this point in the history
  • Loading branch information
dhirving committed Dec 13, 2023
1 parent 4e06df0 commit c54ad78
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 7 deletions.
9 changes: 9 additions & 0 deletions python/lsst/daf/butler/remote_butler/_remote_butler.py
Original file line number Diff line number Diff line change
Expand Up @@ -560,18 +560,24 @@ def _get_url(self, path: str, version: str = "v1") -> str:
return f"{version}/{path}"

def _post(self, path: str, model: _BaseModelCompat) -> httpx.Response:
"""Send a POST request to the Butler server."""
json = model.model_dump_json(exclude_unset=True).encode("utf-8")
url = self._get_url(path)
return self._client.post(url, content=json, headers={"content-type": "application/json"})

def _get(self, path: str) -> httpx.Response:
"""Send a GET request to the Butler server."""
url = self._get_url(path)
return self._client.get(url)

def _parse_model(self, response: httpx.Response, model: Type[_AnyPydanticModel]) -> _AnyPydanticModel:
"""Deserialize a Pydantic model from the body of an HTTP response."""
return model.model_validate_json(response.content)

def _normalize_collections(self, collections: str | Sequence[str] | None) -> CollectionList:
"""Convert the ``collections`` parameter in the format used by Butler
methods to a standardized format for the REST API.
"""
if collections is None:
if not self.collections:
raise NoDefaultCollectionError(
Expand All @@ -585,6 +591,9 @@ def _normalize_collections(self, collections: str | Sequence[str] | None) -> Col
return CollectionList(list(wildcards.strings))

def _normalize_dataset_type_name(self, datasetTypeOrName: DatasetType | str) -> DatasetTypeName:
"""Convert DatasetType parameters in the format used by Butler methods
to a standardized string name for the REST API.
"""
if isinstance(datasetTypeOrName, DatasetType):
return DatasetTypeName(datasetTypeOrName.name)
else:
Expand Down
13 changes: 11 additions & 2 deletions python/lsst/daf/butler/remote_butler/server/handlers/_external.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,11 @@ def find_dataset(
return ref.to_simple() if ref else None


@external_router.get("/v1/get_file/{dataset_id}")
@external_router.get(
"/v1/get_file/{dataset_id}",
summary="Lookup via DatasetId (UUID) the information needed to download and use the files associated"
" with a dataset.",
)
def get_file(
dataset_id: uuid.UUID,
factory: Factory = Depends(factory_dependency),
Expand All @@ -162,7 +166,11 @@ def get_file(
return _get_file_by_ref(butler, ref)


@external_router.post("/v1/get_file_by_data_id")
@external_router.post(
"/v1/get_file_by_data_id",
summary="Lookup via DataId (metadata key/value pairs) the information needed to download"
" and use the files associated with a dataset.",
)
def get_file_by_data_id(
request: GetFileByDataIdRequestModel,
factory: Factory = Depends(factory_dependency),
Expand All @@ -181,5 +189,6 @@ def get_file_by_data_id(


def _get_file_by_ref(butler: Butler, ref: DatasetRef) -> GetFileResponseModel:
    """Return file information associated with ``ref``.

    Parameters
    ----------
    butler : `Butler`
        Butler whose datastore holds the dataset's artifacts.
    ref : `DatasetRef`
        Fully-resolved reference to the dataset to look up.

    Returns
    -------
    response : `GetFileResponseModel`
        Validated payload describing how a client can retrieve the files.
    """
    return GetFileResponseModel.model_validate(
        butler._datastore.prepare_get_for_external_client(ref)
    )
2 changes: 2 additions & 0 deletions python/lsst/daf/butler/remote_butler/server_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,8 @@ class FindDatasetModel(_BaseModelCompat):


class GetFileByDataIdRequestModel(_BaseModelCompat):
"""Request model for ``get_file_by_data_id``."""

dataset_type_name: DatasetTypeName
data_id: SerializedDataId
collections: CollectionList
Expand Down
10 changes: 5 additions & 5 deletions tests/test_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -298,26 +298,26 @@ def check_sc_override(converted):

check_sc_override(self.butler.get(ref, storageClass=new_sc))

# Test storage class override via DatasetRef
# Test storage class override via DatasetRef.
check_sc_override(self.butler.get(ref.overrideStorageClass("MetricsConversion")))
# Test storage class override via DatasetType
# Test storage class override via DatasetType.
check_sc_override(
self.butler.get(
ref.datasetType.overrideStorageClass(new_sc), dataId=data_id, collections=collections
)
)

# Test component override via DatasetRef
# Test component override via DatasetRef.
component_ref = ref.makeComponentRef("summary")
component_data = self.butler.get(component_ref)
self.assertEqual(component_data, MetricTestRepo.METRICS_EXAMPLE_SUMMARY)

# Test overriding both storage class and component via DatasetRef
# Test overriding both storage class and component via DatasetRef.
converted_component_data = self.butler.get(component_ref, storageClass="DictConvertibleModel")
self.assertIsInstance(converted_component_data, DictConvertibleModel)
self.assertEqual(converted_component_data.content, MetricTestRepo.METRICS_EXAMPLE_SUMMARY)

# Test component override via DatasetType
# Test component override via DatasetType.
dataset_type_component_data = self.butler.get(
component_ref.datasetType, component_ref.dataId, collections=collections
)
Expand Down

0 comments on commit c54ad78

Please sign in to comment.