diff --git a/src/dodal/plan_stubs/__init__.py b/src/dodal/plan_stubs/__init__.py
index 6db5b3b480..5279aae100 100644
--- a/src/dodal/plan_stubs/__init__.py
+++ b/src/dodal/plan_stubs/__init__.py
@@ -1,3 +1,11 @@
+"""Plan stubs are either building blocks used to compose larger plans, or
+self-contained functionality that does not run a complete Run or collect data but
+may still be useful to run on its own.
+Stubs of the latter kind should be added to __export__ so that they can be picked up
+by blueapi. Functions in __export__ are checked to ensure that their arguments are
+non-variadic (so that well-typed forms can be generated for them).
+"""
+
 from .check_topup import check_topup_and_wait_if_necessary, wait_for_topup_complete
 from .data_session import (
     DATA_SESSION,
@@ -11,6 +19,7 @@
     home_and_reset_wrapper,
     move_and_reset_wrapper,
 )
+from .wrapped import move, move_relative, set_absolute, set_relative, sleep, wait
 
 __all__ = [
     "DATA_SESSION",
@@ -23,4 +32,19 @@
     "home_and_reset_decorator",
     "home_and_reset_wrapper",
     "move_and_reset_wrapper",
+    "wait",
+    "set_absolute",
+    "set_relative",
+    "move",
+    "move_relative",
+    "sleep",
+]
+
+__export__ = [  # Stubs that should be exposed by blueapi
+    "wait",
+    "set_absolute",
+    "set_relative",
+    "move",
+    "move_relative",
+    "sleep",
 ]
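Aside (not part of the patch): a minimal sketch of how a consumer such as blueapi might resolve the exported stubs, preferring the explicit `__export__` list and falling back to `__all__` — the same precedence that `tests/plans/test_compliance.py` below relies on. The `exported_names` helper is purely illustrative.

```python
from types import ModuleType

from dodal import plan_stubs


def exported_names(mod: ModuleType) -> list[str]:
    # Prefer the explicit export list; fall back to the module's public API
    return list(getattr(mod, "__export__", None) or getattr(mod, "__all__", []))


print(exported_names(plan_stubs))  # ['wait', 'set_absolute', 'set_relative', ...]
```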
diff --git a/src/dodal/plan_stubs/data_session.py b/src/dodal/plan_stubs/data_session.py
index bfdd91d5c2..bed1946ddf 100644
--- a/src/dodal/plan_stubs/data_session.py
+++ b/src/dodal/plan_stubs/data_session.py
@@ -2,7 +2,7 @@
 from bluesky import preprocessors as bpp
 from bluesky.utils import MsgGenerator, make_decorator
 
-from dodal.common.beamlines import beamline_utils
+from dodal.common.beamlines.beamline_utils import get_path_provider
 from dodal.common.types import UpdatingPathProvider
 
 DATA_SESSION = "data_session"
@@ -31,7+31,7 @@ def attach_data_session_metadata_wrapper(
         Iterator[Msg]: Plan messages
     """
     if provider is None:
-        provider = beamline_utils.get_path_provider()
+        provider = get_path_provider()
     yield from bps.wait_for([provider.update])
     ress = yield from bps.wait_for([provider.data_session])
     data_session = ress[0].result()
diff --git a/src/dodal/plan_stubs/wrapped.py b/src/dodal/plan_stubs/wrapped.py
new file mode 100644
index 0000000000..fb5b552072
--- /dev/null
+++ b/src/dodal/plan_stubs/wrapped.py
@@ -0,0 +1,142 @@
+"""
+Wrappers for Bluesky built-in plan stubs with type hinting
+"""
+
+import itertools
+from collections.abc import Mapping
+from typing import Annotated, Any
+
+import bluesky.plan_stubs as bps
+from bluesky.protocols import Movable
+from bluesky.utils import MsgGenerator
+
+Group = Annotated[str, "String identifier used by 'wait' or stubs that await"]
+
+
+def set_absolute(
+    movable: Movable, value: Any, group: Group | None = None, wait: bool = False
+) -> MsgGenerator:
+    """
+    Set a device, wrapper for `bps.abs_set`.
+
+    Args:
+        movable (Movable): The device to set
+        value (Any): The new value
+        group (Group | None, optional): The message group to associate with the
+            setting, for sequencing. Defaults to None.
+        wait (bool, optional): Whether to wait until all setting in the group has
+            completed (e.g. a motor has finished moving). Defaults to False.
+
+    Returns:
+        MsgGenerator: Plan
+
+    Yields:
+        Iterator[MsgGenerator]: Bluesky messages
+    """
+
+    return (yield from bps.abs_set(movable, value, group=group, wait=wait))
+
+
+def set_relative(
+    movable: Movable, value: Any, group: Group | None = None, wait: bool = False
+) -> MsgGenerator:
+    """
+    Change a device by a relative amount, wrapper for `bps.rel_set`.
+
+    Args:
+        movable (Movable): The device to set
+        value (Any): The amount to change by
+        group (Group | None, optional): The message group to associate with the
+            setting, for sequencing. Defaults to None.
+        wait (bool, optional): Whether to wait until all setting in the group has
+            completed (e.g. a motor has finished moving). Defaults to False.
+
+    Returns:
+        MsgGenerator: Plan
+
+    Yields:
+        Iterator[MsgGenerator]: Bluesky messages
+    """
+
+    return (yield from bps.rel_set(movable, value, group=group, wait=wait))
+
+
+def move(moves: Mapping[Movable, Any], group: Group | None = None) -> MsgGenerator:
+    """
+    Move one or more devices to absolute positions, wrapper for `bps.mv`.
+
+    Args:
+        moves (Mapping[Movable, Any]): Mapping of Movables to target positions
+        group (Group | None, optional): The message group to associate with the
+            setting, for sequencing. Defaults to None.
+
+    Returns:
+        MsgGenerator: Plan
+
+    Yields:
+        Iterator[MsgGenerator]: Bluesky messages
+    """
+
+    return (
+        # https://github.com/bluesky/bluesky/issues/1809
+        yield from bps.mv(*itertools.chain.from_iterable(moves.items()), group=group)  # type: ignore
+    )
+
+
+def move_relative(
+    moves: Mapping[Movable, Any], group: Group | None = None
+) -> MsgGenerator:
+    """
+    Move devices relative to their current positions, wrapper for `bps.mvr`.
+
+    Args:
+        moves (Mapping[Movable, Any]): Mapping of Movables to target deltas
+        group (Group | None, optional): The message group to associate with the
+            setting, for sequencing. Defaults to None.
+
+    Returns:
+        MsgGenerator: Plan
+
+    Yields:
+        Iterator[MsgGenerator]: Bluesky messages
+    """
+
+    return (
+        # https://github.com/bluesky/bluesky/issues/1809
+        yield from bps.mvr(*itertools.chain.from_iterable(moves.items()), group=group)  # type: ignore
+    )
+
+
+def sleep(time: float) -> MsgGenerator:
+    """
+    Suspend all action for a given time, wrapper for `bps.sleep`.
+
+    Args:
+        time (float): Time to wait in seconds
+
+    Returns:
+        MsgGenerator: Plan
+
+    Yields:
+        Iterator[MsgGenerator]: Bluesky messages
+    """
+
+    return (yield from bps.sleep(time))
+
+
+def wait(group: Group | None = None) -> MsgGenerator:
+    """
+    Wait for a group status to complete, wrapper for `bps.wait`.
+
+    Args:
+        group (Group | None, optional): The name of the group to wait for, defaults
+            to None.
+
+    Returns:
+        MsgGenerator: Plan
+
+    Yields:
+        Iterator[MsgGenerator]: Bluesky messages
+    """
+
+    return (yield from bps.wait(group))
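Usage illustration (not part of the patch): a minimal sketch composing the typed stubs into a small plan, assuming ophyd-async's simulated `SimMotor` and an already-created `RunEngine`.

```python
from bluesky.run_engine import RunEngine
from ophyd_async.core import DeviceCollector
from ophyd_async.sim.demo import SimMotor

from dodal.plan_stubs import move, set_absolute, set_relative, sleep, wait

RE = RunEngine()
with DeviceCollector():
    x = SimMotor()
    y = SimMotor()


def line_up():
    # Start two absolute moves in the same group, then wait for both to complete
    yield from set_absolute(x, 1.0, group="line_up")
    yield from set_absolute(y, 2.0, group="line_up")
    yield from wait(group="line_up")
    # Nudge x from its new position, waiting for the move, then pause briefly
    yield from set_relative(x, 0.5, wait=True)
    yield from sleep(0.1)
    # Or move both at once as a single grouped operation
    yield from move({x: 0.0, y: 0.0})


RE(line_up())
```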
diff --git a/src/dodal/plans/__init__.py b/src/dodal/plans/__init__.py
index fb40245969..93f515b408 100644
--- a/src/dodal/plans/__init__.py
+++ b/src/dodal/plans/__init__.py
@@ -1,3 +1,9 @@
+"""Plans should be full experimental procedures that start and end runs and collect
+data. To aid in making plans adaptable, and to enable them to be exposed in blueapi,
+functions in this module are checked to ensure that metadata may be passed into them
+and that their arguments are non-variadic (so well-typed forms can be generated).
+"""
+
 from .scanspec import spec_scan
 from .wrapped import count
diff --git a/src/dodal/plans/scanspec.py b/src/dodal/plans/scanspec.py
index 0b9ed1603b..f74fb77d12 100644
--- a/src/dodal/plans/scanspec.py
+++ b/src/dodal/plans/scanspec.py
@@ -1,5 +1,4 @@
 import operator
-from collections.abc import Mapping
 from functools import reduce
 from typing import Annotated, Any
 
@@ -27,7 +26,7 @@ def spec_scan(
         Spec[Movable],
         Field(description="ScanSpec modelling the path of the scan"),
     ],
-    metadata: Mapping[str, Any] | None = None,
+    metadata: dict[str, Any] | None = None,
 ) -> MsgGenerator:
     """Generic plan for reading `detectors` at every point of a ScanSpec `spec`."""
     _md = {
diff --git a/src/dodal/plans/wrapped.py b/src/dodal/plans/wrapped.py
index d9c807b383..0ed779627d 100644
--- a/src/dodal/plans/wrapped.py
+++ b/src/dodal/plans/wrapped.py
@@ -33,6 +33,9 @@
     ],
     metadata: dict[str, Any] | None = None,
 ) -> MsgGenerator:
+    """Reads from a number of devices.
+    Wraps bluesky.plans.count(det, num, delay, md=metadata), exposing only serializable
+    parameters and metadata so that it can be consumed by JSONForms or similar."""
     if isinstance(delay, list):
         assert (
             delays := len(delay)
diff --git a/tests/plans/test_compliance.py b/tests/plans/test_compliance.py
index 9a9a810009..a3b036c0a8 100644
--- a/tests/plans/test_compliance.py
+++ b/tests/plans/test_compliance.py
@@ -2,9 +2,10 @@
 from types import ModuleType
 from typing import Any, get_type_hints
 
-from dls_bluesky_core.core import MsgGenerator, PlanGenerator
+from bluesky.utils import MsgGenerator
 
 from dodal import plan_stubs, plans
+from dodal.common.types import PlanGenerator
 
 
 def is_bluesky_plan_generator(func: Any) -> bool:
@@ -20,10 +21,10 @@ def get_named_subset(names: list[str]):
         for name in names:
             yield getattr(mod, name)
 
-    if "__export__" in mod.__dict__:
-        yield from get_named_subset(mod.get("__export__"))
-    elif "__all__" in mod.__dict__:
-        yield from get_named_subset(mod.get("__all__"))
+    if explicit_exports := mod.__dict__.get("__export__"):
+        yield from get_named_subset(explicit_exports)
+    elif implicit_exports := mod.__dict__.get("__all__"):
+        yield from get_named_subset(implicit_exports)
     else:
         for name, value in mod.__dict__.items():
             if not name.startswith("_"):
@@ -60,9 +61,9 @@ def test_stubs_comply():
-    for plan in get_all_available_generators(plan_stubs):
-        if is_bluesky_plan_generator(plan):
-            signature = inspect.Signature.from_callable(plan)
-            assert_hard_requirements(plan, signature)
+    for stub in get_all_available_generators(plan_stubs):
+        if is_bluesky_plan_generator(stub):
+            signature = inspect.Signature.from_callable(stub)
+            assert_hard_requirements(stub, signature)
             if "metadata" in signature.parameters:
-                assert_metadata_requirements(plan, signature)
+                assert_metadata_requirements(stub, signature)
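To make the compliance requirements concrete, here is an illustrative plan signature (not taken from the patch) that would pass these checks: every parameter typed, no *args/**kwargs, and `metadata` optional with a `None` default.

```python
from typing import Any

import bluesky.plan_stubs as bps
from bluesky.protocols import Readable
from bluesky.utils import MsgGenerator


def read_once(
    detector: Readable, metadata: dict[str, Any] | None = None
) -> MsgGenerator:
    # Open a run carrying the caller's metadata, take a single reading, close the run
    yield from bps.open_run(md=metadata or {})
    yield from bps.create()
    yield from bps.read(detector)
    yield from bps.save()
    yield from bps.close_run()
```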
diff --git a/tests/plans/test_scanspec.py b/tests/plans/test_scanspec.py
index b7eb1437cd..9bdc81a8a8 100644
--- a/tests/plans/test_scanspec.py
+++ b/tests/plans/test_scanspec.py
@@ -1,85 +1,97 @@
 from pathlib import Path
+from unittest.mock import patch
 
 import pytest
 from bluesky.run_engine import RunEngine
 from event_model.documents import (
     DocumentType,
+    Event,
+    EventDescriptor,
+    RunStart,
+    RunStop,
 )
 from ophyd_async.core import (
     DeviceCollector,
     PathProvider,
-    callback_on_mock_put,
-    set_mock_value,
+    StandardDetector,
 )
-from ophyd_async.epics.adaravis import AravisDetector
-from ophyd_async.epics.motor import Motor
-from scanspec.specs import Line, Spiral
+from ophyd_async.sim.demo import PatternDetector, SimMotor
+from scanspec.specs import Line
 
-from dodal.common.beamlines.beamline_utils import set_path_provider
-from dodal.common.visit import StaticVisitPathProvider
 from dodal.plans import spec_scan
 
 
 @pytest.fixture
-def x_axis(RE: RunEngine) -> Motor:
+def det(RE: RunEngine, tmp_path: Path) -> StandardDetector:
     with DeviceCollector(mock=True):
-        x_axis = Motor("DUMMY:X:")
-    set_mock_value(x_axis.velocity, 1)
-    return x_axis
+        det = PatternDetector(tmp_path / "foo.h5")
+    return det
 
 
 @pytest.fixture
-def y_axis(RE: RunEngine) -> Motor:
+def x_axis(RE: RunEngine) -> SimMotor:
     with DeviceCollector(mock=True):
-        y_axis = Motor("DUMMY:X:")
-    set_mock_value(y_axis.velocity, 1)
-    return y_axis
-
-
-@pytest.fixture
-def path_provider(static_path_provider: PathProvider):
-    assert isinstance(static_path_provider, StaticVisitPathProvider)
-    set_path_provider(static_path_provider)
-    yield static_path_provider
-    set_path_provider(None)  # type: ignore
+        x_axis = SimMotor()
+    return x_axis
 
 
 @pytest.fixture
-def det(RE: RunEngine, path_provider: PathProvider, tmp_path: Path) -> AravisDetector:
+def y_axis(RE: RunEngine) -> SimMotor:
     with DeviceCollector(mock=True):
-        det = AravisDetector("DUMMY:DET", path_provider=path_provider)
-
-    def ready_to_write(file_name: str, *_, **__):
-        set_mock_value(det.hdf.file_path_exists, True)
-        set_mock_value(det.hdf.full_file_name, str(tmp_path / f"{file_name}.h5"))
-
-    callback_on_mock_put(det.hdf.file_path, ready_to_write)
-    set_mock_value(det.hdf.capture, True)
-
-    return det
-
-
-def test_metadata_of_simple_spec(RE: RunEngine, x_axis: Motor, det: AravisDetector):
-    spec = Line(axis=x_axis, start=1, stop=2, num=3)
-
-    docs: list[tuple[str, DocumentType]] = []
-
-    def capture_doc(name: str, doc: DocumentType):
-        docs.append((name, doc))
+        y_axis = SimMotor()
+    return y_axis
 
-    RE(spec_scan({det}, spec), capture_doc)
-    # Start, Descriptor, StreamResource, StreamDatum, Event * 3, Stop
-    assert len(docs) == 8
 
+@pytest.fixture
+def path_provider(static_path_provider: PathProvider):
+    # Prevents issue with leftover state from beamline tests
+    with patch("dodal.plan_stubs.data_session.get_path_provider") as mock:
+        mock.return_value = static_path_provider
+        yield
 
-def test_metadata_of_spiral_spec(
-    RE: RunEngine, x_axis: Motor, y_axis: Motor, det: AravisDetector
+
+def test_output_of_simple_spec(
+    RE: RunEngine, x_axis: SimMotor, det: StandardDetector, path_provider
 ):
-    spec = Spiral.spaced(x_axis, y_axis, 0, 0, 5, 1)
-    docs: list[tuple[str, DocumentType]] = []
-
-    def capture_doc(name: str, doc: DocumentType):
-        docs.append((name, doc))
-
-    RE(spec_scan({det}, spec), capture_doc)
+    docs: dict[str, list[DocumentType]] = {}
+    RE(
+        spec_scan(
+            {det},
+            Line(axis=x_axis, start=1, stop=2, num=3),
+        ),
+        lambda name, doc: docs.setdefault(name, []).append(doc),
+    )
+    for metadata_doc in ("start", "stop", "descriptor"):
+        assert metadata_doc in docs
+        assert len(docs[metadata_doc]) == 1
+
+    start: RunStart = docs["start"][0]  # type: ignore (cannot type check TypedDict)
+    assert (hints := start.get("hints")) and (
+        hints.get("dimensions") == [([x_axis.user_readback.name], "primary")]
+    )
+    assert start.get("shape") == (3,)
+
+    descriptor: EventDescriptor = docs["descriptor"][0]  # type: ignore
+    assert x_axis.name in descriptor.get("object_keys", {})
+    assert det.name in descriptor.get("object_keys", {})
+
+    stop: RunStop = docs["stop"][0]  # type: ignore
+    assert stop.get("exit_status") == "success"
+    assert stop.get("num_events") == {"primary": 3}
+    assert stop.get("run_start") == start.get("uid")
+
+    assert "event" in docs
+
+    initial_position = 1.0
+    step = 0.5
+    for doc, index in zip(docs["event"], range(1, 4), strict=True):
+        event: Event = doc  # type: ignore
+        location = initial_position + ((index - 1) * step)
+        assert event.get("data").get(x_axis.user_readback.name) == location
+
+    # Output of detector not linked to Spec, just check that dets are all triggered
+    assert "stream_resource" in docs
+    assert len(docs["stream_resource"]) == 2  # det, det.sum
+
+    assert "stream_datum" in docs
+    assert len(docs["stream_datum"]) == 3 * 2  # each point per resource
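Finally, a hypothetical interactive sketch (not part of the patch) mirroring the test above: running `spec_scan` over a simulated motor and detector. It assumes any path/data-session infrastructure the plan relies on has been configured by the usual beamline setup (the test sidesteps this by patching `get_path_provider`); the device names and output path are arbitrary examples.

```python
from pathlib import Path

from bluesky.run_engine import RunEngine
from ophyd_async.core import DeviceCollector
from ophyd_async.sim.demo import PatternDetector, SimMotor
from scanspec.specs import Line

from dodal.plans import spec_scan

RE = RunEngine()
with DeviceCollector():
    motor = SimMotor()
    detector = PatternDetector(Path("/tmp/pattern.h5"))  # example output path

# Three points from 1 to 2; metadata flows straight into the run's start document
RE(
    spec_scan(
        {detector},
        Line(axis=motor, start=1, stop=2, num=3),
        metadata={"sample": "example"},
    )
)
```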