Skip to content

Commit

Permalink
cleaned up after rebase
Browse files Browse the repository at this point in the history
  • Loading branch information
evalott100 committed Apr 19, 2024
1 parent d7b3246 commit b1d892a
Show file tree
Hide file tree
Showing 7 changed files with 43 additions and 70 deletions.
22 changes: 9 additions & 13 deletions src/ophyd_async/epics/pvi/pvi.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,14 +57,14 @@ def _split_subscript(tp: T) -> Union[Tuple[Any, Tuple[Any]], Tuple[T, None]]:
return tp, None


def _strip_union(field: Union[Union[T], T]) -> Tuple[T, bool]:
    """Unwrap an ``Optional``/``Union`` annotation.

    Returns a tuple ``(inner_type, is_optional)``: for a ``Union`` the first
    non-``NoneType`` member is returned together with a flag saying whether
    ``NoneType`` was among the members; any non-``Union`` annotation is
    returned unchanged with ``False``.
    """
    if get_origin(field) is not Union:
        # Plain (non-Union) annotation: nothing to strip.
        return field, False
    members = get_args(field)
    is_optional = type(None) in members
    for member in members:
        if member is not type(None):
            return member, is_optional
    # Degenerate case: a Union made up only of NoneType members — mirror the
    # original fall-through and report the annotation as non-optional.
    return field, False


def _strip_device_vector(field: Union[Type[Device]]) -> Tuple[bool, Type[Device]]:
Expand Down Expand Up @@ -131,7 +131,7 @@ def _parse_type(
):
if common_device_type:
# pre-defined type
device_cls = _strip_union(common_device_type)
device_cls, _ = _strip_union(common_device_type)
is_device_vector, device_cls = _strip_device_vector(device_cls)
device_cls, device_args = _split_subscript(device_cls)
assert issubclass(device_cls, Device)
Expand Down Expand Up @@ -165,7 +165,7 @@ def _sim_common_blocks(device: Device, stripped_type: Optional[Type] = None):
)

for device_name, device_cls in sub_devices:
device_cls = _strip_union(device_cls)
device_cls, _ = _strip_union(device_cls)
is_device_vector, device_cls = _strip_device_vector(device_cls)
device_cls, device_args = _split_subscript(device_cls)
assert issubclass(device_cls, Device)
Expand All @@ -190,8 +190,7 @@ def _sim_common_blocks(device: Device, stripped_type: Optional[Type] = None):
if is_signal:
sub_device = device_cls(SimSignalBackend(signal_dtype, device_name))
else:
sub_device = device_cls()

sub_device = getattr(device, device_name, device_cls())
_sim_common_blocks(sub_device, stripped_type=device_cls)

setattr(device, device_name, sub_device)
Expand Down Expand Up @@ -226,10 +225,7 @@ async def _get_pvi_entries(entry: PVIEntry, timeout=DEFAULT_TIMEOUT):
if is_signal:
device = _pvi_mapping[frozenset(pva_entries.keys())](signal_dtype, *pvs)
else:
if hasattr(entry.device, sub_name):
device = getattr(entry.device, sub_name)
else:
device = device_type()
device = getattr(entry.device, sub_name, device_type())

sub_entry = PVIEntry(
device=device, common_device_type=device_type, sub_entries={}
Expand Down
2 changes: 1 addition & 1 deletion src/ophyd_async/panda/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from ._common_panda import (
from ._common_blocks import (
CommonPandaBlocks,
DataBlock,
PcapBlock,
Expand Down
File renamed without changes.
2 changes: 1 addition & 1 deletion src/ophyd_async/panda/_hdf_panda.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
)
from ophyd_async.epics.pvi import create_children_from_annotations, fill_pvi_entries

from ._common_panda import CommonPandaBlocks
from ._common_blocks import CommonPandaBlocks
from ._panda_controller import PandaPcapController
from .writers._hdf_writer import PandaHDFWriter

Expand Down
79 changes: 27 additions & 52 deletions tests/panda/test_hdf_panda.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,11 @@
)


def assert_emitted(docs: Dict[str, list], **numbers: int):
    """Assert that ``docs`` holds exactly the expected document streams.

    The document names in ``docs`` must appear in the same order as the
    keyword arguments, and each stream must contain exactly the given
    number of documents.
    """
    expected_names = list(numbers)
    assert list(docs) == expected_names
    actual_counts = {name: len(emitted) for name, emitted in docs.items()}
    assert actual_counts == numbers


class MockPandaPcapController(DetectorControl):
def __init__(self, pcap: PcapBlock) -> None:
self.pcap = pcap
Expand Down Expand Up @@ -89,12 +94,12 @@ async def test_hdf_panda_hardware_triggered_flyable(
RE: RunEngine,
sim_hdf_panda,
):
names = []
docs = []
docs = {}

def append_and_print(name, doc):
    # Group every emitted document under its document-type name, creating
    # the list for a name the first time it is seen.
    docs.setdefault(name, []).append(doc)

RE.subscribe(append_and_print)

Expand Down Expand Up @@ -158,56 +163,30 @@ def flying_plan():
# fly scan
RE(flying_plan())

assert names == [
"start",
"descriptor",
"stream_resource",
"stream_resource",
"stream_datum",
"stream_datum",
"stop",
]
named_docs = dict(
zip(
[
"start",
"descriptor",
"stream_resource_a",
"stream_resource_b",
"stream_datum_a",
"stream_datum_b",
"stop",
],
docs,
)
assert_emitted(
docs, start=1, descriptor=1, stream_resource=2, stream_datum=2, stop=1
)

# test descriptor
data_key_names: Dict[str, str] = named_docs["descriptor"]["object_keys"]["panda"]
data_key_names: Dict[str, str] = docs["descriptor"][0]["object_keys"]["panda"]
assert data_key_names == [
"panda-block_a-test-Min",
"panda-block_b-test-Diff",
]
for data_key_name in data_key_names:
assert (
named_docs["descriptor"]["data_keys"][data_key_name]["source"]
docs["descriptor"][0]["data_keys"][data_key_name]["source"]
== "sim://hdf_directory"
)

# test stream resources
for block_letter, data_key_name in zip(("a", "b"), data_key_names):
assert (
named_docs[f"stream_resource_{block_letter}"]["data_key"] == data_key_name
)
assert (
named_docs[f"stream_resource_{block_letter}"]["spec"]
== "AD_HDF5_SWMR_SLICE"
)
assert (
named_docs[f"stream_resource_{block_letter}"]["run_start"]
== named_docs["start"]["uid"]
)
assert named_docs[f"stream_resource_{block_letter}"]["resource_kwargs"] == {
for block_letter, stream_resource, data_key_name in zip(
("a", "b"), docs["stream_resource"], data_key_names
):
assert stream_resource["data_key"] == data_key_name
assert stream_resource["spec"] == "AD_HDF5_SWMR_SLICE"
assert stream_resource["run_start"] == docs["start"][0]["uid"]
assert stream_resource["resource_kwargs"] == {
"block": f"block_{block_letter}",
"multiplier": 1,
"name": data_key_name,
Expand All @@ -216,20 +195,16 @@ def flying_plan():
}

# test stream datum
for block_letter in ("a", "b"):
assert (
named_docs[f"stream_datum_{block_letter}"]["descriptor"]
== named_docs["descriptor"]["uid"]
)
assert named_docs[f"stream_datum_{block_letter}"]["seq_nums"] == {
for stream_datum in docs["stream_datum"]:
assert stream_datum["descriptor"] == docs["descriptor"][0]["uid"]
assert stream_datum["seq_nums"] == {
"start": 1,
"stop": 2,
}
assert named_docs[f"stream_datum_{block_letter}"]["indices"] == {
assert stream_datum["indices"] == {
"start": 0,
"stop": 1,
}
assert (
named_docs[f"stream_datum_{block_letter}"]["stream_resource"]
== named_docs[f"stream_resource_{block_letter}"]["uid"]
)
assert stream_datum["stream_resource"] in [
sd["uid"].split("/")[0] for sd in docs["stream_datum"]
]
2 changes: 1 addition & 1 deletion tests/panda/test_panda_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from ophyd_async.epics.pvi import fill_pvi_entries
from ophyd_async.epics.signal import epics_signal_rw
from ophyd_async.panda import CommonPandaBlocks, TimeUnits
from ophyd_async.panda._common_panda import DataBlock
from ophyd_async.panda._common_blocks import DataBlock
from ophyd_async.panda._utils import phase_sorter


Expand Down
6 changes: 4 additions & 2 deletions tests/panda/test_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,13 @@ async def sim_panda(panda_t):
sim_panda = panda_t("SIM_PANDA", name="sim_panda")

set_sim_value(
sim_panda.block_a.test_capture, Capture.MinMaxMean # type: ignore[attr-defined]
sim_panda.block_a.test_capture,
Capture.MinMaxMean, # type: ignore[attr-defined]
)

set_sim_value(
sim_panda.block_b.test_capture, Capture.No # type: ignore[attr-defined]
sim_panda.block_b.test_capture,
Capture.No, # type: ignore[attr-defined]
)

return sim_panda
Expand Down

0 comments on commit b1d892a

Please sign in to comment.