From 39d83c0ba1ef734367e5f411dbb2f150b8089bde Mon Sep 17 00:00:00 2001 From: James Souter Date: Fri, 17 Nov 2023 10:46:41 +0000 Subject: [PATCH 01/12] Add FullFileName and NumCaptured records Fix logic issue when EndData packet received --- src/pandablocks_ioc/_hdf_ioc.py | 45 ++++++++++++++++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index f3543039..9efe0522 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -59,6 +59,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): length=path_length, DESC="File path for HDF5 files", validate=self._parameter_validate, + on_update=self._update_full_file_name, ) add_pvi_info( PviGroup.INPUTS, @@ -76,6 +77,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): length=filename_length, DESC="File name prefix for HDF5 files", validate=self._parameter_validate, + on_update=self._update_full_file_name ) add_pvi_info( PviGroup.INPUTS, @@ -87,6 +89,22 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + file_name_record_name.upper() ) + full_file_name_record_name = EpicsName(self._HDF5_PREFIX + ":FullFileName") + self._full_file_name_record = builder.longStringOut( + full_file_name_record_name, + length=path_length + 1 + filename_length, + DESC="Full HDF5 file name with path", + ) + add_pvi_info( + PviGroup.INPUTS, + self._full_file_name_record, + full_file_name_record_name, + builder.longStringOut, + ) + self._file_name_record.add_alias( + record_prefix + ":" + full_file_name_record_name.upper() + ) + num_capture_record_name = EpicsName(self._HDF5_PREFIX + ":NumCapture") self._num_capture_record = builder.longOut( num_capture_record_name, @@ -106,6 +124,24 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + num_capture_record_name.upper() ) + num_captured_record_name = EpicsName(self._HDF5_PREFIX + ":NumCaptured") + self._num_captured_record = builder.longOut( + num_captured_record_name, + initial_value=0, + DESC="Number of frames captured.", + DRVL=0, + ) + + add_pvi_info( + PviGroup.INPUTS, + self._num_captured_record, + num_captured_record_name, + builder.longOut, + ) + self._num_captured_record.add_alias( + record_prefix + ":" + num_captured_record_name.upper() + ) + flush_period_record_name = EpicsName(self._HDF5_PREFIX + ":FlushPeriod") self._flush_period_record = builder.aOut( flush_period_record_name, @@ -187,6 +223,9 @@ def _parameter_validate(self, record: RecordWrapper, new_val) -> bool: return False return True + async def _update_full_file_name(self, new_val) -> None: + self._full_file_name_record.set(self._get_filename()) + async def _handle_hdf5_data(self) -> None: """Handles writing HDF5 data from the PandA to file, based on configuration in the various HDF5 records. 
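Patch 01 above registers `_update_full_file_name` as the `on_update` hook of both the FilePath and FileName records, so the derived FullFileName record is refreshed whenever either input changes. Below is a minimal standalone sketch of that linked-record pattern in pythonSoftIOC; the prefix, record names and lengths are illustrative only (the patch sizes its records from `os.pathconf`), and the derived record is shown as the read-only `longStringIn` variant:

```python
# Sketch only: a derived record kept in sync by on_update hooks on its inputs.
# "DEMO:HDF5" and the record lengths are made up for this example.
from softioc import asyncio_dispatcher, builder, softioc

builder.SetDeviceName("DEMO:HDF5")

def update_full_name(_new_val):
    # Mirrors _get_filename(): join directory and file name with "/"
    full_rec.set("/".join((path_rec.get(), name_rec.get())))

path_rec = builder.longStringOut("FilePath", length=256, on_update=update_full_name)
name_rec = builder.longStringOut("FileName", length=64, on_update=update_full_name)
full_rec = builder.longStringIn("FullFileName", length=256 + 1 + 64)

dispatcher = asyncio_dispatcher.AsyncioDispatcher()
builder.LoadDatabase()
softioc.iocInit(dispatcher)
softioc.interactive_ioc(globals())
```

A `caput DEMO:HDF5:FileName name.h5` from any client then refreshes `DEMO:HDF5:FullFileName` with the joined path, which is the behaviour the tests added in patch 02 assert over Channel Access.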
@@ -197,6 +236,7 @@ async def _handle_hdf5_data(self) -> None: # disabled start_data: Optional[StartData] = None captured_frames: int = 0 + self._num_captured_record.set(captured_frames) # Only one filename - user must stop capture and set new FileName/FilePath # for new files pipeline: List[Pipeline] = create_default_pipeline( @@ -250,6 +290,7 @@ async def _handle_hdf5_data(self) -> None: captured_frames = num_frames_to_capture pipeline[0].queue.put_nowait(data) + self._num_captured_record.set(captured_frames) if ( num_frames_to_capture > 0 @@ -267,7 +308,9 @@ async def _handle_hdf5_data(self) -> None: EndData(captured_frames, EndReason.OK) ) break - elif not isinstance(data, EndData): + elif isinstance(data, EndData): + break + else: raise RuntimeError( f"Data was recieved that was of type {type(data)}, not" "StartData, EndData, ReadyData or FrameData" From e912b77f6cbebbe79812e364e4b08c0ce27da271 Mon Sep 17 00:00:00 2001 From: James Souter Date: Fri, 17 Nov 2023 14:55:12 +0000 Subject: [PATCH 02/12] test NumCaptured and FullFileName in test_hdf5_file_writing --- src/pandablocks_ioc/_hdf_ioc.py | 2 +- tests/test_hdf_ioc.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index 9efe0522..4f95d77d 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -77,7 +77,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): length=filename_length, DESC="File name prefix for HDF5 files", validate=self._parameter_validate, - on_update=self._update_full_file_name + on_update=self._update_full_file_name, ) add_pvi_info( PviGroup.INPUTS, diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py index 36ef4912..88c73365 100644 --- a/tests/test_hdf_ioc.py +++ b/tests/test_hdf_ioc.py @@ -395,6 +395,10 @@ async def test_hdf5_file_writing( val = await caget(hdf5_test_prefix + ":FilePath") assert val.tobytes().decode() == test_dir + val = await caget(hdf5_test_prefix + ":FullFileName") + # slash appended to file path for full file name + assert val.tobytes().decode() == "/".join([str(tmp_path), "\0"]) + await caput( hdf5_test_prefix + ":FileName", _string_to_buffer(test_filename), @@ -404,6 +408,10 @@ async def test_hdf5_file_writing( val = await caget(hdf5_test_prefix + ":FileName") assert val.tobytes().decode() == test_filename + val = await caget(hdf5_test_prefix + ":FullFileName") + # value has \0 terminator, like test_filename + assert val.tobytes().decode() == "/".join([str(tmp_path), test_filename]) + # Only a single FrameData in the example data assert await caget(hdf5_test_prefix + ":NumCapture") == 0 await caput( @@ -423,7 +431,7 @@ async def test_hdf5_file_writing( assert await capturing_queue.get() == 0 await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) - + assert await caget(hdf5_test_prefix + ":NumCaptured") <= num_capture assert await capturing_queue.get() == 1 # The HDF5 data will be processed, and when it's done Capturing is set to 0 @@ -434,7 +442,7 @@ async def test_hdf5_file_writing( # Close capture, thus closing hdf5 file await caput(hdf5_test_prefix + ":Capture", 0, wait=True) assert await caget(hdf5_test_prefix + ":Capture") == 0 - + assert await caget(hdf5_test_prefix + ":NumCaptured") == num_capture # Confirm file contains data we expect hdf_file = h5py.File(tmp_path / test_filename[:-1], "r") assert list(hdf_file) == [ From b248cc404931c97202c8dd2e4915c62c2c3a0366 Mon Sep 17 00:00:00 2001 From: James Souter Date: Mon, 20 
Nov 2023 15:15:08 +0000 Subject: [PATCH 03/12] Put EndReason.OK on receiving Ok packet --- src/pandablocks_ioc/_hdf_ioc.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index 4f95d77d..09560546 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -309,6 +309,9 @@ async def _handle_hdf5_data(self) -> None: ) break elif isinstance(data, EndData): + pipeline[0].queue.put_nowait( + EndData(captured_frames, EndReason.OK) + ) break else: raise RuntimeError( From 6c724d710a74243eb09d669433e70d648a2ba46a Mon Sep 17 00:00:00 2001 From: James Souter Date: Tue, 21 Nov 2023 16:15:49 +0000 Subject: [PATCH 04/12] Set captured frames from dataset size during acquisition. rename NumCaptured to NumWritten_RBV --- src/pandablocks_ioc/_hdf_ioc.py | 53 +++++++++++++++++++-------------- tests/test_hdf_ioc.py | 4 +-- 2 files changed, 33 insertions(+), 24 deletions(-) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index 09560546..4e33d0ce 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -50,6 +50,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): path_length = os.pathconf("/", "PC_PATH_MAX") filename_length = os.pathconf("/", "PC_NAME_MAX") + self._captured_frames = 0 # Create the records, including an uppercase alias for each # Naming convention and settings (mostly) copied from FSCN2 HDF5 records @@ -124,22 +125,22 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + num_capture_record_name.upper() ) - num_captured_record_name = EpicsName(self._HDF5_PREFIX + ":NumCaptured") - self._num_captured_record = builder.longOut( - num_captured_record_name, + num_written_record_name = EpicsName(self._HDF5_PREFIX + ":NumWritten_RBV") + self._num_written_record = builder.longOut( + num_written_record_name, initial_value=0, - DESC="Number of frames captured.", + DESC="Number of frames written to HDF file.", DRVL=0, ) add_pvi_info( PviGroup.INPUTS, - self._num_captured_record, - num_captured_record_name, + self._num_written_record, + num_written_record_name, builder.longOut, ) - self._num_captured_record.add_alias( - record_prefix + ":" + num_captured_record_name.upper() + self._num_written_record.add_alias( + record_prefix + ":" + num_written_record_name.upper() ) flush_period_record_name = EpicsName(self._HDF5_PREFIX + ":FlushPeriod") @@ -235,8 +236,8 @@ async def _handle_hdf5_data(self) -> None: # capture, and thus new StartData, is sent without Capture ever being # disabled start_data: Optional[StartData] = None - captured_frames: int = 0 - self._num_captured_record.set(captured_frames) + self._captured_frames: int = 0 + self._num_written_record.set(self._captured_frames) # Only one filename - user must stop capture and set new FileName/FilePath # for new files pipeline: List[Pipeline] = create_default_pipeline( @@ -266,7 +267,9 @@ async def _handle_hdf5_data(self) -> None: alarm=alarm.STATE_ALARM, ) pipeline[0].queue.put_nowait( - EndData(captured_frames, EndReason.START_DATA_MISMATCH) + EndData( + self._captured_frames, EndReason.START_DATA_MISMATCH + ) ) break @@ -278,23 +281,26 @@ async def _handle_hdf5_data(self) -> None: pipeline[0].queue.put_nowait(data) elif isinstance(data, FrameData): - captured_frames += len(data.data) + self._captured_frames += len(data.data) num_frames_to_capture: int = self._num_capture_record.get() if ( num_frames_to_capture > 0 - and captured_frames > 
num_frames_to_capture + and self._captured_frames > num_frames_to_capture ): # Discard extra collected data points if necessary - data.data = data.data[: num_frames_to_capture - captured_frames] - captured_frames = num_frames_to_capture - + data.data = data.data[ + : num_frames_to_capture - self._captured_frames + ] + self._captured_frames = num_frames_to_capture pipeline[0].queue.put_nowait(data) - self._num_captured_record.set(captured_frames) + sizes = [ds.size for ds in pipeline[1].datasets] + if sizes: # not empty + self._num_written_record.set(min(sizes)) if ( num_frames_to_capture > 0 - and captured_frames >= num_frames_to_capture + and self._captured_frames >= num_frames_to_capture ): # Reached configured capture limit, stop the file logging.info( @@ -305,12 +311,12 @@ async def _handle_hdf5_data(self) -> None: "Requested number of frames captured" ) pipeline[0].queue.put_nowait( - EndData(captured_frames, EndReason.OK) + EndData(self._captured_frames, EndReason.OK) ) break elif isinstance(data, EndData): pipeline[0].queue.put_nowait( - EndData(captured_frames, EndReason.OK) + EndData(self._captured_frames, EndReason.OK) ) break else: @@ -327,7 +333,9 @@ async def _handle_hdf5_data(self) -> None: # Only send EndData if we know the file was opened - could be cancelled # before PandA has actually send any data if start_data: - pipeline[0].queue.put_nowait(EndData(captured_frames, EndReason.OK)) + pipeline[0].queue.put_nowait( + EndData(self._captured_frames, EndReason.OK) + ) except Exception: logging.exception("HDF5 data capture terminated due to unexpected error") @@ -340,12 +348,13 @@ async def _handle_hdf5_data(self) -> None: # before file was opened if start_data: pipeline[0].queue.put_nowait( - EndData(captured_frames, EndReason.UNKNOWN_EXCEPTION) + EndData(self._captured_frames, EndReason.UNKNOWN_EXCEPTION) ) finally: logging.debug("Finishing processing HDF5 PandA data") stop_pipeline(pipeline) + self._num_written_record.set(self._captured_frames) self._capture_control_record.set(0) self._currently_capturing_record.set(0) diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py index 88c73365..b678bf44 100644 --- a/tests/test_hdf_ioc.py +++ b/tests/test_hdf_ioc.py @@ -431,7 +431,7 @@ async def test_hdf5_file_writing( assert await capturing_queue.get() == 0 await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) - assert await caget(hdf5_test_prefix + ":NumCaptured") <= num_capture + assert await caget(hdf5_test_prefix + ":NumWritten_RBV") <= num_capture assert await capturing_queue.get() == 1 # The HDF5 data will be processed, and when it's done Capturing is set to 0 @@ -442,7 +442,7 @@ async def test_hdf5_file_writing( # Close capture, thus closing hdf5 file await caput(hdf5_test_prefix + ":Capture", 0, wait=True) assert await caget(hdf5_test_prefix + ":Capture") == 0 - assert await caget(hdf5_test_prefix + ":NumCaptured") == num_capture + assert await caget(hdf5_test_prefix + ":NumWritten_RBV") == num_capture # Confirm file contains data we expect hdf_file = h5py.File(tmp_path / test_filename[:-1], "r") assert list(hdf_file) == [ From ed8f118e0266f6055eece6d1ca74ff82f30fc221 Mon Sep 17 00:00:00 2001 From: James Souter Date: Wed, 22 Nov 2023 09:21:32 +0000 Subject: [PATCH 05/12] revert captured_frames lifetime to _handle_hdf5_data --- src/pandablocks_ioc/_hdf_ioc.py | 35 +++++++++++---------------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index 4e33d0ce..f7b83d9e 
100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -50,7 +50,6 @@ def __init__(self, client: AsyncioClient, record_prefix: str): path_length = os.pathconf("/", "PC_PATH_MAX") filename_length = os.pathconf("/", "PC_NAME_MAX") - self._captured_frames = 0 # Create the records, including an uppercase alias for each # Naming convention and settings (mostly) copied from FSCN2 HDF5 records @@ -231,13 +230,13 @@ async def _handle_hdf5_data(self) -> None: """Handles writing HDF5 data from the PandA to file, based on configuration in the various HDF5 records. This method expects to be run as an asyncio Task.""" + captured_frames: int = 0 try: # Keep the start data around to compare against, for the case where a new # capture, and thus new StartData, is sent without Capture ever being # disabled start_data: Optional[StartData] = None - self._captured_frames: int = 0 - self._num_written_record.set(self._captured_frames) + self._num_written_record.set(captured_frames) # Only one filename - user must stop capture and set new FileName/FilePath # for new files pipeline: List[Pipeline] = create_default_pipeline( @@ -267,9 +266,7 @@ async def _handle_hdf5_data(self) -> None: alarm=alarm.STATE_ALARM, ) pipeline[0].queue.put_nowait( - EndData( - self._captured_frames, EndReason.START_DATA_MISMATCH - ) + EndData(captured_frames, EndReason.START_DATA_MISMATCH) ) break @@ -281,18 +278,14 @@ async def _handle_hdf5_data(self) -> None: pipeline[0].queue.put_nowait(data) elif isinstance(data, FrameData): - self._captured_frames += len(data.data) - num_frames_to_capture: int = self._num_capture_record.get() if ( num_frames_to_capture > 0 - and self._captured_frames > num_frames_to_capture + and captured_frames > num_frames_to_capture ): # Discard extra collected data points if necessary - data.data = data.data[ - : num_frames_to_capture - self._captured_frames - ] - self._captured_frames = num_frames_to_capture + data.data = data.data[: num_frames_to_capture - captured_frames] + captured_frames = num_frames_to_capture pipeline[0].queue.put_nowait(data) sizes = [ds.size for ds in pipeline[1].datasets] if sizes: # not empty @@ -300,7 +293,7 @@ async def _handle_hdf5_data(self) -> None: if ( num_frames_to_capture > 0 - and self._captured_frames >= num_frames_to_capture + and captured_frames >= num_frames_to_capture ): # Reached configured capture limit, stop the file logging.info( @@ -311,13 +304,11 @@ async def _handle_hdf5_data(self) -> None: "Requested number of frames captured" ) pipeline[0].queue.put_nowait( - EndData(self._captured_frames, EndReason.OK) + EndData(captured_frames, EndReason.OK) ) break elif isinstance(data, EndData): - pipeline[0].queue.put_nowait( - EndData(self._captured_frames, EndReason.OK) - ) + pipeline[0].queue.put_nowait(EndData(captured_frames, EndReason.OK)) break else: raise RuntimeError( @@ -333,9 +324,7 @@ async def _handle_hdf5_data(self) -> None: # Only send EndData if we know the file was opened - could be cancelled # before PandA has actually send any data if start_data: - pipeline[0].queue.put_nowait( - EndData(self._captured_frames, EndReason.OK) - ) + pipeline[0].queue.put_nowait(EndData(captured_frames, EndReason.OK)) except Exception: logging.exception("HDF5 data capture terminated due to unexpected error") @@ -348,13 +337,13 @@ async def _handle_hdf5_data(self) -> None: # before file was opened if start_data: pipeline[0].queue.put_nowait( - EndData(self._captured_frames, EndReason.UNKNOWN_EXCEPTION) + EndData(captured_frames, 
EndReason.UNKNOWN_EXCEPTION) ) finally: logging.debug("Finishing processing HDF5 PandA data") stop_pipeline(pipeline) - self._num_written_record.set(self._captured_frames) + self._num_written_record.set(captured_frames) self._capture_control_record.set(0) self._currently_capturing_record.set(0) From c1a214eddbfdc40341f9ea9126d4f38bbd14e7da Mon Sep 17 00:00:00 2001 From: Eva Lott Date: Wed, 22 Nov 2023 13:10:14 +0000 Subject: [PATCH 06/12] Empty commit to update setuptools_scm From f9fe0cd870b4b61cdf17aa54f181f4eee7c57f82 Mon Sep 17 00:00:00 2001 From: Eva Lott Date: Tue, 28 Nov 2023 15:31:22 +0000 Subject: [PATCH 07/12] Added an HDF buffer Implemented James' changes. Separated out the code that deals with framedata into a new HDF5Buffer class. Adjusted tests. Also wrote the Forever and Last_N capture modes. --- src/pandablocks_ioc/_hdf_ioc.py | 467 ++++++++++++------ src/pandablocks_ioc/_pvi.py | 14 +- tests/test-bobfiles/{HDF5.bob => DATA.bob} | 63 ++- tests/test-bobfiles/index.bob | 2 +- tests/test_hdf_ioc.py | 531 ++++++++++++++++++--- 5 files changed, 860 insertions(+), 217 deletions(-) rename tests/test-bobfiles/{HDF5.bob => DATA.bob} (73%) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index f7b83d9e..1ba39c6f 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -2,14 +2,15 @@ import logging import os from asyncio import CancelledError +from collections import deque +from enum import Enum from importlib.util import find_spec -from typing import List, Optional +from typing import Callable, Deque, Optional, Union from pandablocks.asyncio import AsyncioClient from pandablocks.hdf import ( EndData, FrameData, - Pipeline, StartData, create_default_pipeline, stop_pipeline, @@ -21,15 +22,255 @@ from ._pvi import PviGroup, add_pvi_info from ._types import ONAM_STR, ZNAM_STR, EpicsName +HDFReceived = Union[ReadyData, StartData, FrameData, EndData] + + +class CaptureMode(Enum): + """ + The mode which the circular buffer will use to flush + """ + + #: Wait till N frames are received then write them + #: and finish capture + FIRST_N = 0 + + #: On EndData write the last N frames + LAST_N = 1 + + #: Write data as received until Capture set to 0 + FOREVER = 2 + + +class HDF5Buffer: + _buffer_index = None + start_data = None + number_of_received_rows = 0 + finish_capturing = False + circular_buffer: Deque[FrameData] = deque() + number_of_rows_in_circular_buffer = 0 + + def __init__( + self, + capture_mode: CaptureMode, + filepath: str, + number_of_rows_to_capture: int, + status_message_setter: Callable, + capturing_record_setter: Callable, + number_received_setter: Callable, + ): + # Only one filename - user must stop capture and set new FileName/FilePath + # for new files + + self.capture_mode = capture_mode + self.filepath = filepath + self.number_of_rows_to_capture = number_of_rows_to_capture + self.status_message_setter = status_message_setter + self.capturing_record_setter = capturing_record_setter + self.number_received_setter = number_received_setter + + if ( + self.capture_mode == CaptureMode.LAST_N + and self.number_of_rows_to_capture <= 0 + ): + raise RuntimeError("Number of rows to capture must be > 0 on LAST_N mode") + + def put_data_to_file(self, data: HDFReceived): + try: + self.pipeline[0].queue.put_nowait(data) + except Exception as ex: + logging.exception(f"Failed to save the data to HDF5 file: {ex}") + + def start_pipeline(self): + self.pipeline = create_default_pipeline(iter([self.filepath])) + + def 
get_written_data_size(self): + return min([ds.size for ds in self.pipeline[1].datasets]) + + def stop_pipeline(self): + stop_pipeline(self.pipeline) + + def _handle_StartData(self, data: StartData): + if self.start_data and data != self.start_data: + # PandA was disarmed, had config changed, and rearmed. + # Cannot process to the same file with different start data. + logging.error( + "New start data detected, differs from previous start " + "data for this file. Aborting HDF5 data capture." + ) + + self.status_message_setter( + "Mismatched StartData packet for file", + severity=alarm.MAJOR_ALARM, + alarm=alarm.STATE_ALARM, + ) + self.put_data_to_file( + EndData(self.number_of_received_rows, EndReason.START_DATA_MISMATCH) + ) + + self.finish_capturing = True + + if self.start_data is None: + # Only pass StartData to pipeline if we haven't previously + # - if we have there will already be an in-progress HDF file + # that we should just append data to + self.start_data = data + self.put_data_to_file(data) + + def _capture_first_n(self, data: FrameData): + """ + Capture framedata as it comes in. Stop when number of frames exceeds + number_of_rows_to_capture, and cut off the data so that its length is + number_of_rows_to_capture. + """ + self.number_of_received_rows += len(data.data) + + if ( + self.number_of_rows_to_capture > 0 + and self.number_of_received_rows > self.number_of_rows_to_capture + ): + # Discard extra collected data points if necessary + data.data = data.data[ + : self.number_of_rows_to_capture - self.number_of_received_rows + ].copy() + self.number_of_received_rows = self.number_of_rows_to_capture + + self.put_data_to_file(data) + self.number_received_setter(self.number_of_received_rows) + + if ( + self.number_of_rows_to_capture > 0 + and self.number_of_received_rows == self.number_of_rows_to_capture + ): + # Reached configured capture limit, stop the file + logging.info( + f"Requested number of frames ({self.number_of_rows_to_capture}) " + "captured, disabling Capture." + ) + self.status_message_setter("Requested number of frames captured") + self.put_data_to_file(EndData(self.number_of_received_rows, EndReason.OK)) + self.finish_capturing = True + + def _capture_forever(self, data: FrameData): + self.put_data_to_file(data) + self.number_of_received_rows += len(data.data) + self.number_received_setter(self.number_of_received_rows) + + def _capture_last_n(self, data: FrameData): + """ + Append every FrameData to a buffer until the number of rows equals + `:NumCapture`. Then rewrite the data circularly. + + Only write the data once PCAP is received. 
+ """ + self.circular_buffer.append(data) + self.number_of_received_rows += len(data.data) + self.number_of_rows_in_circular_buffer += len(data.data) + + if self.number_of_rows_in_circular_buffer > self.number_of_rows_to_capture: + self.status_message_setter( + "NumCapture received, rewriting first frames received" + ) + + else: + self.status_message_setter("Filling buffer to NumReceived") + + while self.number_of_rows_in_circular_buffer > self.number_of_rows_to_capture: + first_frame_data = self.circular_buffer.popleft() + first_frame_data_length = len(first_frame_data.data) + + if first_frame_data_length >= self.number_of_rows_to_capture: + # More data than we want to capture, all in a single FrameData + # We can just slice with the NumCapture since this has to be the + # only FrameData in the buffer at this point + assert len(self.circular_buffer) == 0 + first_frame_data.data = first_frame_data.data[ + -self.number_of_rows_to_capture : + ].copy() + self.circular_buffer.appendleft(first_frame_data) + self.number_of_rows_in_circular_buffer = self.number_of_rows_to_capture + elif ( + first_frame_data_length + > self.number_of_rows_in_circular_buffer + - self.number_of_rows_to_capture + ): + # We can slice from the beginning of the FrameData to have the desired + # number of rows + indices_to_discard = ( + self.number_of_rows_in_circular_buffer + - self.number_of_rows_to_capture + ) + first_frame_data.data = first_frame_data.data[ + indices_to_discard: + ].copy() + self.circular_buffer.appendleft(first_frame_data) + self.number_of_rows_in_circular_buffer -= indices_to_discard + assert ( + self.number_of_rows_in_circular_buffer + == self.number_of_rows_to_capture + ) + else: + # If we remove the entire first frame data then the buffer will still + # be too big, or it will be exactly the number of rows we want + self.number_of_rows_in_circular_buffer -= first_frame_data_length + + self.number_received_setter(self.number_of_received_rows) + + def _handle_FrameData(self, data: FrameData): + match self.capture_mode: + case CaptureMode.FIRST_N: + self._capture_first_n(data) + case CaptureMode.LAST_N: + self._capture_last_n(data) + case CaptureMode.FOREVER: + self._capture_forever(data) + + def _handle_EndData(self, data: EndData): + if self.capture_mode == CaptureMode.LAST_N: + # Put all the data to file + self.status_message_setter( + "Finishing capture, writing buffered frames to file" + ) + # In LAST_N only write FrameData if the EndReason is OK + if data.reason == EndReason.OK: + for frame_data in self.circular_buffer: + self.put_data_to_file(frame_data) + + if self.capture_mode == CaptureMode.FOREVER: + self.start_data = None + self.status_message_setter("Finished capture, waiting for next ReadyData") + else: + self.finish_capturing = True + self.status_message_setter("Finished capture") + + self.capturing_record_setter(0) + self.put_data_to_file(data) + + def handle_data(self, data: HDFReceived): + match data: + case ReadyData(): + self.capturing_record_setter(1) + self.status_message_setter("Starting capture") + case StartData(): + self._handle_StartData(data) + case FrameData(): + self._handle_FrameData(data) + case EndData(): + self._handle_EndData(data) + case _: + raise RuntimeError( + f"Data was received that was of type {type(data)}, not" + "StartData, EndData, ReadyData or FrameData" + ) + class HDF5RecordController: """Class to create and control the records that handle HDF5 processing""" - _HDF5_PREFIX = "HDF5" + _DATA_PREFIX = "DATA" _client: AsyncioClient - _file_path_record: 
RecordWrapper + _directory_record: RecordWrapper _file_name_record: RecordWrapper _file_number_record: RecordWrapper _file_format_record: RecordWrapper @@ -53,34 +294,34 @@ def __init__(self, client: AsyncioClient, record_prefix: str): # Create the records, including an uppercase alias for each # Naming convention and settings (mostly) copied from FSCN2 HDF5 records - file_path_record_name = EpicsName(self._HDF5_PREFIX + ":FilePath") - self._file_path_record = builder.longStringOut( - file_path_record_name, + directory_record_name = EpicsName(self._DATA_PREFIX + ":HDFDirectory") + self._directory_record = builder.longStringOut( + directory_record_name, length=path_length, DESC="File path for HDF5 files", validate=self._parameter_validate, - on_update=self._update_full_file_name, + on_update=self._update_full_file_path, ) add_pvi_info( - PviGroup.INPUTS, - self._file_path_record, - file_path_record_name, + PviGroup.HDF, + self._directory_record, + directory_record_name, builder.longStringOut, ) - self._file_path_record.add_alias( - record_prefix + ":" + file_path_record_name.upper() + self._directory_record.add_alias( + record_prefix + ":" + directory_record_name.upper() ) - file_name_record_name = EpicsName(self._HDF5_PREFIX + ":FileName") + file_name_record_name = EpicsName(self._DATA_PREFIX + ":HDFFileName") self._file_name_record = builder.longStringOut( file_name_record_name, length=filename_length, DESC="File name prefix for HDF5 files", validate=self._parameter_validate, - on_update=self._update_full_file_name, + on_update=self._update_full_file_path, ) add_pvi_info( - PviGroup.INPUTS, + PviGroup.HDF, self._file_name_record, file_name_record_name, builder.longStringOut, @@ -89,23 +330,23 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + file_name_record_name.upper() ) - full_file_name_record_name = EpicsName(self._HDF5_PREFIX + ":FullFileName") - self._full_file_name_record = builder.longStringOut( - full_file_name_record_name, + full_file_path_record_name = EpicsName(self._DATA_PREFIX + ":HDFFullFilePath") + self._full_file_path_record = builder.longStringIn( + full_file_path_record_name, length=path_length + 1 + filename_length, - DESC="Full HDF5 file name with path", + DESC="Full HDF5 file name with directory", ) add_pvi_info( - PviGroup.INPUTS, - self._full_file_name_record, - full_file_name_record_name, - builder.longStringOut, + PviGroup.HDF, + self._full_file_path_record, + full_file_path_record_name, + builder.longStringIn, ) self._file_name_record.add_alias( - record_prefix + ":" + full_file_name_record_name.upper() + record_prefix + ":" + full_file_path_record_name.upper() ) - num_capture_record_name = EpicsName(self._HDF5_PREFIX + ":NumCapture") + num_capture_record_name = EpicsName(self._DATA_PREFIX + ":NumCapture") self._num_capture_record = builder.longOut( num_capture_record_name, initial_value=0, # Infinite capture @@ -114,7 +355,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): ) add_pvi_info( - PviGroup.INPUTS, + PviGroup.CAPTURE, self._num_capture_record, num_capture_record_name, builder.longOut, @@ -124,32 +365,32 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + num_capture_record_name.upper() ) - num_written_record_name = EpicsName(self._HDF5_PREFIX + ":NumWritten_RBV") - self._num_written_record = builder.longOut( - num_written_record_name, + num_received_record_name = EpicsName(self._DATA_PREFIX + ":NumReceived") + self._num_received_record = builder.longIn( + 
num_received_record_name, initial_value=0, DESC="Number of frames written to HDF file.", ) add_pvi_info( - PviGroup.INPUTS, - self._num_written_record, - num_written_record_name, - builder.longOut, + PviGroup.CAPTURE, + self._num_received_record, + num_received_record_name, + builder.longIn, ) - self._num_written_record.add_alias( - record_prefix + ":" + num_written_record_name.upper() + self._num_received_record.add_alias( + record_prefix + ":" + num_received_record_name.upper() ) - flush_period_record_name = EpicsName(self._HDF5_PREFIX + ":FlushPeriod") + flush_period_record_name = EpicsName(self._DATA_PREFIX + ":FlushPeriod") self._flush_period_record = builder.aOut( flush_period_record_name, initial_value=1.0, DESC="Frequency that data is flushed (seconds)", + EGU="s", ) add_pvi_info( - PviGroup.INPUTS, + PviGroup.CAPTURE, self._flush_period_record, flush_period_record_name, builder.aOut, ) @@ -158,7 +399,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + flush_period_record_name.upper() ) - capture_control_record_name = EpicsName(self._HDF5_PREFIX + ":Capture") + capture_control_record_name = EpicsName(self._DATA_PREFIX + ":Capture") self._capture_control_record = builder.boolOut( capture_control_record_name, ZNAM=ZNAM_STR, @@ -168,7 +409,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): DESC="Start/stop HDF5 capture", ) add_pvi_info( - PviGroup.INPUTS, + PviGroup.CAPTURE, self._capture_control_record, capture_control_record_name, builder.boolOut, @@ -177,10 +418,28 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + capture_control_record_name.upper() ) - status_message_record_name = EpicsName(self._HDF5_PREFIX + ":Status") - self._status_message_record = builder.stringIn( + capture_mode_record_name = EpicsName(self._DATA_PREFIX + ":CaptureMode") + self._capture_mode_record = builder.mbbOut( + capture_mode_record_name, + *[capture_mode.name for capture_mode in CaptureMode], + initial_value=0, + DESC="Choose how the hdf writer flushes", + ) + add_pvi_info( + PviGroup.CAPTURE, + self._capture_mode_record, + capture_mode_record_name, + builder.mbbOut, + ) + self._capture_mode_record.add_alias( + record_prefix + ":" + capture_mode_record_name.upper() + ) + + status_message_record_name = EpicsName(self._DATA_PREFIX + ":Status") + self._status_message_record = builder.longStringIn( status_message_record_name, initial_value="OK", + length=200, DESC="Reports current status of HDF5 capture", ) add_pvi_info( @@ -193,7 +452,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + status_message_record_name.upper() ) - currently_capturing_record_name = EpicsName(self._HDF5_PREFIX + ":Capturing") + currently_capturing_record_name = EpicsName(self._DATA_PREFIX + ":Capturing") self._currently_capturing_record = builder.boolIn( currently_capturing_record_name, ZNAM=ZNAM_STR, @@ -223,108 +482,46 @@ def _parameter_validate(self, record: RecordWrapper, new_val) -> bool: return False return True - async def _update_full_file_name(self, new_val) -> None: - self._full_file_name_record.set(self._get_filename()) + async def _update_full_file_path(self, new_val) -> None: + self._full_file_path_record.set(self._get_filepath()) async def _handle_hdf5_data(self) -> None: """Handles writing HDF5 data from the PandA to file, based on configuration in the various HDF5 records. 
This method expects to be run as an asyncio Task.""" - captured_frames: int = 0 try: - # Keep the start data around to compare against, for the case where a new - # capture, and thus new StartData, is sent without Capture ever being - # disabled - start_data: Optional[StartData] = None - self._num_written_record.set(captured_frames) - # Only one filename - user must stop capture and set new FileName/FilePath - # for new files - pipeline: List[Pipeline] = create_default_pipeline( - iter([self._get_filename()]) + # Set up the hdf buffer + num_capture: int = self._num_capture_record.get() + capture_mode: CaptureMode = CaptureMode(self._capture_mode_record.get()) + buffer = HDF5Buffer( + capture_mode, + self._get_filepath(), + num_capture, + self._status_message_record.set, + self._currently_capturing_record.set, + self._num_received_record.set, ) - flush_period: float = self._flush_period_record.get() + buffer.start_pipeline() + flush_period: float = self._flush_period_record.get() async for data in self._client.data( scaled=False, flush_period=flush_period ): logging.debug(f"Received data packet: {data}") - if isinstance(data, ReadyData): - self._currently_capturing_record.set(1) - self._status_message_record.set("Starting capture") - elif isinstance(data, StartData): - if start_data and data != start_data: - # PandA was disarmed, had config changed, and rearmed. - # Cannot process to the same file with different start data. - logging.error( - "New start data detected, differs from previous start " - "data for this file. Aborting HDF5 data capture." - ) - - self._status_message_record.set( - "Mismatched StartData packet for file", - severity=alarm.MAJOR_ALARM, - alarm=alarm.STATE_ALARM, - ) - pipeline[0].queue.put_nowait( - EndData(captured_frames, EndReason.START_DATA_MISMATCH) - ) - - break - if start_data is None: - # Only pass StartData to pipeline if we haven't previously - # - if we have there will already be an in-progress HDF file - # that we should just append data to - start_data = data - pipeline[0].queue.put_nowait(data) - - elif isinstance(data, FrameData): - num_frames_to_capture: int = self._num_capture_record.get() - if ( - num_frames_to_capture > 0 - and captured_frames > num_frames_to_capture - ): - # Discard extra collected data points if necessary - data.data = data.data[: num_frames_to_capture - captured_frames] - captured_frames = num_frames_to_capture - pipeline[0].queue.put_nowait(data) - sizes = [ds.size for ds in pipeline[1].datasets] - if sizes: # not empty - self._num_written_record.set(min(sizes)) - - if ( - num_frames_to_capture > 0 - and captured_frames >= num_frames_to_capture - ): - # Reached configured capture limit, stop the file - logging.info( - f"Requested number of frames ({num_frames_to_capture}) " - "captured, disabling Capture." 
- ) - self._status_message_record.set( - "Requested number of frames captured" - ) - pipeline[0].queue.put_nowait( - EndData(captured_frames, EndReason.OK) - ) - break - elif isinstance(data, EndData): - pipeline[0].queue.put_nowait(EndData(captured_frames, EndReason.OK)) + + buffer.handle_data(data) + if buffer.finish_capturing: break - else: - raise RuntimeError( - f"Data was recieved that was of type {type(data)}, not" - "StartData, EndData, ReadyData or FrameData" - ) - # Ignore EndData - handle terminating capture with the Capture - # record or when we capture the requested number of frames except CancelledError: logging.info("Capturing task cancelled, closing HDF5 file") self._status_message_record.set("Capturing disabled") # Only send EndData if we know the file was opened - could be cancelled # before PandA has actually send any data - if start_data: - pipeline[0].queue.put_nowait(EndData(captured_frames, EndReason.OK)) + if buffer.start_data: + buffer.put_data_to_file( + EndData(buffer.number_of_received_rows, EndReason.OK) + ) except Exception: logging.exception("HDF5 data capture terminated due to unexpected error") @@ -335,23 +532,23 @@ async def _handle_hdf5_data(self) -> None: ) # Only send EndData if we know the file was opened - exception could happen # before file was opened - if start_data: - pipeline[0].queue.put_nowait( - EndData(captured_frames, EndReason.UNKNOWN_EXCEPTION) + if buffer.start_data: + buffer.put_data_to_file( + EndData(buffer.number_of_received_rows, EndReason.UNKNOWN_EXCEPTION) ) finally: logging.debug("Finishing processing HDF5 PandA data") - stop_pipeline(pipeline) - self._num_written_record.set(captured_frames) + buffer.stop_pipeline() + self._num_received_record.set(buffer.number_of_received_rows) self._capture_control_record.set(0) self._currently_capturing_record.set(0) - def _get_filename(self) -> str: + def _get_filepath(self) -> str: """Create the file path for the HDF5 file from the relevant records""" return "/".join( ( - self._file_path_record.get(), + self._directory_record.get(), self._file_name_record.get(), ) ) @@ -374,7 +571,7 @@ def _capture_validate(self, record: RecordWrapper, new_val: int) -> bool: """Check the required records have been set before allowing Capture=1""" if new_val: try: - self._get_filename() + self._get_filepath() except ValueError: logging.exception("At least 1 required record had no value") return False diff --git a/src/pandablocks_ioc/_pvi.py b/src/pandablocks_ioc/_pvi.py index ce74d379..d84f3647 100644 --- a/src/pandablocks_ioc/_pvi.py +++ b/src/pandablocks_ioc/_pvi.py @@ -39,6 +39,7 @@ class PviGroup(Enum): READBACKS = "Readbacks" OUTPUTS = "Outputs" CAPTURE = "Capture" + HDF = "HDF" TABLE = "Table" # TODO: May not need this anymore @@ -83,7 +84,10 @@ def add_pvi_info( if useComboBox: widget = ComboBox() else: - if record_creation_func in (builder.longStringOut, builder.stringOut): + if record_creation_func in ( + builder.longStringOut, + builder.stringOut, + ): widget = TextWrite(format=TextFormat.string) else: widget = TextWrite(format=None) @@ -91,6 +95,14 @@ def add_pvi_info( component = SignalRW(name=pvi_name, pv=record_name, widget=widget) access = "rw" else: + if record_creation_func in ( + builder.longStringIn, + builder.stringIn, + ): + widget = TextRead(format=TextFormat.string) + else: + widget = TextRead(format=None) + component = SignalR(name=pvi_name, pv=record_name, widget=TextRead()) access = "r" block, field = record_name.split(":", maxsplit=1) diff --git a/tests/test-bobfiles/HDF5.bob 
b/tests/test-bobfiles/DATA.bob similarity index 73% rename from tests/test-bobfiles/HDF5.bob rename to tests/test-bobfiles/DATA.bob index 51d60d04..46921f3d 100644 --- a/tests/test-bobfiles/HDF5.bob +++ b/tests/test-bobfiles/DATA.bob @@ -3,13 +3,13 @@ 0 0 426 - 277 + 413 4 4 Title TITLE - HDF5 - TEST_PREFIX: + DATA - TEST_PREFIX: 0 0 426 @@ -26,11 +26,11 @@ 1 - INPUTS + HDF 5 30 416 - 156 + 106 true Label @@ -42,7 +42,7 @@ TextEntry - TEST_PREFIX:HDF5:FilePath + TEST_PREFIX:DATA:HDFDirectory 255 0 125 @@ -60,7 +60,7 @@ TextEntry - TEST_PREFIX:HDF5:FileName + TEST_PREFIX:DATA:HDFFileName 255 25 125 @@ -137,11 +137,58 @@ TextUpdate - TEST_PREFIX:HDF5:Status + TEST_PREFIX:DATA:HDFFullFilePath + 255 + 50 + 125 + 20 + + + + + 1 + 6 + + + + CAPTURE + 5 + 141 + 416 + 181 + true + + Label + DATA: Num Capture + 0 + 0 + 250 + 20 + + + TextEntry + TEST_PREFIX:DATA:NumCapture 255 0 125 20 + 1 + + + Label + DATA: Num Received + 0 + 25 + 250 + 20 + + + TextUpdate + TEST_PREFIX:DATA:NumReceived + 255 + 25 + 125 + 20 @@ -158,7 +205,7 @@ TextUpdate - TEST_PREFIX:HDF5:Capturing + TEST_PREFIX:DATA:Capturing 255 25 125 diff --git a/tests/test-bobfiles/index.bob b/tests/test-bobfiles/index.bob index 77167918..8fe3ed31 100644 --- a/tests/test-bobfiles/index.bob +++ b/tests/test-bobfiles/index.bob @@ -61,7 +61,7 @@ OpenDisplay - HDF5.bob + DATA.bob tab Open Display diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py index b678bf44..d5fd7aa2 100644 --- a/tests/test_hdf_ioc.py +++ b/tests/test_hdf_ioc.py @@ -3,6 +3,7 @@ import asyncio import logging from asyncio import CancelledError +from collections import deque from multiprocessing.connection import Connection from pathlib import Path from typing import AsyncGenerator, Generator @@ -11,7 +12,7 @@ import numpy import pytest import pytest_asyncio -from aioca import caget, camonitor, caput +from aioca import DBR_CHAR_STR, CANothing, caget, camonitor, caput from fixtures.mocked_panda import ( TIMEOUT, MockedAsyncioClient, @@ -34,7 +35,7 @@ ) from softioc import asyncio_dispatcher, builder, softioc -from pandablocks_ioc._hdf_ioc import HDF5RecordController +from pandablocks_ioc._hdf_ioc import CaptureMode, HDF5Buffer, HDF5RecordController NAMESPACE_PREFIX = "HDF-RECORD-PREFIX" @@ -42,7 +43,7 @@ @pytest.fixture def new_random_hdf5_prefix(): test_prefix = append_random_uppercase(NAMESPACE_PREFIX) - hdf5_test_prefix = test_prefix + ":HDF5" + hdf5_test_prefix = test_prefix + ":DATA" return test_prefix, hdf5_test_prefix @@ -318,14 +319,14 @@ async def test_hdf5_ioc(hdf5_subprocess_ioc): test_prefix, hdf5_test_prefix = hdf5_subprocess_ioc - val = await caget(hdf5_test_prefix + ":FilePath") + val = await caget(hdf5_test_prefix + ":HDFDirectory", datatype=DBR_CHAR_STR) # Default value of longStringOut is an array of a single NULL byte - assert val.size == 1 + assert val == "" # Mix and match between CamelCase and UPPERCASE to check aliases work - val = await caget(hdf5_test_prefix + ":FILENAME") - assert val.size == 1 # As above for longStringOut + val = await caget(hdf5_test_prefix + ":HDFFILENAME", datatype=DBR_CHAR_STR) + assert val == "" val = await caget(hdf5_test_prefix + ":NumCapture") assert val == 0 @@ -336,20 +337,16 @@ async def test_hdf5_ioc(hdf5_subprocess_ioc): val = await caget(hdf5_test_prefix + ":CAPTURE") assert val == 0 - val = await caget(hdf5_test_prefix + ":Status") + val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) assert val == "OK" val = await caget(hdf5_test_prefix + ":Capturing") assert val == 0 -def 
_string_to_buffer(string: str): - """Convert a python string into a numpy buffer suitable for caput'ing to a Waveform - record""" - return numpy.frombuffer(string.encode(), dtype=numpy.uint8) - - -async def test_hdf5_ioc_parameter_validate_works(hdf5_subprocess_ioc_no_logging_check): +async def test_hdf5_ioc_parameter_validate_works( + hdf5_subprocess_ioc_no_logging_check, tmp_path +): """Run the HDF5 module as its own IOC and check the _parameter_validate method does not stop updates, then stops when capture record is changed""" @@ -357,60 +354,74 @@ async def test_hdf5_ioc_parameter_validate_works(hdf5_subprocess_ioc_no_logging_ # EPICS bug means caputs always appear to succeed, so do a caget to prove it worked await caput( - hdf5_test_prefix + ":FilePath", _string_to_buffer("/new/path"), wait=True + hdf5_test_prefix + ":HDFDirectory", + str(tmp_path), + datatype=DBR_CHAR_STR, + wait=True, ) - val = await caget(hdf5_test_prefix + ":FilePath") - assert val.tobytes().decode() == "/new/path" + val = await caget(hdf5_test_prefix + ":HDFDirectory", datatype=DBR_CHAR_STR) + assert val == str(tmp_path) - await caput(hdf5_test_prefix + ":FileName", _string_to_buffer("name.h5"), wait=True) - val = await caget(hdf5_test_prefix + ":FileName") - assert val.tobytes().decode() == "name.h5" + await caput( + hdf5_test_prefix + ":HDFFileName", "name.h5", wait=True, datatype=DBR_CHAR_STR + ) + val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) + assert val == "name.h5" await caput(hdf5_test_prefix + ":Capture", 1, wait=True) assert await caget(hdf5_test_prefix + ":Capture") == 1 - await caput( - hdf5_test_prefix + ":FilePath", _string_to_buffer("/second/path"), wait=True - ) - val = await caget(hdf5_test_prefix + ":FilePath") - assert val.tobytes().decode() == "/new/path" # put should have been stopped + with pytest.raises(CANothing): + await caput( + hdf5_test_prefix + ":HDFFullFilePath", + "/second/path/name.h5", + wait=True, + datatype=DBR_CHAR_STR, + ) + val = await caget(hdf5_test_prefix + ":HDFFullFilePath", datatype=DBR_CHAR_STR) + assert val == str(tmp_path) + "/name.h5" # put should have been stopped @pytest.mark.parametrize("num_capture", [1, 1000, 10000]) -async def test_hdf5_file_writing( +async def test_hdf5_file_writing_first_n( hdf5_subprocess_ioc, tmp_path: Path, caplog, num_capture ): """Test that an HDF5 file is written when Capture is enabled""" test_prefix, hdf5_test_prefix = hdf5_subprocess_ioc - test_dir = str(tmp_path) + "\0" - test_filename = "test.h5\0" + val = await caget(hdf5_test_prefix + ":CaptureMode") + assert val == CaptureMode.FIRST_N.value + + test_dir = tmp_path + test_filename = "test.h5" await caput( - hdf5_test_prefix + ":FilePath", - _string_to_buffer(str(test_dir)), + hdf5_test_prefix + ":HDFDirectory", + str(test_dir), wait=True, - timeout=TIMEOUT, + datatype=DBR_CHAR_STR, ) - val = await caget(hdf5_test_prefix + ":FilePath") - assert val.tobytes().decode() == test_dir + val = await caget(hdf5_test_prefix + ":HDFDirectory", datatype=DBR_CHAR_STR) + assert val == str(test_dir) - val = await caget(hdf5_test_prefix + ":FullFileName") - # slash appended to file path for full file name - assert val.tobytes().decode() == "/".join([str(tmp_path), "\0"]) + await caput( + hdf5_test_prefix + ":HDFFileName", "name.h5", wait=True, datatype=DBR_CHAR_STR + ) + val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) + assert val == "name.h5" await caput( - hdf5_test_prefix + ":FileName", - _string_to_buffer(test_filename), + 
hdf5_test_prefix + ":HDFFileName", + test_filename, wait=True, timeout=TIMEOUT, + datatype=DBR_CHAR_STR, ) - val = await caget(hdf5_test_prefix + ":FileName") - assert val.tobytes().decode() == test_filename + val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) + assert val == test_filename - val = await caget(hdf5_test_prefix + ":FullFileName") - # value has \0 terminator, like test_filename - assert val.tobytes().decode() == "/".join([str(tmp_path), test_filename]) + val = await caget(hdf5_test_prefix + ":HDFFullFilePath", datatype=DBR_CHAR_STR) + assert val == "/".join([str(tmp_path), test_filename]) # Only a single FrameData in the example data assert await caget(hdf5_test_prefix + ":NumCapture") == 0 @@ -431,7 +442,101 @@ async def test_hdf5_file_writing( assert await capturing_queue.get() == 0 await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) - assert await caget(hdf5_test_prefix + ":NumWritten_RBV") <= num_capture + assert await caget(hdf5_test_prefix + ":NumReceived") <= num_capture + assert await capturing_queue.get() == 1 + + # The HDF5 data will be processed, and when it's done Capturing is set to 0 + assert await asyncio.wait_for(capturing_queue.get(), timeout=TIMEOUT) == 0 + + m.close() + + # Capture should have closed by itself + assert await caget(hdf5_test_prefix + ":Capture") == 0 + + assert await caget(hdf5_test_prefix + ":NumReceived") == num_capture + # Confirm file contains data we expect + with h5py.File(tmp_path / test_filename, "r") as hdf_file: + assert list(hdf_file) == [ + "COUNTER1.OUT.Max", + "COUNTER1.OUT.Mean", + "COUNTER1.OUT.Min", + "COUNTER2.OUT.Mean", + "COUNTER3.OUT.Value", + "PCAP.BITS2.Value", + "PCAP.SAMPLES.Value", + "PCAP.TS_START.Value", + ] + + assert len(hdf_file["/COUNTER1.OUT.Max"]) == num_capture + + assert ( + await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) + == "Requested number of frames captured" + ) + + +async def test_hdf5_file_writing_forever(hdf5_subprocess_ioc, tmp_path: Path, caplog): + """Test that an HDF5 file is written when Capture is enabled""" + + test_prefix, hdf5_test_prefix = hdf5_subprocess_ioc + num_capture = 10 + + val = await caget(hdf5_test_prefix + ":CaptureMode") + assert val == CaptureMode.FIRST_N.value + await caput(hdf5_test_prefix + ":CaptureMode", CaptureMode.FOREVER.value, wait=True) + val = await caget(hdf5_test_prefix + ":CaptureMode") + assert val == CaptureMode.FOREVER.value + + test_dir = tmp_path + test_filename = "test.h5" + await caput( + hdf5_test_prefix + ":HDFDirectory", + str(test_dir), + wait=True, + datatype=DBR_CHAR_STR, + ) + val = await caget(hdf5_test_prefix + ":HDFDirectory", datatype=DBR_CHAR_STR) + assert val == str(test_dir) + + await caput( + hdf5_test_prefix + ":HDFFileName", "name.h5", wait=True, datatype=DBR_CHAR_STR + ) + val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) + assert val == "name.h5" + + await caput( + hdf5_test_prefix + ":HDFFileName", + test_filename, + wait=True, + timeout=TIMEOUT, + datatype=DBR_CHAR_STR, + ) + val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) + assert val == test_filename + + val = await caget(hdf5_test_prefix + ":HDFFullFilePath", datatype=DBR_CHAR_STR) + assert val == "/".join([str(tmp_path), test_filename]) + + assert await caget(hdf5_test_prefix + ":NumCapture") == 0 + await caput( + hdf5_test_prefix + ":NumCapture", num_capture, wait=True, timeout=TIMEOUT + ) + + # Since we're in forever mode it shouldn't matter what num_capture is 
+ assert await caget(hdf5_test_prefix + ":NumCapture") == num_capture + + # The queue expects to see Capturing go 0 -> 1 -> 0 as Capture is enabled + # and subsequently finishes + capturing_queue: asyncio.Queue = asyncio.Queue() + m = camonitor( + hdf5_test_prefix + ":Capturing", + capturing_queue.put, + ) + + # Initially Capturing should be 0 + assert await capturing_queue.get() == 0 + + await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) assert await capturing_queue.get() == 1 # The HDF5 data will be processed, and when it's done Capturing is set to 0 @@ -439,24 +544,306 @@ async def test_hdf5_file_writing( m.close() - # Close capture, thus closing hdf5 file - await caput(hdf5_test_prefix + ":Capture", 0, wait=True) + # The test panda writes 10000 rows before the capture is finished + assert await caget(hdf5_test_prefix + ":NumReceived") == 10000 assert await caget(hdf5_test_prefix + ":Capture") == 0 - assert await caget(hdf5_test_prefix + ":NumWritten_RBV") == num_capture # Confirm file contains data we expect - hdf_file = h5py.File(tmp_path / test_filename[:-1], "r") - assert list(hdf_file) == [ - "COUNTER1.OUT.Max", - "COUNTER1.OUT.Mean", - "COUNTER1.OUT.Min", - "COUNTER2.OUT.Mean", - "COUNTER3.OUT.Value", - "PCAP.BITS2.Value", - "PCAP.SAMPLES.Value", - "PCAP.TS_START.Value", + with h5py.File(tmp_path / test_filename, "r") as hdf_file: + assert list(hdf_file) == [ + "COUNTER1.OUT.Max", + "COUNTER1.OUT.Mean", + "COUNTER1.OUT.Min", + "COUNTER2.OUT.Mean", + "COUNTER3.OUT.Value", + "PCAP.BITS2.Value", + "PCAP.SAMPLES.Value", + "PCAP.TS_START.Value", + ] + + assert len(hdf_file["/COUNTER1.OUT.Max"]) == 10000 + assert ( + await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) + == "Finished capture, waiting for next ReadyData" + ) + + +@pytest.mark.parametrize("num_capture", [1, 1000, 10000]) +async def test_hdf5_file_writing_last_n( + hdf5_subprocess_ioc, tmp_path: Path, caplog, num_capture +): + """Test that an HDF5 file is written when Capture is enabled""" + + test_prefix, hdf5_test_prefix = hdf5_subprocess_ioc + + val = await caget(hdf5_test_prefix + ":CaptureMode") + assert val == CaptureMode.FIRST_N.value + await caput(hdf5_test_prefix + ":CaptureMode", 1, wait=True) + val = await caget(hdf5_test_prefix + ":CaptureMode") + assert val == CaptureMode.LAST_N.value + + test_dir = tmp_path + test_filename = "test.h5" + await caput( + hdf5_test_prefix + ":HDFDirectory", + str(test_dir), + wait=True, + datatype=DBR_CHAR_STR, + ) + val = await caget(hdf5_test_prefix + ":HDFDirectory", datatype=DBR_CHAR_STR) + assert val == str(test_dir) + + await caput( + hdf5_test_prefix + ":HDFFileName", "name.h5", wait=True, datatype=DBR_CHAR_STR + ) + val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) + assert val == "name.h5" + + await caput( + hdf5_test_prefix + ":HDFFileName", + test_filename, + wait=True, + timeout=TIMEOUT, + datatype=DBR_CHAR_STR, + ) + val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) + assert val == test_filename + + val = await caget(hdf5_test_prefix + ":HDFFullFilePath", datatype=DBR_CHAR_STR) + assert val == "/".join([str(tmp_path), test_filename]) + + # Only a single FrameData in the example data + assert await caget(hdf5_test_prefix + ":NumCapture") == 0 + await caput( + hdf5_test_prefix + ":NumCapture", num_capture, wait=True, timeout=TIMEOUT + ) + assert await caget(hdf5_test_prefix + ":NumCapture") == num_capture + + # The queue expects to see Capturing go 0 -> 1 -> 0 as Capture is enabled + 
# and subsequently finishes + capturing_queue: asyncio.Queue = asyncio.Queue() + m_capturing_queue = camonitor( + hdf5_test_prefix + ":Capturing", + capturing_queue.put, + ) + + # Initially Capturing should be 0 + assert await capturing_queue.get() == 0 + + # Initially Status should be "OK" + val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) + assert val == "OK" + + await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) + assert await capturing_queue.get() == 1 + + # The HDF5 data will be processed, and when it's done Capturing is set to 0 + assert await asyncio.wait_for(capturing_queue.get(), timeout=TIMEOUT) == 0 + + m_capturing_queue.close() + + await asyncio.sleep(1) + # Capture should have closed by itself + assert await caget(hdf5_test_prefix + ":Capture") == 0 + + val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) + assert val == "Finished capture" + + # We received all 10000 frames even if we asked to capture fewer. + assert await caget(hdf5_test_prefix + ":NumReceived") == 10000 + # Confirm file contains data we expect + with h5py.File(tmp_path / test_filename, "r") as hdf_file: + assert list(hdf_file) == [ + "COUNTER1.OUT.Max", + "COUNTER1.OUT.Mean", + "COUNTER1.OUT.Min", + "COUNTER2.OUT.Mean", + "COUNTER3.OUT.Value", + "PCAP.BITS2.Value", + "PCAP.SAMPLES.Value", + "PCAP.TS_START.Value", + ] + + # No data since we didn't receive an okay EndReason + assert len(hdf_file["/COUNTER1.OUT.Max"]) == 0 + + assert ( + await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) + == "Finished capture" + ) + + +@pytest_asyncio.fixture +def differently_sized_framedata(): + yield [ + ReadyData(), + StartData(DUMP_FIELDS, 0, "Scaled", "Framed", 52), + FrameData( + numpy.array( + [ + [0, 1, 1, 3, 5.6e-08, 1, 2], + [0, 2, 2, 6, 0.010000056, 2, 4], + [8, 3, 3, 9, 0.020000056, 3, 6], + [8, 4, 4, 12, 0.030000056, 4, 8], + [8, 5, 5, 15, 0.040000056, 5, 10], + [8, 6, 6, 18, 0.050000056, 6, 12], + [8, 7, 7, 21, 0.060000056, 7, 14], + [8, 8, 8, 24, 0.070000056, 8, 16], + [8, 9, 9, 27, 0.080000056, 9, 18], + [8, 10, 10, 30, 0.090000056, 10, 20], + ] + ) + ), + FrameData( + numpy.array( + [ + [0, 11, 11, 33, 0.100000056, 11, 22], + [8, 12, 12, 36, 0.110000056, 12, 24], + [8, 13, 13, 39, 0.120000056, 13, 26], + [8, 14, 14, 42, 0.130000056, 14, 28], + [8, 15, 15, 45, 0.140000056, 15, 30], + [8, 16, 16, 48, 0.150000056, 16, 32], + [8, 17, 17, 51, 0.160000056, 17, 34], + [8, 18, 18, 54, 0.170000056, 18, 36], + [8, 19, 19, 57, 0.180000056, 19, 38], + [0, 20, 20, 60, 0.190000056, 20, 40], + [8, 21, 21, 63, 0.200000056, 21, 42], + ] + ) + ), + FrameData( + numpy.array( + [ + [8, 22, 22, 66, 0.210000056, 22, 44], + [8, 23, 23, 69, 0.220000056, 23, 46], + [8, 24, 24, 72, 0.230000056, 24, 48], + [8, 25, 25, 75, 0.240000056, 25, 50], + [8, 26, 26, 78, 0.250000056, 26, 52], + [8, 27, 27, 81, 0.260000056, 27, 54], + [8, 28, 28, 84, 0.270000056, 28, 56], + [0, 29, 29, 87, 0.280000056, 29, 58], + [8, 30, 30, 90, 0.290000056, 30, 60], + [8, 31, 31, 93, 0.300000056, 31, 62], + ] + ) + ), + FrameData( + numpy.array( + [ + [8, 32, 32, 96, 0.310000056, 32, 64], + [8, 33, 33, 99, 0.320000056, 33, 66], + [8, 34, 34, 102, 0.330000056, 34, 68], + [8, 35, 35, 105, 0.340000056, 35, 70], + [8, 36, 36, 108, 0.350000056, 36, 72], + [8, 37, 37, 111, 0.360000056, 37, 74], + [0, 38, 38, 114, 0.370000056, 38, 76], + [8, 39, 39, 117, 0.380000056, 39, 78], + [8, 40, 40, 120, 0.390000056, 40, 80], + [8, 41, 41, 123, 0.400000056, 41, 82], + ] + ) + ), + FrameData( + 
numpy.array( + [ + [8, 42, 42, 126, 0.410000056, 42, 84], + [8, 43, 43, 129, 0.420000056, 43, 86], + [8, 44, 44, 132, 0.430000056, 44, 88], + [8, 45, 45, 135, 0.440000056, 45, 90], + [8, 46, 46, 138, 0.450000056, 46, 92], + [0, 47, 47, 141, 0.460000056, 47, 94], + [8, 48, 48, 144, 0.470000056, 48, 96], + [8, 49, 49, 147, 0.480000056, 49, 98], + [8, 50, 50, 150, 0.490000056, 50, 100], + [8, 51, 51, 153, 0.500000056, 51, 102], + ] + ) + ), + FrameData( + numpy.array( + [ + [8, 52, 52, 156, 0.510000056, 52, 104], + [8, 53, 53, 159, 0.520000056, 53, 106], + [8, 54, 54, 162, 0.530000056, 54, 108], + [8, 55, 55, 165, 0.540000056, 55, 110], + [0, 56, 56, 168, 0.550000056, 56, 112], + [8, 57, 57, 171, 0.560000056, 57, 114], + [8, 58, 58, 174, 0.570000056, 58, 116], + ] + ) + ), + EndData(58, EndReason.OK), ] - assert len(hdf_file["/COUNTER1.OUT.Max"]) == num_capture + +def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path): + filepath = str(tmp_path / "test_file.h5") + capturing_output = [] + status_output = [] + num_received_output = [] + buffer = HDF5Buffer( + CaptureMode.LAST_N, + filepath, + 21, + status_output.append, + capturing_output.append, + num_received_output.append, + ) + buffer.put_data_to_file = lambda x: ... + + for data in differently_sized_framedata: + buffer.handle_data(data) + + assert buffer.number_of_received_rows == 58 + assert buffer.number_of_rows_in_circular_buffer == 21 + + expected_cut_off_data = deque( + [ + FrameData( + numpy.array( + [ + [0, 38, 38, 114, 0.370000056, 38, 76], + [8, 39, 39, 117, 0.380000056, 39, 78], + [8, 40, 40, 120, 0.390000056, 40, 80], + [8, 41, 41, 123, 0.400000056, 41, 82], + ] + ) + ), + FrameData( + numpy.array( + [ + [8, 42, 42, 126, 0.410000056, 42, 84], + [8, 43, 43, 129, 0.420000056, 43, 86], + [8, 44, 44, 132, 0.430000056, 44, 88], + [8, 45, 45, 135, 0.440000056, 45, 90], + [8, 46, 46, 138, 0.450000056, 46, 92], + [0, 47, 47, 141, 0.460000056, 47, 94], + [8, 48, 48, 144, 0.470000056, 48, 96], + [8, 49, 49, 147, 0.480000056, 49, 98], + [8, 50, 50, 150, 0.490000056, 50, 100], + [8, 51, 51, 153, 0.500000056, 51, 102], + ] + ) + ), + FrameData( + numpy.array( + [ + [8, 52, 52, 156, 0.510000056, 52, 104], + [8, 53, 53, 159, 0.520000056, 53, 106], + [8, 54, 54, 162, 0.530000056, 54, 108], + [8, 55, 55, 165, 0.540000056, 55, 110], + [0, 56, 56, 168, 0.550000056, 56, 112], + [8, 57, 57, 171, 0.560000056, 57, 114], + [8, 58, 58, 174, 0.570000056, 58, 116], + ] + ) + ), + ] + ) + + for output_data, expected_data in zip( + buffer.circular_buffer, expected_cut_off_data + ): + numpy.testing.assert_array_equal(output_data.data, expected_data.data) def test_hdf_parameter_validate_not_capturing(hdf5_controller: HDF5RecordController): @@ -498,7 +885,7 @@ async def mock_data(scaled, flush_period): yield item # Set up all the mocks - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore return_value="Some/Filepath" ) hdf5_controller._client.data = mock_data # type: ignore @@ -538,7 +925,7 @@ async def mock_data(scaled, flush_period): yield item # Set up all the mocks - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore return_value="Some/Filepath" ) hdf5_controller._client.data = mock_data # type: ignore @@ -607,7 +994,7 @@ async def mock_data(scaled, flush_period): yield item # Set up all the mocks - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore 
return_value="Some/Filepath" ) hdf5_controller._client.data = mock_data # type: ignore @@ -669,7 +1056,7 @@ async def mock_data(scaled, flush_period): raise CancelledError # Set up all the mocks - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore return_value="Some/Filepath" ) hdf5_controller._client.data = mock_data # type: ignore @@ -724,7 +1111,7 @@ async def mock_data(scaled, flush_period): raise Exception("Test exception") # Set up all the mocks - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore return_value="Some/Filepath" ) hdf5_controller._client.data = mock_data # type: ignore @@ -789,13 +1176,13 @@ async def test_capture_on_update_cancel_unexpected_task( task_mock.cancel.assert_called_once() -def test_hdf_get_filename( +def test_hdf_get_filepath( hdf5_controller: HDF5RecordController, ): - """Test _get_filename works when all records have valid values""" + """Test _get_filepath works when all records have valid values""" - hdf5_controller._file_path_record = MagicMock() - hdf5_controller._file_path_record.get = MagicMock( # type: ignore + hdf5_controller._directory_record = MagicMock() + hdf5_controller._directory_record.get = MagicMock( # type: ignore return_value="/some/path" ) @@ -804,14 +1191,14 @@ def test_hdf_get_filename( return_value="some_filename" ) - assert hdf5_controller._get_filename() == "/some/path/some_filename" + assert hdf5_controller._get_filepath() == "/some/path/some_filename" def test_hdf_capture_validate_valid_filename( hdf5_controller: HDF5RecordController, ): """Test _capture_validate passes when a valid filename is given""" - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore return_value="/valid/file.h5" ) @@ -829,7 +1216,7 @@ def test_hdf_capture_validate_invalid_filename( hdf5_controller: HDF5RecordController, ): """Test _capture_validate fails when filename cannot be created""" - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore side_effect=ValueError("Mocked value error") ) @@ -840,7 +1227,7 @@ def test_hdf_capture_validate_exception( hdf5_controller: HDF5RecordController, ): """Test _capture_validate fails due to other exceptions""" - hdf5_controller._get_filename = MagicMock( # type: ignore + hdf5_controller._get_filepath = MagicMock( # type: ignore side_effect=Exception("Mocked error") ) From 81a606d30574c8a52cc040d7d9640cbf8b979040 Mon Sep 17 00:00:00 2001 From: Eva Lott Date: Thu, 7 Dec 2023 16:04:27 +0000 Subject: [PATCH 08/12] Changed `add_pvi_info` to `add_automatic_pvi_info` Changed `add_pvi_info` to `add_automatic_pvi_info` and seperated out outliar pvi info PVs (e.g `PCAP:ARM`) to their own functions. Made slight changes to the PVs in `DATA:`. 
--- src/pandablocks_ioc/_hdf_ioc.py | 50 ++++--------- src/pandablocks_ioc/_pvi.py | 71 ++++++++++++++---- src/pandablocks_ioc/ioc.py | 14 ++-- tests/test-bobfiles/DATA.bob | 126 ++++++++++++++++++++++++++++++-- tests/test_hdf_ioc.py | 58 +-------------- 5 files changed, 199 insertions(+), 120 deletions(-) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index 1ba39c6f..d2d2a861 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -19,7 +19,7 @@ from softioc import alarm, builder from softioc.pythonSoftIoc import RecordWrapper -from ._pvi import PviGroup, add_pvi_info +from ._pvi import PviGroup, add_automatic_pvi_info, add_data_capture_pvi_info from ._types import ONAM_STR, ZNAM_STR, EpicsName HDFReceived = Union[ReadyData, StartData, FrameData, EndData] @@ -55,7 +55,6 @@ def __init__( filepath: str, number_of_rows_to_capture: int, status_message_setter: Callable, - capturing_record_setter: Callable, number_received_setter: Callable, ): # Only one filename - user must stop capture and set new FileName/FilePath @@ -65,7 +64,6 @@ def __init__( self.filepath = filepath self.number_of_rows_to_capture = number_of_rows_to_capture self.status_message_setter = status_message_setter - self.capturing_record_setter = capturing_record_setter self.number_received_setter = number_received_setter if ( @@ -242,13 +240,11 @@ def _handle_EndData(self, data: EndData): self.finish_capturing = True self.status_message_setter("Finished capture") - self.capturing_record_setter(0) self.put_data_to_file(data) def handle_data(self, data: HDFReceived): match data: case ReadyData(): - self.capturing_record_setter(1) self.status_message_setter("Starting capture") case StartData(): self._handle_StartData(data) @@ -278,7 +274,6 @@ class HDF5RecordController: _flush_period_record: RecordWrapper _capture_control_record: RecordWrapper # Turn capture on/off _status_message_record: RecordWrapper # Reports status and error messages - _currently_capturing_record: RecordWrapper # If HDF5 file currently being written _handle_hdf5_data_task: Optional[asyncio.Task] = None @@ -302,7 +297,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): validate=self._parameter_validate, on_update=self._update_full_file_path, ) - add_pvi_info( + add_automatic_pvi_info( PviGroup.HDF, self._directory_record, directory_record_name, @@ -320,7 +315,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): validate=self._parameter_validate, on_update=self._update_full_file_path, ) - add_pvi_info( + add_automatic_pvi_info( PviGroup.HDF, self._file_name_record, file_name_record_name, @@ -336,7 +331,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): length=path_length + 1 + filename_length, DESC="Full HDF5 file name with directory", ) - add_pvi_info( + add_automatic_pvi_info( PviGroup.HDF, self._full_file_path_record, full_file_path_record_name, @@ -354,7 +349,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): DRVL=0, ) - add_pvi_info( + add_automatic_pvi_info( PviGroup.CAPTURE, self._num_capture_record, num_capture_record_name, @@ -372,7 +367,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): DESC="Number of frames written to HDF file.", ) - add_pvi_info( + add_automatic_pvi_info( PviGroup.CAPTURE, self._num_received_record, num_received_record_name, @@ -389,7 +384,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str): DESC="Frequency that data is flushed (seconds)", EGU="s", ) - add_pvi_info( + 
add_automatic_pvi_info(
             PviGroup.CAPTURE,
             self._flush_period_record,
             flush_period_record_name,
@@ -408,11 +403,10 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             validate=self._capture_validate,
             DESC="Start/stop HDF5 capture",
         )
-        add_pvi_info(
+        add_data_capture_pvi_info(
             PviGroup.CAPTURE,
-            self._capture_control_record,
             capture_control_record_name,
-            builder.boolOut,
+            self._capture_control_record,
         )
         self._capture_control_record.add_alias(
             record_prefix + ":" + capture_control_record_name.upper()
         )
@@ -425,7 +419,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             initial_value=0,
             DESC="Choose how the HDF writer flushes",
         )
-        add_pvi_info(
+        add_automatic_pvi_info(
             PviGroup.CAPTURE,
             self._capture_mode_record,
             capture_mode_record_name,
@@ -442,7 +436,7 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             length=200,
             DESC="Reports current status of HDF5 capture",
         )
-        add_pvi_info(
+        add_automatic_pvi_info(
             PviGroup.OUTPUTS,
             self._status_message_record,
             status_message_record_name,
@@ -452,23 +446,6 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             record_prefix + ":" + status_message_record_name.upper()
         )
 
-        currently_capturing_record_name = EpicsName(self._DATA_PREFIX + ":Capturing")
-        self._currently_capturing_record = builder.boolIn(
-            currently_capturing_record_name,
-            ZNAM=ZNAM_STR,
-            ONAM=ONAM_STR,
-            DESC="If HDF5 file is currently being written",
-        )
-        add_pvi_info(
-            PviGroup.OUTPUTS,
-            self._currently_capturing_record,
-            currently_capturing_record_name,
-            builder.boolIn,
-        )
-        self._currently_capturing_record.add_alias(
-            record_prefix + ":" + currently_capturing_record_name.upper()
-        )
-
     def _parameter_validate(self, record: RecordWrapper, new_val) -> bool:
         """Control when values can be written to parameter records (file name etc.)
based on capturing record's value""" @@ -493,12 +470,12 @@ async def _handle_hdf5_data(self) -> None: # Set up the hdf buffer num_capture: int = self._num_capture_record.get() capture_mode: CaptureMode = CaptureMode(self._capture_mode_record.get()) + filepath = self._get_filepath() buffer = HDF5Buffer( capture_mode, - self._get_filepath(), + filepath, num_capture, self._status_message_record.set, - self._currently_capturing_record.set, self._num_received_record.set, ) buffer.start_pipeline() @@ -542,7 +519,6 @@ async def _handle_hdf5_data(self) -> None: buffer.stop_pipeline() self._num_received_record.set(buffer.number_of_received_rows) self._capture_control_record.set(0) - self._currently_capturing_record.set(0) def _get_filepath(self) -> str: """Create the file path for the HDF5 file from the relevant records""" diff --git a/src/pandablocks_ioc/_pvi.py b/src/pandablocks_ioc/_pvi.py index d84f3647..109fcb38 100644 --- a/src/pandablocks_ioc/_pvi.py +++ b/src/pandablocks_ioc/_pvi.py @@ -54,13 +54,64 @@ class PviInfo: component: Component -def add_pvi_info( +def add_pvi_info_to_record( + record: RecordWrapper, + record_name: EpicsName, + access: str, +): + block, field = record_name.split(":", maxsplit=1) + block_name_suffixed = f"pvi.{field.lower().replace(':', '_')}.{access}" + record.add_info( + "Q:group", + { + RecordName(f"{block}:PVI"): { + block_name_suffixed: { + "+channel": "NAME", + "+type": "plain", + "+trigger": block_name_suffixed, + } + } + }, + ) + + +def add_data_capture_pvi_info( + group: PviGroup, + data_capture_record_name: EpicsName, + data_capture_pvi_record: RecordWrapper, +): + component = SignalRW( + data_capture_record_name, + data_capture_record_name, + widget=ButtonPanel(actions=dict(Start=1, Stop=0)), + read_widget=LED(), + ) + add_pvi_info_to_record(data_capture_pvi_record, data_capture_record_name, "rw") + Pvi.add_pvi_info( + record_name=data_capture_record_name, group=group, component=component + ) + + +def add_pcap_arm_pvi_info(group: PviGroup, pcap_arm_pvi_record: RecordWrapper): + pcap_arm_record_name = EpicsName("PCAP:ARM") + component = SignalRW( + pcap_arm_record_name, + pcap_arm_record_name, + widget=ButtonPanel(actions=dict(Arm=1, Disarm=0)), + read_widget=LED(), + ) + add_pvi_info_to_record(pcap_arm_pvi_record, pcap_arm_record_name, "rw") + Pvi.add_pvi_info(record_name=pcap_arm_record_name, group=group, component=component) + + +def add_automatic_pvi_info( group: PviGroup, record: RecordWrapper, record_name: EpicsName, record_creation_func: Callable, ) -> None: - """Create the most common forms of the `PviInfo` structure""" + """Create the most common forms of the `PviInfo` structure. 
+ Generates generic components from""" component: Component writeable: bool = record_creation_func in OUT_RECORD_FUNCTIONS useComboBox: bool = record_creation_func == builder.mbbOut @@ -105,20 +156,8 @@ def add_pvi_info( component = SignalR(name=pvi_name, pv=record_name, widget=TextRead()) access = "r" - block, field = record_name.split(":", maxsplit=1) - block_name_suffixed = f"pvi.{field.lower().replace(':', '_')}.{access}" - record.add_info( - "Q:group", - { - RecordName(f"{block}:PVI"): { - block_name_suffixed: { - "+channel": "NAME", - "+type": "plain", - "+trigger": block_name_suffixed, - } - } - }, - ) + + add_pvi_info_to_record(record, record_name, access) Pvi.add_pvi_info(record_name=record_name, group=group, component=component) diff --git a/src/pandablocks_ioc/ioc.py b/src/pandablocks_ioc/ioc.py index 148488f5..f65625a6 100644 --- a/src/pandablocks_ioc/ioc.py +++ b/src/pandablocks_ioc/ioc.py @@ -41,7 +41,13 @@ from softioc.pythonSoftIoc import RecordWrapper from ._hdf_ioc import HDF5RecordController -from ._pvi import Pvi, PviGroup, add_positions_table_row, add_pvi_info +from ._pvi import ( + Pvi, + PviGroup, + add_automatic_pvi_info, + add_pcap_arm_pvi_info, + add_positions_table_row, +) from ._tables import TableRecordWrapper, TableUpdater from ._types import ( ONAM_STR, @@ -652,7 +658,7 @@ def _create_record_info( record_name, *labels, *args, **extra_kwargs, **kwargs ) - add_pvi_info( + add_automatic_pvi_info( group=group, record=record, record_name=record_name, @@ -1727,9 +1733,7 @@ def create_block_records( DESC="Arm/Disarm the PandA", ) - add_pvi_info( - PviGroup.INPUTS, pcap_arm_record, EpicsName("PCAP:ARM"), builder.Action - ) + add_pcap_arm_pvi_info(PviGroup.INPUTS, pcap_arm_record) HDF5RecordController(self._client, self._record_prefix) diff --git a/tests/test-bobfiles/DATA.bob b/tests/test-bobfiles/DATA.bob index 46921f3d..22b8ae03 100644 --- a/tests/test-bobfiles/DATA.bob +++ b/tests/test-bobfiles/DATA.bob @@ -3,7 +3,7 @@ 0 0 426 - 413 + 388 4 4 @@ -197,17 +197,132 @@ Label - Capturing + DATA: Flush Period 0 - 25 + 50 + 250 + 20 + + + TextEntry + TEST_PREFIX:DATA:FlushPeriod + 255 + 50 + 125 + 20 + 1 + + + Label + DATA: Capture + 0 + 75 + 250 + 20 + + + WritePV + TEST_PREFIX:DATA:Capture + + + $(pv_name) + 1 + $(name) + + + Start + 255 + 75 + 38 + 20 + $(actions) + + + WritePV + TEST_PREFIX:DATA:Capture + + + $(pv_name) + 0 + $(name) + + + Stop + 298 + 75 + 38 + 20 + $(actions) + + + LED + TEST_PREFIX: + 350 + 75 + 20 + 20 + + + Label + DATA: Capture Mode + 0 + 100 + 250 + 20 + + + ComboBox + TEST_PREFIX:DATA:CaptureMode + 255 + 100 + 125 + 20 + + + Label + All Postion Capture Parameters + 0 + 125 + 250 + 20 + + + OpenDisplay + + + PandA_POSITIONS_TABLE.bob + tab + Open Display + + + All Postion Capture Parameters + 255 + 125 + 125 + 20 + $(actions) + + + + OUTPUTS + 5 + 327 + 416 + 56 + true + + Label + DATA: Status + 0 + 0 250 20 TextUpdate - TEST_PREFIX:DATA:Capturing + TEST_PREFIX:DATA:Status 255 - 25 + 0 125 20 @@ -215,6 +330,7 @@ 1 + 6 diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py index d5fd7aa2..07959625 100644 --- a/tests/test_hdf_ioc.py +++ b/tests/test_hdf_ioc.py @@ -12,7 +12,7 @@ import numpy import pytest import pytest_asyncio -from aioca import DBR_CHAR_STR, CANothing, caget, camonitor, caput +from aioca import DBR_CHAR_STR, CANothing, caget, caput from fixtures.mocked_panda import ( TIMEOUT, MockedAsyncioClient, @@ -340,9 +340,6 @@ async def test_hdf5_ioc(hdf5_subprocess_ioc): val = await caget(hdf5_test_prefix + ":Status", 
datatype=DBR_CHAR_STR) assert val == "OK" - val = await caget(hdf5_test_prefix + ":Capturing") - assert val == 0 - async def test_hdf5_ioc_parameter_validate_works( hdf5_subprocess_ioc_no_logging_check, tmp_path @@ -430,25 +427,8 @@ async def test_hdf5_file_writing_first_n( ) assert await caget(hdf5_test_prefix + ":NumCapture") == num_capture - # The queue expects to see Capturing go 0 -> 1 -> 0 as Capture is enabled - # and subsequently finishes - capturing_queue: asyncio.Queue = asyncio.Queue() - m = camonitor( - hdf5_test_prefix + ":Capturing", - capturing_queue.put, - ) - - # Initially Capturing should be 0 - assert await capturing_queue.get() == 0 - await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) assert await caget(hdf5_test_prefix + ":NumReceived") <= num_capture - assert await capturing_queue.get() == 1 - - # The HDF5 data will be processed, and when it's done Capturing is set to 0 - assert await asyncio.wait_for(capturing_queue.get(), timeout=TIMEOUT) == 0 - - m.close() # Capture should have closed by itself assert await caget(hdf5_test_prefix + ":Capture") == 0 @@ -525,24 +505,7 @@ async def test_hdf5_file_writing_forever(hdf5_subprocess_ioc, tmp_path: Path, ca # Since we're in forever mode it shouldn't matter what num_capture is assert await caget(hdf5_test_prefix + ":NumCapture") == num_capture - # The queue expects to see Capturing go 0 -> 1 -> 0 as Capture is enabled - # and subsequently finishes - capturing_queue: asyncio.Queue = asyncio.Queue() - m = camonitor( - hdf5_test_prefix + ":Capturing", - capturing_queue.put, - ) - - # Initially Capturing should be 0 - assert await capturing_queue.get() == 0 - await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) - assert await capturing_queue.get() == 1 - - # The HDF5 data will be processed, and when it's done Capturing is set to 0 - assert await asyncio.wait_for(capturing_queue.get(), timeout=TIMEOUT) == 0 - - m.close() # The test panda writes 10000 rows before the capture is finished assert await caget(hdf5_test_prefix + ":NumReceived") == 10000 @@ -618,28 +581,11 @@ async def test_hdf5_file_writing_last_n( ) assert await caget(hdf5_test_prefix + ":NumCapture") == num_capture - # The queue expects to see Capturing go 0 -> 1 -> 0 as Capture is enabled - # and subsequently finishes - capturing_queue: asyncio.Queue = asyncio.Queue() - m_capturing_queue = camonitor( - hdf5_test_prefix + ":Capturing", - capturing_queue.put, - ) - - # Initially Capturing should be 0 - assert await capturing_queue.get() == 0 - # Initially Status should be "OK" val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) assert val == "OK" await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) - assert await capturing_queue.get() == 1 - - # The HDF5 data will be processed, and when it's done Capturing is set to 0 - assert await asyncio.wait_for(capturing_queue.get(), timeout=TIMEOUT) == 0 - - m_capturing_queue.close() await asyncio.sleep(1) # Capture should have closed by itself @@ -777,7 +723,6 @@ def differently_sized_framedata(): def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path): filepath = str(tmp_path / "test_file.h5") - capturing_output = [] status_output = [] num_received_output = [] buffer = HDF5Buffer( @@ -785,7 +730,6 @@ def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path): filepath, 21, status_output.append, - capturing_output.append, num_received_output.append, ) buffer.put_data_to_file = lambda x: ... 
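Aside for readers, before the next patch changes the constructor: the buffer
above can be driven by hand exactly as the LAST_N test does. A minimal sketch
under the same assumptions (HDF5 pipeline stubbed out, frame values and the
file path illustrative; module paths as used by these tests and the
pandablocks client):

    import numpy
    from pandablocks.responses import (
        EndData, EndReason, FrameData, ReadyData, StartData,
    )
    from pandablocks_ioc._hdf_ioc import CaptureMode, HDF5Buffer

    statuses: list = []
    received: list = []
    buffer = HDF5Buffer(
        CaptureMode.LAST_N,
        "/tmp/test_file.h5",  # illustrative path; never opened here
        5,                    # keep only the last 5 rows
        statuses.append,
        received.append,
    )
    buffer.put_data_to_file = lambda data: None  # stub out the HDF5 pipeline

    buffer.handle_data(ReadyData())
    buffer.handle_data(StartData([], 0, "Scaled", "Framed", 52))
    buffer.handle_data(FrameData(numpy.arange(10).reshape(10, 1)))
    buffer.handle_data(EndData(10, EndReason.OK))
    # Only the last 5 of the 10 received rows remain buffered:
    assert buffer.number_of_rows_in_circular_buffer == 5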
From a0221c8c809657a1f14ca53deb693af09edb93c3 Mon Sep 17 00:00:00 2001 From: Eva Lott Date: Thu, 14 Dec 2023 11:00:08 +0000 Subject: [PATCH 09/12] Added a NumWritten PV Added a number written record the setter of which is passed into the client. --- src/pandablocks_ioc/_hdf_ioc.py | 27 +++++++++++++++-- tests/test-bobfiles/DATA.bob | 51 +++++++++++++++++++++++---------- tests/test_hdf_ioc.py | 2 ++ 3 files changed, 63 insertions(+), 17 deletions(-) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index d2d2a861..262b0251 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -56,6 +56,7 @@ def __init__( number_of_rows_to_capture: int, status_message_setter: Callable, number_received_setter: Callable, + number_captured_setter: Callable, ): # Only one filename - user must stop capture and set new FileName/FilePath # for new files @@ -65,6 +66,7 @@ def __init__( self.number_of_rows_to_capture = number_of_rows_to_capture self.status_message_setter = status_message_setter self.number_received_setter = number_received_setter + self.number_captured_setter = number_captured_setter if ( self.capture_mode == CaptureMode.LAST_N @@ -79,7 +81,9 @@ def put_data_to_file(self, data: HDFReceived): logging.exception(f"Failed to save the data to HDF5 file: {ex}") def start_pipeline(self): - self.pipeline = create_default_pipeline(iter([self.filepath])) + self.pipeline = create_default_pipeline( + iter([self.filepath]), number_captured_setter=self.number_captured_setter + ) def get_written_data_size(self): return min([ds.size for ds in self.pipeline[1].datasets]) @@ -271,6 +275,7 @@ class HDF5RecordController: _file_number_record: RecordWrapper _file_format_record: RecordWrapper _num_capture_record: RecordWrapper + _num_captured_record: RecordWrapper _flush_period_record: RecordWrapper _capture_control_record: RecordWrapper # Turn capture on/off _status_message_record: RecordWrapper # Reports status and error messages @@ -360,11 +365,28 @@ def __init__(self, client: AsyncioClient, record_prefix: str): record_prefix + ":" + num_capture_record_name.upper() ) + num_captured_record_name = EpicsName(self._DATA_PREFIX + ":NumCaptured") + self._num_captured_record = builder.longIn( + num_captured_record_name, + initial_value=0, + DESC="Number of frames written to file.", + ) + + add_automatic_pvi_info( + PviGroup.CAPTURE, + self._num_captured_record, + num_captured_record_name, + builder.longIn, + ) + self._num_captured_record.add_alias( + record_prefix + ":" + num_captured_record_name.upper() + ) + num_received_record_name = EpicsName(self._DATA_PREFIX + ":NumReceived") self._num_received_record = builder.longIn( num_received_record_name, initial_value=0, - DESC="Number of frames written to HDF file.", + DESC="Number of frames received from panda.", ) add_automatic_pvi_info( @@ -477,6 +499,7 @@ async def _handle_hdf5_data(self) -> None: num_capture, self._status_message_record.set, self._num_received_record.set, + self._num_captured_record.set, ) buffer.start_pipeline() diff --git a/tests/test-bobfiles/DATA.bob b/tests/test-bobfiles/DATA.bob index 22b8ae03..505e927c 100644 --- a/tests/test-bobfiles/DATA.bob +++ b/tests/test-bobfiles/DATA.bob @@ -3,7 +3,7 @@ 0 0 426 - 388 + 413 4 4 @@ -155,7 +155,7 @@ 5 141 416 - 181 + 206 true Label @@ -176,7 +176,7 @@ Label - DATA: Num Received + DATA: Num Captured 0 25 250 @@ -184,7 +184,7 @@ TextUpdate - TEST_PREFIX:DATA:NumReceived + TEST_PREFIX:DATA:NumCaptured 255 25 125 @@ -197,17 +197,38 @@ Label - DATA: Flush 
Period + DATA: Num Received 0 50 250 20 + + TextUpdate + TEST_PREFIX:DATA:NumReceived + 255 + 50 + 125 + 20 + + + + + 1 + + + Label + DATA: Flush Period + 0 + 75 + 250 + 20 + TextEntry TEST_PREFIX:DATA:FlushPeriod 255 - 50 + 75 125 20 1 @@ -216,7 +237,7 @@ Label DATA: Capture 0 - 75 + 100 250 20 @@ -232,7 +253,7 @@ Start 255 - 75 + 100 38 20 $(actions) @@ -249,7 +270,7 @@ Stop 298 - 75 + 100 38 20 $(actions) @@ -258,7 +279,7 @@ LED TEST_PREFIX: 350 - 75 + 100 20 20 @@ -266,7 +287,7 @@ Label DATA: Capture Mode 0 - 100 + 125 250 20 @@ -274,7 +295,7 @@ ComboBox TEST_PREFIX:DATA:CaptureMode 255 - 100 + 125 125 20 @@ -282,7 +303,7 @@ Label All Postion Capture Parameters 0 - 125 + 150 250 20 @@ -297,7 +318,7 @@ All Postion Capture Parameters 255 - 125 + 150 125 20 $(actions) @@ -306,7 +327,7 @@ OUTPUTS 5 - 327 + 352 416 56 true diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py index 07959625..98c969ef 100644 --- a/tests/test_hdf_ioc.py +++ b/tests/test_hdf_ioc.py @@ -725,12 +725,14 @@ def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path): filepath = str(tmp_path / "test_file.h5") status_output = [] num_received_output = [] + num_captured_output = [] buffer = HDF5Buffer( CaptureMode.LAST_N, filepath, 21, status_output.append, num_received_output.append, + num_captured_output.append, ) buffer.put_data_to_file = lambda x: ... From 91870165310e98c4ee4a8f5fd7b8e84525969f72 Mon Sep 17 00:00:00 2001 From: Eva Lott Date: Thu, 14 Dec 2023 14:04:43 +0000 Subject: [PATCH 10/12] Fixed bugs in the hdf writer Fixed bugs in HDF writing for last_n. Fixed bug where thread.start would be called multiple times for `NumCapturedSetter`. Added a test with larger data. Pinned pvxs in pyproject.toml, this will ideally be done in softioc itself soon. --- src/pandablocks_ioc/_hdf_ioc.py | 141 ++++++++++++------- tests/fixtures/mocked_panda.py | 5 +- tests/test_hdf_ioc.py | 242 +++++++++++++++++--------------- 3 files changed, 218 insertions(+), 170 deletions(-) diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py index 262b0251..034adae0 100644 --- a/src/pandablocks_ioc/_hdf_ioc.py +++ b/src/pandablocks_ioc/_hdf_ioc.py @@ -11,6 +11,7 @@ from pandablocks.hdf import ( EndData, FrameData, + Pipeline, StartData, create_default_pipeline, stop_pipeline, @@ -41,12 +42,23 @@ class CaptureMode(Enum): FOREVER = 2 +class NumCapturedSetter(Pipeline): + def __init__(self, number_captured_setter: Callable) -> None: + self.number_captured_setter = number_captured_setter + self.number_captured_setter(0) + super().__init__() + + self.what_to_do = {int: self.set_record} + + def set_record(self, value: int): + self.number_captured_setter(value) + + class HDF5Buffer: _buffer_index = None start_data = None number_of_received_rows = 0 finish_capturing = False - circular_buffer: Deque[FrameData] = deque() number_of_rows_in_circular_buffer = 0 def __init__( @@ -56,17 +68,30 @@ def __init__( number_of_rows_to_capture: int, status_message_setter: Callable, number_received_setter: Callable, - number_captured_setter: Callable, + number_captured_setter_pipeline: NumCapturedSetter, ): # Only one filename - user must stop capture and set new FileName/FilePath # for new files + self.circular_buffer: Deque[FrameData] = deque() self.capture_mode = capture_mode + + match capture_mode: + case CaptureMode.FIRST_N: + self._handle_FrameData = self._capture_first_n + case CaptureMode.LAST_N: + self._handle_FrameData = self._capture_last_n + case CaptureMode.FOREVER: + self._handle_FrameData = self._capture_forever 
+ case _: + raise RuntimeError("Invalid capture mode") + self.filepath = filepath self.number_of_rows_to_capture = number_of_rows_to_capture self.status_message_setter = status_message_setter self.number_received_setter = number_received_setter - self.number_captured_setter = number_captured_setter + self.number_captured_setter_pipeline = number_captured_setter_pipeline + self.number_captured_setter_pipeline.number_captured_setter(0) if ( self.capture_mode == CaptureMode.LAST_N @@ -74,6 +99,12 @@ def __init__( ): raise RuntimeError("Number of rows to capture must be > 0 on LAST_N mode") + self.start_pipeline() + + def __del__(self): + if self.pipeline[0].is_alive(): + stop_pipeline(self.pipeline) + def put_data_to_file(self, data: HDFReceived): try: self.pipeline[0].queue.put_nowait(data) @@ -82,15 +113,9 @@ def put_data_to_file(self, data: HDFReceived): def start_pipeline(self): self.pipeline = create_default_pipeline( - iter([self.filepath]), number_captured_setter=self.number_captured_setter + iter([self.filepath]), self.number_captured_setter_pipeline ) - def get_written_data_size(self): - return min([ds.size for ds in self.pipeline[1].datasets]) - - def stop_pipeline(self): - stop_pipeline(self.pipeline) - def _handle_StartData(self, data: StartData): if self.start_data and data != self.start_data: # PandA was disarmed, had config changed, and rearmed. @@ -111,12 +136,19 @@ def _handle_StartData(self, data: StartData): self.finish_capturing = True - if self.start_data is None: - # Only pass StartData to pipeline if we haven't previously - # - if we have there will already be an in-progress HDF file - # that we should just append data to + # Only pass StartData to pipeline if we haven't previously + else: + # In LAST_N mode, wait till the end of capture to write + # the StartData to file. + # In FOREVER mode write the StartData to file if it's the first received. 
+ if ( + self.capture_mode == CaptureMode.FIRST_N + or self.capture_mode == CaptureMode.FOREVER + and not self.start_data + ): + self.put_data_to_file(data) + self.start_data = data - self.put_data_to_file(data) def _capture_first_n(self, data: FrameData): """ @@ -180,14 +212,15 @@ def _capture_last_n(self, data: FrameData): first_frame_data = self.circular_buffer.popleft() first_frame_data_length = len(first_frame_data.data) - if first_frame_data_length >= self.number_of_rows_to_capture: + if first_frame_data_length > self.number_of_rows_to_capture: # More data than we want to capture, all in a single FrameData # We can just slice with the NumCapture since this has to be the # only FrameData in the buffer at this point assert len(self.circular_buffer) == 0 - first_frame_data.data = first_frame_data.data[ + shrinked_data = first_frame_data.data[ -self.number_of_rows_to_capture : ].copy() + first_frame_data.data = shrinked_data self.circular_buffer.appendleft(first_frame_data) self.number_of_rows_in_circular_buffer = self.number_of_rows_to_capture elif ( @@ -201,9 +234,8 @@ def _capture_last_n(self, data: FrameData): self.number_of_rows_in_circular_buffer - self.number_of_rows_to_capture ) - first_frame_data.data = first_frame_data.data[ - indices_to_discard: - ].copy() + shrinked_data = first_frame_data.data[indices_to_discard:].copy() + first_frame_data.data = shrinked_data self.circular_buffer.appendleft(first_frame_data) self.number_of_rows_in_circular_buffer -= indices_to_discard assert ( @@ -217,40 +249,48 @@ def _capture_last_n(self, data: FrameData): self.number_received_setter(self.number_of_received_rows) - def _handle_FrameData(self, data: FrameData): + def _handle_EndData(self, data: EndData): match self.capture_mode: - case CaptureMode.FIRST_N: - self._capture_first_n(data) case CaptureMode.LAST_N: - self._capture_last_n(data) - case CaptureMode.FOREVER: - self._capture_forever(data) - - def _handle_EndData(self, data: EndData): - if self.capture_mode == CaptureMode.LAST_N: - # Put all the data to file - self.status_message_setter( - "Finishing capture, writing buffered frames to file" - ) - # In LAST_N only write FrameData if the EndReason is OK - if data.reason == EndReason.OK: + # In LAST_N only write FrameData if the EndReason is OK + if data.reason not in (EndReason.OK, EndReason.MANUALLY_STOPPED): + self.status_message_setter( + f"Stopped capturing with reason {data.reason}, " + "skipping writing of buffered frames" + ) + self.finish_capturing = True + return + + self.status_message_setter( + "Finishing capture, writing buffered frames to file" + ) + self.put_data_to_file(self.start_data) for frame_data in self.circular_buffer: self.put_data_to_file(frame_data) - if self.capture_mode == CaptureMode.FOREVER: - self.start_data = None - self.status_message_setter("Finished capture, waiting for next ReadyData") - else: - self.finish_capturing = True - self.status_message_setter("Finished capture") + case CaptureMode.FOREVER: + if data.reason != EndReason.MANUALLY_STOPPED: + self.status_message_setter( + "Finished capture, waiting for next ReadyData" + ) + return + + case CaptureMode.FIRST_N: + pass # Frames will have already been written in FirstN + + case _: + raise RuntimeError("Unknown capture mode") + self.status_message_setter("Finished capture") + self.finish_capturing = True self.put_data_to_file(data) def handle_data(self, data: HDFReceived): match data: case ReadyData(): - self.status_message_setter("Starting capture") + pass case StartData(): + 
self.status_message_setter("Starting capture") self._handle_StartData(data) case FrameData(): self._handle_FrameData(data) @@ -259,7 +299,7 @@ def handle_data(self, data: HDFReceived): case _: raise RuntimeError( f"Data was recieved that was of type {type(data)}, not" - "StartData, EndData, ReadyData or FrameData" + "StartData, EndData, ReadyData, or FrameData" ) @@ -493,16 +533,18 @@ async def _handle_hdf5_data(self) -> None: num_capture: int = self._num_capture_record.get() capture_mode: CaptureMode = CaptureMode(self._capture_mode_record.get()) filepath = self._get_filepath() + + number_captured_setter_pipeline = NumCapturedSetter( + self._num_captured_record.set + ) buffer = HDF5Buffer( capture_mode, filepath, num_capture, self._status_message_record.set, self._num_received_record.set, - self._num_captured_record.set, + number_captured_setter_pipeline, ) - buffer.start_pipeline() - flush_period: float = self._flush_period_record.get() async for data in self._client.data( scaled=False, flush_period=flush_period @@ -518,9 +560,9 @@ async def _handle_hdf5_data(self) -> None: self._status_message_record.set("Capturing disabled") # Only send EndData if we know the file was opened - could be cancelled # before PandA has actually send any data - if buffer.start_data: + if buffer.capture_mode != CaptureMode.LAST_N: buffer.put_data_to_file( - EndData(buffer.number_of_received_rows, EndReason.OK) + EndData(buffer.number_of_received_rows, EndReason.MANUALLY_STOPPED) ) except Exception: @@ -532,14 +574,13 @@ async def _handle_hdf5_data(self) -> None: ) # Only send EndData if we know the file was opened - exception could happen # before file was opened - if buffer.start_data: + if buffer.start_data and buffer.capture_mode != CaptureMode.LAST_N: buffer.put_data_to_file( EndData(buffer.number_of_received_rows, EndReason.UNKNOWN_EXCEPTION) ) finally: logging.debug("Finishing processing HDF5 PandA data") - buffer.stop_pipeline() self._num_received_record.set(buffer.number_of_received_rows) self._capture_control_record.set(0) diff --git a/tests/fixtures/mocked_panda.py b/tests/fixtures/mocked_panda.py index 495e2944..7c2b9c0c 100644 --- a/tests/fixtures/mocked_panda.py +++ b/tests/fixtures/mocked_panda.py @@ -227,15 +227,12 @@ async def data( flush_every_frame = flush_period is None conn = DataConnection() conn.connect(scaled) - try: - f = open(Path(__file__).parent.parent / "raw_dump.txt", "rb") + with open(Path(__file__).parent.parent / "raw_dump.txt", "rb") as f: for raw in chunked_read(f, 200000): for data in conn.receive_bytes( raw, flush_every_frame=flush_every_frame ): yield data - finally: - f.close() def get_multiprocessing_context(): diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py index 98c969ef..1d6d8e6a 100644 --- a/tests/test_hdf_ioc.py +++ b/tests/test_hdf_ioc.py @@ -35,7 +35,12 @@ ) from softioc import asyncio_dispatcher, builder, softioc -from pandablocks_ioc._hdf_ioc import CaptureMode, HDF5Buffer, HDF5RecordController +from pandablocks_ioc._hdf_ioc import ( + CaptureMode, + HDF5Buffer, + HDF5RecordController, + NumCapturedSetter, +) NAMESPACE_PREFIX = "HDF-RECORD-PREFIX" @@ -208,7 +213,7 @@ def fast_dump_expected(): [8, 58, 58, 174, 0.570000056, 58, 116], ) ), - EndData(58, EndReason.DISARMED), + EndData(58, EndReason.OK), ] @@ -430,10 +435,12 @@ async def test_hdf5_file_writing_first_n( await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) assert await caget(hdf5_test_prefix + ":NumReceived") <= num_capture + await asyncio.sleep(1) # Capture should have 
closed by itself assert await caget(hdf5_test_prefix + ":Capture") == 0 assert await caget(hdf5_test_prefix + ":NumReceived") == num_capture + assert await caget(hdf5_test_prefix + ":NumCaptured") == num_capture # Confirm file contains data we expect with h5py.File(tmp_path / test_filename, "r") as hdf_file: assert list(hdf_file) == [ @@ -455,83 +462,8 @@ async def test_hdf5_file_writing_first_n( ) -async def test_hdf5_file_writing_forever(hdf5_subprocess_ioc, tmp_path: Path, caplog): - """Test that an HDF5 file is written when Capture is enabled""" - - test_prefix, hdf5_test_prefix = hdf5_subprocess_ioc - num_capture = 10 - - val = await caget(hdf5_test_prefix + ":CaptureMode") - assert val == CaptureMode.FIRST_N.value - await caput(hdf5_test_prefix + ":CaptureMode", CaptureMode.FOREVER.value, wait=True) - val = await caget(hdf5_test_prefix + ":CaptureMode") - assert val == CaptureMode.FOREVER.value - - test_dir = tmp_path - test_filename = "test.h5" - await caput( - hdf5_test_prefix + ":HDFDirectory", - str(test_dir), - wait=True, - datatype=DBR_CHAR_STR, - ) - val = await caget(hdf5_test_prefix + ":HDFDirectory", datatype=DBR_CHAR_STR) - assert val == str(test_dir) - - await caput( - hdf5_test_prefix + ":HDFFileName", "name.h5", wait=True, datatype=DBR_CHAR_STR - ) - val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) - assert val == "name.h5" - - await caput( - hdf5_test_prefix + ":HDFFileName", - test_filename, - wait=True, - timeout=TIMEOUT, - datatype=DBR_CHAR_STR, - ) - val = await caget(hdf5_test_prefix + ":HDFFileName", datatype=DBR_CHAR_STR) - assert val == test_filename - - val = await caget(hdf5_test_prefix + ":HDFFullFilePath", datatype=DBR_CHAR_STR) - assert val == "/".join([str(tmp_path), test_filename]) - - assert await caget(hdf5_test_prefix + ":NumCapture") == 0 - await caput( - hdf5_test_prefix + ":NumCapture", num_capture, wait=True, timeout=TIMEOUT - ) - - # Since we're in forever mode it shouldn't matter what num_capture is - assert await caget(hdf5_test_prefix + ":NumCapture") == num_capture - - await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT) - - # The test panda writes 10000 rows before the capture is finished - assert await caget(hdf5_test_prefix + ":NumReceived") == 10000 - assert await caget(hdf5_test_prefix + ":Capture") == 0 - # Confirm file contains data we expect - with h5py.File(tmp_path / test_filename, "r") as hdf_file: - assert list(hdf_file) == [ - "COUNTER1.OUT.Max", - "COUNTER1.OUT.Mean", - "COUNTER1.OUT.Min", - "COUNTER2.OUT.Mean", - "COUNTER3.OUT.Value", - "PCAP.BITS2.Value", - "PCAP.SAMPLES.Value", - "PCAP.TS_START.Value", - ] - - assert len(hdf_file["/COUNTER1.OUT.Max"]) == 10000 - assert ( - await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) - == "Finished capture, waiting for next ReadyData" - ) - - @pytest.mark.parametrize("num_capture", [1, 1000, 10000]) -async def test_hdf5_file_writing_last_n( +async def test_hdf5_file_writing_last_n_endreason_not_ok( hdf5_subprocess_ioc, tmp_path: Path, caplog, num_capture ): """Test that an HDF5 file is written when Capture is enabled""" @@ -592,30 +524,20 @@ async def test_hdf5_file_writing_last_n( assert await caget(hdf5_test_prefix + ":Capture") == 0 val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) - assert val == "Finished capture" + assert ( + val == "Stopped capturing with reason EndReason.DISARMED, " + "skipping writing of buffered frames" + ) # We received all 10000 frames even if we asked to capture fewer. 
assert await caget(hdf5_test_prefix + ":NumReceived") == 10000 - # Confirm file contains data we expect - with h5py.File(tmp_path / test_filename, "r") as hdf_file: - assert list(hdf_file) == [ - "COUNTER1.OUT.Max", - "COUNTER1.OUT.Mean", - "COUNTER1.OUT.Min", - "COUNTER2.OUT.Mean", - "COUNTER3.OUT.Value", - "PCAP.BITS2.Value", - "PCAP.SAMPLES.Value", - "PCAP.TS_START.Value", - ] - # No data since we didn't receive an okay EndReason - assert len(hdf_file["/COUNTER1.OUT.Max"]) == 0 + # We didn't write any frames since the endreason was `EndReason.DISARMED`, + # not endreason `EndReason.OK` + assert await caget(hdf5_test_prefix + ":NumCaptured") == 0 - assert ( - await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR) - == "Finished capture" - ) + # Confirm no data was written + assert not (tmp_path / test_filename).exists() @pytest_asyncio.fixture @@ -721,20 +643,63 @@ def differently_sized_framedata(): ] +def test_hdf_buffer_forever(differently_sized_framedata, tmp_path): + filepath = str(tmp_path / "test_file.h5") + status_output = [] + num_received_output = [] + num_captured_output = [] + frames_written_to_file = [] + num_captured_output = [] + num_captured_setter_pipeline = NumCapturedSetter(num_captured_output.append) + buffer = HDF5Buffer( + CaptureMode.FOREVER, + filepath, + 21, + status_output.append, + num_received_output.append, + num_captured_setter_pipeline, + ) + buffer.put_data_to_file = frames_written_to_file.append + + for data in differently_sized_framedata: + buffer.handle_data(data) + + assert buffer.number_of_received_rows == 58 + assert not buffer.finish_capturing + + differently_sized_framedata[-1] = EndData(58, EndReason.MANUALLY_STOPPED) + + for data in differently_sized_framedata: + buffer.handle_data(data) + + assert buffer.number_of_received_rows == 116 + assert buffer.finish_capturing + + assert len(frames_written_to_file) == 14 + sum( + len(frame.data) + for frame in frames_written_to_file + if isinstance(frame, FrameData) + ) == 116 + + def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path): filepath = str(tmp_path / "test_file.h5") status_output = [] num_received_output = [] num_captured_output = [] + frames_written_to_file = [] + num_captured_output = [] + num_captured_setter_pipeline = NumCapturedSetter(num_captured_output.append) buffer = HDF5Buffer( CaptureMode.LAST_N, filepath, 21, status_output.append, num_received_output.append, - num_captured_output.append, + num_captured_setter_pipeline, ) - buffer.put_data_to_file = lambda x: ... 
+ buffer.put_data_to_file = frames_written_to_file.append for data in differently_sized_framedata: buffer.handle_data(data) @@ -786,10 +751,63 @@ def test_hdf_buffer_last_n(differently_sized_framedata, tmp_path): ] ) - for output_data, expected_data in zip( - buffer.circular_buffer, expected_cut_off_data - ): - numpy.testing.assert_array_equal(output_data.data, expected_data.data) + output_frames = [ + frame_data + for frame_data in frames_written_to_file + if isinstance(frame_data, FrameData) + ] + for expected_frame, output_frame in zip(expected_cut_off_data, output_frames): + numpy.testing.assert_array_equal(expected_frame.data, output_frame.data) + + +def test_hdf_buffer_last_n_large_data(tmp_path): + filepath = str(tmp_path / "test_file.h5") + status_output = [] + num_received_output = [] + num_captured_output = [] + frames_written_to_file = [] + num_captured_setter_pipeline = NumCapturedSetter(num_captured_output.append) + buffer = HDF5Buffer( + CaptureMode.LAST_N, + filepath, + 25000, + status_output.append, + num_received_output.append, + num_captured_setter_pipeline, + ) + buffer.put_data_to_file = frames_written_to_file.append + + large_data = [ + ReadyData(), + StartData([], 0, "Scaled", "Framed", 52), + FrameData(numpy.zeros((25000))), + FrameData(numpy.zeros((25000))), + FrameData(numpy.zeros((25000))), + FrameData(numpy.zeros((25000))), + FrameData(numpy.zeros((25000))), + FrameData(numpy.append(numpy.zeros((15000)), numpy.arange(1, 10001))), + EndData(150000, EndReason.OK), + ] + + for data in large_data: + buffer.handle_data(data) + + assert buffer.number_of_received_rows == 150000 + assert buffer.number_of_rows_in_circular_buffer == 25000 + + expected_output = [ + StartData([], 0, "Scaled", "Framed", 52), + FrameData(numpy.append(numpy.zeros((15000)), numpy.arange(1, 10001))), + EndData(150000, EndReason.OK), + ] + + output_frames = [ + frame_data + for frame_data in frames_written_to_file + if isinstance(frame_data, FrameData) + ] + assert len(output_frames) == 1 + numpy.testing.assert_array_equal(output_frames[0].data, expected_output[1].data) def test_hdf_parameter_validate_not_capturing(hdf5_controller: HDF5RecordController): @@ -851,8 +869,6 @@ async def mock_data(scaled, flush_period): assert pipeline_mock[0].queue.put_nowait.call_count == 7 pipeline_mock[0].queue.put_nowait.assert_called_with(EndData(5, EndReason.OK)) - mock_stop_pipeline.assert_called_once() - @patch("pandablocks_ioc._hdf_ioc.stop_pipeline") @patch("pandablocks_ioc._hdf_ioc.create_default_pipeline") @@ -888,12 +904,10 @@ async def mock_data(scaled, flush_period): hdf5_controller._status_message_record.get() == "Requested number of frames captured" ) - # len 12 as ReadyData isn't pushed to pipeline, only Start and Frame data. 
- assert pipeline_mock[0].queue.put_nowait.call_count == 12 + # len 13 for 2 StartData, 10 FrameData and 1 EndData + assert pipeline_mock[0].queue.put_nowait.call_count == 13 pipeline_mock[0].queue.put_nowait.assert_called_with(EndData(10, EndReason.OK)) - mock_stop_pipeline.assert_called_once() - @patch("pandablocks_ioc._hdf_ioc.stop_pipeline") @patch("pandablocks_ioc._hdf_ioc.create_default_pipeline") @@ -963,8 +977,6 @@ async def mock_data(scaled, flush_period): EndData(1, EndReason.START_DATA_MISMATCH) ) - mock_stop_pipeline.assert_called_once() - @patch("pandablocks_ioc._hdf_ioc.stop_pipeline") @patch("pandablocks_ioc._hdf_ioc.create_default_pipeline") @@ -1016,9 +1028,9 @@ async def mock_data(scaled, flush_period): assert hdf5_controller._status_message_record.get() == "Capturing disabled" # len 2 - one StartData, one EndData assert pipeline_mock[0].queue.put_nowait.call_count == 2 - pipeline_mock[0].queue.put_nowait.assert_called_with(EndData(0, EndReason.OK)) - - mock_stop_pipeline.assert_called_once() + pipeline_mock[0].queue.put_nowait.assert_called_with( + EndData(0, EndReason.MANUALLY_STOPPED) + ) @patch("pandablocks_ioc._hdf_ioc.stop_pipeline") @@ -1078,8 +1090,6 @@ async def mock_data(scaled, flush_period): EndData(0, EndReason.UNKNOWN_EXCEPTION) ) - mock_stop_pipeline.assert_called_once() - async def test_capture_on_update( hdf5_controller: HDF5RecordController, From 63e97dc23a094243f5358b0c2c2ea66c0b7d6f09 Mon Sep 17 00:00:00 2001 From: Eva Lott Date: Fri, 5 Jan 2024 13:21:55 +0000 Subject: [PATCH 11/12] Wrote user docs for saving data --- docs/images/data_bobfile.png | Bin 0 -> 41547 bytes docs/user/how-to/capture-hdf.rst | 38 +++++++++++++++++++++++++++++++ docs/user/index.rst | 1 + 3 files changed, 39 insertions(+) create mode 100644 docs/images/data_bobfile.png create mode 100644 docs/user/how-to/capture-hdf.rst diff --git a/docs/images/data_bobfile.png b/docs/images/data_bobfile.png new file mode 100644 index 0000000000000000000000000000000000000000..4f4747c99495db720624e530b28794d4b3bec1d2 GIT binary patch literal 41547 zcmcG$1ymdT-nKhHDORKuDPD>eFVf-^in~j2cXy{qDPG*&y|`O(r?@)=5AO1%&$I2_ za^AhZbIz>A00Rkukj(YF@9Uo+S!oeeWPD@*08qt51?2$%rV091f(Q#eGB}%N27L!- zFCeCfh={nbCbJ3vZvZhtenpp*gC%E;S6~9*7<^QTL`nxEhh^i3gCJck3XiZZTo|J4 ztxPMv|EW~j7Q(q#FjYjYgl87wu0u4XObaSm{{%mo^@a@Qi~pwG)tz4$=`+N)7dKwl z9x;n|tydc1+^*Iwj2)G9Z~QUF;nZee-uTP5e8GD~>VF>a5Bu)IeL(g-<;D4WxzkF1 zi1r)AsP!Q7IMhRRugQi$_lR0$SOJxXO??!jLvKkq03$1{%ImtQSgx@9Aeq*@?fNmV zah$!L2+k{EMjhm)6w9hy=CT&fMzGZ5qlSB_<;DeandBXJZ-4K^ zz=zz%hOR^eKnLe_X%Tk@KIDa>wl2w;XII5-UyH=6mq7M_w5QvZwBFa?ytA|vt$pu} z`K4nr+FR9@rG;^BUOcCFfzQJyHKO8U3*B5dk8``V{D#Ci1qefwJ)fh>V?PWSE%JG9 z1Vud$&rri1{jl8)?^-(CAUKuuVY619v#DV|2w6bUxXt4=!PVQqyfb3qkrTt=@_;aF zP@cVN>Z+p575&_;8l?O7;?~UAIODdcOyL8K@_6aXtA{l)u`8iiFUV$+T&W=ZiWT94 zT&CE$dhC?*%oPR<(CfCo?FiQmv{w+~CW3USz8NaT8!y-)gHk-za>>eyK59B*TXGE+ zX=$&pmb^y?fMg@gG{7e3p_Y$sDkhwFn`;iQptYTr55ia0FyX_I zN##vA7lQyeoH7=B-?Nl!;9cl6A&XI|iQS>&$LWw+7?OIj^13 zc*W&Obb3mt>e0MbMs`SH0a+eik3GmevqbFsHsV_mr1px}>;>@w4^)2c-(YqQSY8@y zO|q6x=}p{bNz#`h-#Uacw++P>(;$cx2c%1;ugs*toriqOP4*fsQchx?9MP%T2rtv9 zw^{ocP%P60*6k@lXVRZ0NZD?=|PxWT2Q{+(z#s(h^ zZE1P?=S%4A=?`HH}3;hL?>W-$T#hvL~+lG%O zb!y|_o42!Pp1H;@QiA+e5n0OBP!L2Uze!5nINmse5iIu1OIRom!zMk9F~&z8R~*W2 zi{zF%Le;#PO#4TXk5JcVRmDSib0u%BBjpNN0UP}0h8pL=PT%-Bte;W-7P8A64s@5r z5gIpBYP!QoM$C+5$9T@{O`F)e;5N!5V!Bd0<6tsJd~W-Tq&c`l@guq-H5-FhEAUm$ zsgK)NDfxW#bMMPkFuuoJMq#^vx!_tze&4}EXMElv3b4Sj1)rI|!ZoR({HRD;Uj_^i z^PcofUtmB*VHrK5k!_)->Bk;zNlTt12Kyaj>Rb~HlI(Fxn5@~x>SAVUOSOe_MNbFP 
z{Dbw$=J&^y?yaSJtKBbaA+EkfDVc7+e;5_?hrP1c?zU8C+ya1O!oW-WcL#Y)PiaVe zsZww@?66-{Wu2VI2T}+rO5F&(&TpHKS{!-gv!%k4DRVSi?{~+J<&BT17b0kQ)O#xq zIfqtODx%H}eGVyej#zMRiSgZ64Y%iFi=4|glFAx`>ona@ik_=1 zwi@)cdP^)t5Z5(C_`?CsUu#h>8vT}jaD9HDyja0y)M|D)u?C?nuV^u@)|_5N(ySeX zvlT0sf@;jBDg#*f_QC7-{VDpRNznV#8W7asg-gS;v(5U?Cv=4b*;oAq_LawLNavvjYTGE&d+xO)wz*5KY)nGC+2!f z)F#;GoOo^%rVz_~c_{}glVY;daW7xPoD&6ge&YIkW-*%K8!O6-+8Ic_4}YNFy24Hv zIa%v?^(B4u?s2B=NMn}0(y%|M{eD(I@Q515NB_M|U+=uIYev!q!LsHvp!&c-P>I;Q zq&v@i0CnZ^u*M2RjSC0OBsfT>>I$4&%`ONBhsipblm;&}X}P z#a-R9*Pv3E;6c(ahLfzrmGCTeHXPPlyX#xIin3Uw{(He+Ffq^cRuK1(_K(JLwTC!f z=~(iRfz*D?hK_>IZn)EAO)Mtz;vu0@RK!Lsxkb2ij6#zRN7yyGKR+Xqes*!zWY-Eb zoT)lHJ>4`7@)n{aBIhL8*=D=aFgs~4YQF4H88^($;Zt4CcAKyu^Or4k3A7iBB5akV z-t>_***xI262ip|;Xi^YZnm;IkAad81=EK}WnBS2e+;+Yy(8Alu{rOe2)fg2ZC6Je zSOG*OF$&GbKy(tVHZM(gDKz7Q*@|4PjP!$K-W_ML{#+8uLoKuyML#SB^i~G=T`ycY z;lf~i(zs{ONmg7owjR|-ET)-89;EDA>A)uzI5yI!4Qb8s;ev4>gME%=pT4mtYDkhJ@_7e z--rxD>R+@?q?MF+R>{o5Osx}hitEhiU#4l-N4;Z`QGC241idiZ=@D}tjZYqaV7?OB#Na($p8H4X0?#>p8Fkr@$nuv`T{`wiiUzM)5uCA`KlG$Y=;r&Zy?ytI! zk9QDv`Q0|lg(CLrckcPrUn93S4R}3fV+Yw-SXeAJ>KUQmmna!Gu*O|g1qT$3kN1sj z8X1Y%=jiDVdQw(_c6sDBNwdtBi;4bug+k zeDlnAn_qjkj-DW2sQ68g{E}fme7sLo&A~YvX6is+b8dD|s9P%_qUNO-&xhXFl(GIc zrisS;f_)F(bjIE0+YhDLX-7I{-h|f!NxElM3KRB|*525PsL9y4o@`=DoyxLAZ{G!P z+MVQ&c(D`uGTp;5vq2D*KGbshP_E5H+T1O1V z^cyg%4a6o2W!svRsuWl3aiBGq8C^3xn&YB7mh46*yxq<(bk{pGOLCDTB1QCtQgqgF zewi0ylm{P+(c0{u09lZbzVVeUEt&{D)AFkyd%n#(6J?FBUSdL6tR4lm*&TMom46Lt zLhZ!8M&_)lywWGlts?f}baHN=Jp5>9PCD&tyDE_&>OW68GWIK!dArw? zf9(Wcc4?W-({{3Skl^X#thG5EDiEBrW6A&M*TUT*0$&o`Ov@-aX(4@o0%^UST$nKL z>k`U*kJmm>ZgH*G|7){&0XDycJNYM&e0DvVho|q^%#CpE^^(wzrzV3yv+}u){7Bgg z#Bev|l)FS@xe(athqw`wAmWx+H7B-0N~dW?Lv{mUr6b!KT=jhJ^^jgvPBfyWRneV> znOo0qj#JrZo2SkAB-gD(#iU7@+AC;WuG&37-;vN)YfbE=E#4axXZ0vC9uD*N7zxye znIqIC-hAT$H|r-J853Z8i}B0y%Ke1dcu^^V@1lGw0XrHZ85N|1h{AVzIl7f_O%IhZ zLK^Zk<#;Ey65#*Hw8COMgRz3@O$&&Z!t^~K9(q;lbv4-eQvow6pQ`r2Mfb?$E0w}a z<{S_^i>8@{BUw~GI|)ANIoIOUw^dG5qWx;hP1W&ZF|=5(?~YR&PDx&?8JA`?>}btt z$rf7o_+l?d*;>3b*=s4_+sO|W@pW+>$i_M&VSf-|b3b`3P^_gqd7homj_A|3g(n{T zig{+zdVnxe-u4*%F-Ho;oRq?P{xTa!etS2ZB!cbXcGM?AT^2wU@!$eW?yKp3gj=}E z3Z4OgOTULZ1}$e+C$nT}?lEo-&&@AjEJ#lR)4phjoU5kD34d{}rY!Busd$gFu@)gZ zhtbDvJ$D#D7L9jf(a)K%;Yv-mNt`mM#Hs~Eh|n74Bk@r|@-$Lm!gody$+iy58lM=H ztai+md>SRL2o$^`={i*(F<1TZe7tC(z8?T`%d`rAADkaCm!JKhdoVlC@5Dxm>ZTU2 z1!p>CeK2NiMAFJzrNH=@Js}e9;oFR_^Rwi0!ux#MQmb_>dC5RX;o-Qyz`-tEW~j-qL&u9U7NW|s#tKBxw#&vN>C4qfrL z^d$)C7mptHA#{`Ix|5(UUg-2FC>eCPH`y1KveSPW)@e_A25eO&&uFPR?9SIKO6~VtHhAO414;vD z#@t-$A$2pPd(3tge!6n;!mJ!cuWK@s>*oplWgk=|R#cy}ThYE&=OH7bzu7%x;W9=e zXc(k6k;o|i9;?l1^TxgJx+6S~0yo{Vr6&}JWXXF^sT-+i+$|{wrO7$nGsU`H)w~5y zn!uV59Kz+SIcfm-MD|)7AM!vno)4N%cpDUlxL+>Cu>isOkt^XZzKQAp)MzTviHY5? zHYv|T&FpOZQ)B|`diy`^VP3vpL4bg3 z?350eooP`}Q1*pKT6)XBQ3f7060)^5#pr0I&f~s2COEVLQJQ7$_nm};KWAaGVpprE z@c_5BlPPm+SJ+Bzs#EWg6UtipD&*OmJR`92M^9fBN_p;zGPB46y=FUcnfg{A8jb584JZxv+Zmm3! 
z!Xr47#D-B_`9Zv;bW&kgD)z0!eo$~i$y$7G+e&g*PPy5J$HxrnYqlg9@QSt#v)!0h z-$G&u8f#1M(V;GDjZv>r2B4`Couz4yMW;IknU?H&E#59`AK7xx)HuMAL|8D5w4|uN z1(6XWZXx4KlNeGxFhWb}Q}R3xn&(x*&4ww46VnUZ+@qZ*zIKOLPWZEl97B%)O zMKL)Rj!GxX^CcPA-!(X3846+SGt#Zp6j7(i5xA}mycCg)Hka1S(~m0APGscA%?btK zO1B*ONeIAG41tB->9F%|mGaLP!svja=ogD?6Ub@0Yzz@{%ly3Lh}R! zyu=L>x2fOlTIV&?U5(E|T8dZE)!uj`5J(PU>ahqoi@hL#qFa~C$GaaOuolx|O=WtI zFBDX|<2h#9e>Uk{1QY6w-{t-R=u5yK@FFwn0zemjk}|4UzMrtRwvL`0*=D2jr;Q{P z6L3!ztJ1=3|F#3h4E-X*&({5uq1&GWC-jTrF@;n9J!T3n#5_s{lfH$Lgp7;*N+2rlayQ=J?eQAnq~C`V44z$%JeswDciN+lE(6%uWCI0j zXJ&y%``z=q)XPVn0;EbocV}7OR_}_#vFH1cNRpig4AxX7l~ZN#fCoWer940%I`F*6 zASFUti-xQE>7x`)Dg-_E!MZ}wR&`)#hVD%IZQS^2BcgJ>8}e+5cJCS z^(K=GO+p4#LZ}lL4ZyI}q+MKZLAJsu6r_ z94E=@x4Sf}tNz}p~k7tu9{G7xKOr z*uYJhk4S|*7Fe2m5-?VgxW8?w@$G%G@1d7h$J+!sUYDt}&R@%&+ge~SSXEWk`5_%Z zBs(wfzNO`y-KGr>P>0tL1nNnbfMo|o=k|HL_nYcZAhZOPy{pp_5Em;v_l`TJb<(9# zdwd<3Ho$B+W-U&iGkoZ-^87mi@tO0oC?c07XP{NdU^?Hsol%*F;j0|wncqK$>_ET4 z+zy+ZKs#49>DL;P3|t6Ac=*8@HgZlBpzlg%0aiSMHQ)`MI4hGt_e)Dl$A1p%%+Ulo z+u~~H^fqKVJ`-XTGTfci!M3QAGB~|*eeEBJ%PW!)5{4FzGuzM7y6r`d@?fr+_;e|4 zw0G)cZE944<71%L6WcQ#osQ!zJ(~2iG#=OR=C7G5(^y^OIIj-EQ;d8f-OZOk(F4R^ zd&`DdOnW=d^p>6Gm9a1UUi|bzhA&XrM3KN8$bBf^vY^JcH0enSJx+{>vqIzI9aBDc z!}d$5(7MNts{3X?a=O|LXyuo;5;*(2Vu$PM;59DznX#t+$Q9zpyWqRa5!ZqNp9Gk? zO=VjG+9PiC_V$|Bx-2Pj!+Lsg>-_c8Q_sORd*xj-PUvg{DQSKLW|pzCy}X3qG9$}e zwclt^0DS+-FpIChKkxnf$7g4XF!eK(KS(RNltUACqa$8;EB-JTpH8cFjd{@JYQqpU zfQ_SsTz~&vp08b^APoI&7-7COUUE#x<*RtezBwP$;3azb=?7X!qXK?poO) zD)3Pg*xqc5D7&oP!vDFdcAH-?!=;MgFF`CRH#4+Aoo^-!A)RLu-TZ*Q3YNOa zUhQ}bIR{_2xn|cJsA{k6;5T=;z~Rrpq>}^eGNHwU@tkl$E0TSh>$p%7aHZPJGWMMDdI$hH7p3{Aj;V^#$5avK3rLuh6_wev-s&b$1&cIF`TrY zAj-zeFC#_APt&fw`KD~Rq!=)(i;a;iU^y>n!LZS@w*cDJ8;cTukN1 zj`m8*>o0nFnb{c(&GxoFx2%-sSJlG>6n5!&x=E}TL8}vfL|gQ!fBT*TUTnOX*xD>^ z&5P4pp3~)Qc6fgro5nP5V08Lv5`Mfm@`}41gD~{zqm8VX?ijn-hRpJ0IX~H2^&u|C zi3w|D5mF4}v(sNh!0E)u#USHNfO@0XVtJ{D<|^lt@e9yE4wC`}6pa$yA!aq$HlE_b zP|tEZK8Eiux#fPwe4(bqLb81Mn`Ah#W*{6HSF&9&cuc6Qdos}$1}ockh26gT43Y6^|Ce<@n8 z7q@5=NVQR@68sy2W(^K+%vFuQ@he>QF_X&6nEX+J2&k!Y-<*2C+luWxEUBHcv#7y# z;9V2;-H0Si?^J?}6?!Z}HL3Xms|0)YTK+xrm9ZQFdyG3G$z=2W${ zjTa73sD8z>pi~BheHpyAnVp)sH8ZiHSIx#gt)cx#qoH<|ge%C>s?l!TP5#UhMS-bfVi6@YsD-nxegF9 zbIt4u`^OxnnkY!}D}4KGMguPi83U)DeP?!|;sPaZhWf|v20@1fD|63eC%R_&bw)8NE9F72@_mA=k~OI#-UW7{!NmAlp!)%?p88@!O5_qo1(JTvv$CqPvQmsx z2jA@}=;3N_NgM)8%7gmWbhl^H*kxSGk!A-S{lxCr7k4H}O(}TB4B{jHhmx)e{tcs& zl>X8>Mkc&VfKD&yr`*4oQ(wy+vvW`gUUQ zrd)|lFGzB4^?QeR4y-*5%9bYV&5*s)v!Qcc*p%!HK%9$wv9g5QwRN0Ui zqso@2K1KN{_7tyF7H4&fT&+K_T8WqU_hQ{~7%#`5TCiSoI7vkXgJuQ>Z#Kz`O~qyJ z5ii1My&bkLk39C>t)Z%9baDQN^cx=iLzZ;kCHTz#1(q_WImm*HU0%2?wLhO4cd`1Q z_#lI3CqSPzlpI$WAS+w4^c%{ayZj?&+D*J2cHHP33E{0R_6UpgxSrm8hz)g_=KNfs zN_;=EDuyLVOrjbbVPDPvNK>iK^V!gFeQYZkxD+~S%Aa$EtEAW7szdpk0Z-dHhPA$9 zL`?|oYpEIY3R#53UPVSvBvB-bvc>%qHt#pIkXGIWc8 zqJQIXrF1%dJ=W+X@0l!*r*~4y#@^yT>`UwnW?#LF*M%Kojh1+gryQCABA3hvs|3dj zIFGovR0{q7AekIP;7foZ@=T32OP9Co7cFt;3udmrve7=hG$uh%&gWQ<>fd_qKkBY2 zg@#qK({f~FB)HWG03flEapn>m_B=odwzy}XH8eH9POSm}BA?#Br%(CHOU3SN#Pkm! lr07QCo_*XWkGPR@%G7jKOUdTne*x?9#Ag5i literal 0 HcmV?d00001 diff --git a/docs/user/how-to/capture-hdf.rst b/docs/user/how-to/capture-hdf.rst new file mode 100644 index 00000000..e966340e --- /dev/null +++ b/docs/user/how-to/capture-hdf.rst @@ -0,0 +1,38 @@ + +Capture data +============ + +The ``:DATA`` PVs are used to capture data from the panda. +These can be viewed from the DATA screen. + +.. image:: /images/data_bobfile.png + :alt: The data screen + :align: center + + +* The file directory and name are chosen with ``:DATA:HDFDirectory`` and ``:DATA:HDFFileName``. 
+* ``:DATA:NumCapture`` is the number of frames to capture in the file.
+* ``:DATA:NumCaptured`` is the number of frames written to file.
+* ``:DATA:NumReceived`` is the number of frames received from the PandA.
+* ``:DATA:FlushPeriod`` sets how often received data is flushed into frames in the client.
+* ``:DATA:Capture`` starts (``1``) and stops (``0``) capturing data.
+* ``:DATA:CaptureMode`` selects one of the three capture modes described below.
+
+
+First N mode
+------------
+
+Begin capturing data and writing it to file as soon as it is received. Stop capturing
+once ``NumCapture`` frames have been written or the PandA has been disarmed.
+
+
+Last N mode
+-----------
+
+Begin capturing data into a buffer; once capturing has finished, write the last
+``NumCapture`` frames to disk.
+
+
+Forever mode
+------------
+
+Keep capturing and writing frames. Once the PandA has been disarmed, wait for it to
+be armed again and continue writing.
\ No newline at end of file
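[Editor's note] The how-to above maps onto a handful of channel-access calls. The
following is a minimal sketch of a "First N" capture, assuming an IOC serving these
records under a hypothetical ``PANDA`` prefix and using the ``aioca`` client; the
directory, file name, frame count, and the assumption that enum index 0 selects
"First N" are all illustrative, and the string PVs are treated as long-string
waveforms written as null-terminated char arrays:

.. code-block:: python

    import asyncio

    import numpy as np
    from aioca import caget, caput

    PREFIX = "PANDA"  # hypothetical IOC prefix; substitute your own


    def char_array(value: str) -> np.ndarray:
        # Long-string PVs are char waveforms: send a null-terminated buffer
        return np.frombuffer((value + "\0").encode(), dtype=np.uint8)


    async def first_n_capture() -> None:
        # Choose where the HDF5 file is written
        await caput(PREFIX + ":DATA:HDFDirectory", char_array("/tmp"))
        await caput(PREFIX + ":DATA:HDFFileName", char_array("capture.h5"))
        # Stop automatically after 100 frames (arbitrary example count)
        await caput(PREFIX + ":DATA:NumCapture", 100)
        # CaptureMode is an enum record; index 0 is assumed to select "First N"
        await caput(PREFIX + ":DATA:CaptureMode", 0)
        await caput(PREFIX + ":DATA:Capture", 1)  # start capturing
        # In First N mode, Capture drops back to 0 once NumCapture frames
        # have been written, so poll until that happens
        while await caget(PREFIX + ":DATA:Capture") == 1:
            done = await caget(PREFIX + ":DATA:NumCaptured")
            print(f"captured {done} frames")
            await asyncio.sleep(1)


    asyncio.run(first_n_capture())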
["click", "numpy"] dynamic = ["version"] diff --git a/src/pandablocks_ioc/_pvi.py b/src/pandablocks_ioc/_pvi.py index 109fcb38..7ddf8bab 100644 --- a/src/pandablocks_ioc/_pvi.py +++ b/src/pandablocks_ioc/_pvi.py @@ -81,9 +81,9 @@ def add_data_capture_pvi_info( data_capture_pvi_record: RecordWrapper, ): component = SignalRW( - data_capture_record_name, - data_capture_record_name, - widget=ButtonPanel(actions=dict(Start=1, Stop=0)), + name=epics_to_pvi_name(data_capture_record_name), + pv=data_capture_record_name, + widget=ButtonPanel(actions=dict(Start="1", Stop="0")), read_widget=LED(), ) add_pvi_info_to_record(data_capture_pvi_record, data_capture_record_name, "rw") @@ -95,9 +95,9 @@ def add_data_capture_pvi_info( def add_pcap_arm_pvi_info(group: PviGroup, pcap_arm_pvi_record: RecordWrapper): pcap_arm_record_name = EpicsName("PCAP:ARM") component = SignalRW( - pcap_arm_record_name, - pcap_arm_record_name, - widget=ButtonPanel(actions=dict(Arm=1, Disarm=0)), + name=epics_to_pvi_name(pcap_arm_record_name), + pv=pcap_arm_record_name, + widget=ButtonPanel(actions=dict(Arm="1", Disarm="0")), read_widget=LED(), ) add_pvi_info_to_record(pcap_arm_pvi_record, pcap_arm_record_name, "rw") diff --git a/tests/test-bobfiles/DATA.bob b/tests/test-bobfiles/DATA.bob index 505e927c..1cabfce4 100644 --- a/tests/test-bobfiles/DATA.bob +++ b/tests/test-bobfiles/DATA.bob @@ -34,7 +34,7 @@ true Label - Filepath + Hdfdirectory 0 0 250 @@ -52,7 +52,7 @@ Label - Filename + Hdffilename 0 25 250 @@ -70,71 +70,12 @@ Label - Numcapture + Hdffullfilepath 0 50 250 20 - - TextEntry - TEST_PREFIX:HDF5:NumCapture - 255 - 50 - 125 - 20 - 1 - - - Label - Flushperiod - 0 - 75 - 250 - 20 - - - TextEntry - TEST_PREFIX:HDF5:FlushPeriod - 255 - 75 - 125 - 20 - 1 - - - Label - Capture - 0 - 100 - 250 - 20 - - - TextEntry - TEST_PREFIX:HDF5:Capture - 255 - 100 - 125 - 20 - 1 - - - - OUTPUTS - 5 - 191 - 416 - 81 - true - - Label - Status - 0 - 0 - 250 - 20 - TextUpdate TEST_PREFIX:DATA:HDFFullFilePath @@ -147,7 +88,6 @@ 1 - 6 @@ -159,7 +99,7 @@ true Label - DATA: Num Capture + Numcapture 0 0 250 @@ -176,7 +116,7 @@ Label - DATA: Num Captured + Numcaptured 0 25 250 @@ -197,7 +137,7 @@ Label - DATA: Num Received + Numreceived 0 50 250 @@ -218,7 +158,7 @@ Label - DATA: Flush Period + Flushperiod 0 75 250 @@ -235,7 +175,7 @@ Label - DATA: Capture + Capture 0 100 250 @@ -277,7 +217,7 @@ LED - TEST_PREFIX: + TEST_PREFIX:DATA:Capture 350 100 20 @@ -285,7 +225,7 @@ Label - DATA: Capture Mode + Capturemode 0 125 250 @@ -311,7 +251,7 @@ OpenDisplay - PandA_POSITIONS_TABLE.bob + PandA_PositionsTable.bob tab Open Display @@ -333,7 +273,7 @@ true Label - DATA: Status + Status 0 0 250 @@ -351,7 +291,6 @@ 1 - 6 diff --git a/tests/test-bobfiles/index.bob b/tests/test-bobfiles/index.bob index 8fe3ed31..7f9a10ae 100644 --- a/tests/test-bobfiles/index.bob +++ b/tests/test-bobfiles/index.bob @@ -51,7 +51,7 @@ Label - HDF5 + DATA 23 55 250 @@ -66,7 +66,7 @@ Open Display - HDF5 + DATA 278 55 125 diff --git a/tests/test_tables.py b/tests/test_tables.py index adf105c7..b1fac61f 100644 --- a/tests/test_tables.py +++ b/tests/test_tables.py @@ -363,9 +363,9 @@ async def test_table_updater_update_mode_submit_exception_data_error( assert isinstance(table_updater.client.send, AsyncMock) table_updater.client.send.side_effect = Exception("Mocked exception") - table_updater.all_values_dict[ - EpicsName(EPICS_FORMAT_TABLE_NAME) - ] = InErrorException("Mocked in error exception") + table_updater.all_values_dict[EpicsName(EPICS_FORMAT_TABLE_NAME)] = ( + 
InErrorException("Mocked in error exception") + ) await table_updater.update_mode(TableModeEnum.SUBMIT.value)