From 7f1278b613b123dbfba4e7ca95fb111ecc9cb085 Mon Sep 17 00:00:00 2001 From: Rose Yemelyanova Date: Tue, 3 Oct 2023 17:22:00 +0100 Subject: [PATCH] init --- .vscode/settings.json | 18 +++++ src/blueapi/core/bluesky_types.py | 5 -- src/blueapi/core/device_walk.py | 17 ----- src/blueapi/plugins/data_writing_server.py | 76 ---------------------- src/blueapi/preprocessors/bundle_stage.py | 0 tests/plugins/test_data_writing.py | 42 ++++++------ 6 files changed, 39 insertions(+), 119 deletions(-) delete mode 100644 src/blueapi/core/device_walk.py delete mode 100644 src/blueapi/plugins/data_writing_server.py create mode 100644 src/blueapi/preprocessors/bundle_stage.py diff --git a/.vscode/settings.json b/.vscode/settings.json index 7eac220fe..05f314594 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -14,4 +14,22 @@ }, "esbonio.server.enabled": true, "esbonio.sphinx.confDir": "", + "cloudcode.kubeconfigs": [ + { + "name": "/home/rose/.kube/config_k8s-p38", + "configPath": "/home/rose/.kube/config_k8s-p38" + }, + ], + "cloudcode.active-kubeconfig": "/home/rose/.kube/config_k8s-p38", + "cloudcode.apigee.dockerOptions": { + "environmentVariables": { + "XTERM": "xterm-256color" + }, + "dns": "8.8.8.8", + "detached": true, + "privileged": true, + "labels": {}, + "volumes": {}, + "additionalArguments": "" + }, } diff --git a/src/blueapi/core/bluesky_types.py b/src/blueapi/core/bluesky_types.py index c9b285a36..d0e315f0a 100644 --- a/src/blueapi/core/bluesky_types.py +++ b/src/blueapi/core/bluesky_types.py @@ -132,8 +132,3 @@ def watch(self, __func: Callable) -> None: """ ... - - -@runtime_checkable -class HasComponents(Protocol): - component_names: List[str] diff --git a/src/blueapi/core/device_walk.py b/src/blueapi/core/device_walk.py deleted file mode 100644 index c2d3e65d8..000000000 --- a/src/blueapi/core/device_walk.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Iterable, Tuple - -from blueapi.core import Device, HasComponents - -DeviceTreeNode = Tuple[Device] - - -def walk_devices(devices: Iterable[Device]) -> Iterable: - for device in devices: - yield from walk_devices(get_components(device)) - yield device - - -def get_components(device: Device): - if isinstance(device, HasComponents): - for name in device.component_names: - yield getattr(device, name) diff --git a/src/blueapi/plugins/data_writing_server.py b/src/blueapi/plugins/data_writing_server.py deleted file mode 100644 index 3e5e40d98..000000000 --- a/src/blueapi/plugins/data_writing_server.py +++ /dev/null @@ -1,76 +0,0 @@ -import itertools -import os -from pathlib import Path -from typing import Dict - -from fastapi import FastAPI -from pydantic import BaseModel - -app = FastAPI() - -SCAN_NUMBER = itertools.count() - - -class DataCollection(BaseModel): - collection_number: int - group: str - raw_data_files_root: Path - nexus_file_path: Path - - -class DataCollectionSetupResult(BaseModel): - collection: DataCollection - directories_created: bool - - -COLLECTIONS: Dict[str, Dict[int, DataCollection]] = {} - - -@app.post("/collection/{group}") -def create_collection(group: str) -> DataCollectionSetupResult: - num = next(SCAN_NUMBER) - if group not in COLLECTIONS: - COLLECTIONS[group] = {} - if num in COLLECTIONS[group]: - raise Exception("Collection already exists") - - data_root = get_data_root(group) - raw_data_files_root = data_root / f"{group}-{num}" - nexus_file_path = data_root / f"{group}-{num}.nxs" - - collection = DataCollection( - collection_number=num, - group=group, - 
raw_data_files_root=raw_data_files_root, - nexus_file_path=nexus_file_path, - ) - - ensure_directories(collection) - COLLECTIONS[group][num] = collection - - return DataCollectionSetupResult( - collection=collection, - directories_created=True, - ) - - -def ensure_directories(collection: DataCollection) -> None: - root = collection.raw_data_files_root - os.makedirs(root, exist_ok=True) - if not (root.exists() or root.is_dir()): - raise Exception("Unable to make data directory") - - -def get_data_root(group: str) -> Path: - return Path(f"/tmp/data/{group}") - - -@app.get("/collection/{group}/{number}") -def get_collection(group: str, number: int) -> DataCollection: - return COLLECTIONS[group][number] - - -if __name__ == "__main__": - import uvicorn - - uvicorn.run(app, port=8089) diff --git a/src/blueapi/preprocessors/bundle_stage.py b/src/blueapi/preprocessors/bundle_stage.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/plugins/test_data_writing.py b/tests/plugins/test_data_writing.py index d827ecdd9..62ce20316 100644 --- a/tests/plugins/test_data_writing.py +++ b/tests/plugins/test_data_writing.py @@ -14,7 +14,7 @@ from blueapi.core import DataEvent, MsgGenerator from blueapi.plugins.data_writing import ( - DATA_COLLECTION_NUMBER, + DATA_SESSION, DataCollectionProvider, InMemoryDataCollectionProvider, data_writing_wrapper, @@ -79,15 +79,15 @@ def stageless_count() -> MsgGenerator: return (yield from bps.one_shot(detectors)) def inner_plan() -> MsgGenerator: - yield from run_wrapper(stageless_count(), md={DATA_COLLECTION_NUMBER: 1}) - yield from run_wrapper(stageless_count(), md={DATA_COLLECTION_NUMBER: 1}) - yield from run_wrapper(stageless_count(), md={DATA_COLLECTION_NUMBER: 2}) - yield from run_wrapper(stageless_count(), md={DATA_COLLECTION_NUMBER: 2}) + yield from run_wrapper(stageless_count(), md={DATA_SESSION: 1}) + yield from run_wrapper(stageless_count(), md={DATA_SESSION: 1}) + yield from run_wrapper(stageless_count(), md={DATA_SESSION: 2}) + yield from run_wrapper(stageless_count(), md={DATA_SESSION: 2}) yield from stage_wrapper(inner_plan(), detectors) -@run_decorator(md={DATA_COLLECTION_NUMBER: 12345}) +@run_decorator(md={DATA_SESSION: 12345}) @set_run_key_decorator("outer") def nested_run_with_metadata(detectors: List[FakeFileWritingDetector]) -> MsgGenerator: yield from set_run_key_wrapper(bp.count(detectors), "inner") @@ -114,7 +114,7 @@ def test_simple_run_gets_scan_number( provider, ) assert docs[0].name == "start" - assert docs[0].doc[DATA_COLLECTION_NUMBER] == 0 + assert docs[0].doc[DATA_SESSION] == 0 assert_all_detectors_used_collection_numbers(docs, detectors, [0]) @@ -132,8 +132,8 @@ def test_multi_run_gets_scan_numbers( ) start_docs = find_start_docs(docs) assert len(start_docs) == 2 - assert start_docs[0].doc[DATA_COLLECTION_NUMBER] == 0 - assert start_docs[1].doc[DATA_COLLECTION_NUMBER] == 1 + assert start_docs[0].doc[DATA_SESSION] == 0 + assert start_docs[1].doc[DATA_SESSION] == 1 assert_all_detectors_used_collection_numbers(docs, detectors, [0, 1]) @@ -149,8 +149,8 @@ def test_multi_run_single_stage( ) start_docs = find_start_docs(docs) assert len(start_docs) == 2 - assert start_docs[0].doc[DATA_COLLECTION_NUMBER] == 0 - assert start_docs[1].doc[DATA_COLLECTION_NUMBER] == 0 + assert start_docs[0].doc[DATA_SESSION] == 0 + assert start_docs[1].doc[DATA_SESSION] == 0 assert_all_detectors_used_collection_numbers(docs, detectors, [0, 0]) @@ -166,10 +166,10 @@ def test_multi_run_single_stage_multi_group( ) start_docs = find_start_docs(docs) 
assert len(start_docs) == 4 - assert start_docs[0].doc[DATA_COLLECTION_NUMBER] == 0 - assert start_docs[1].doc[DATA_COLLECTION_NUMBER] == 0 - assert start_docs[2].doc[DATA_COLLECTION_NUMBER] == 0 - assert start_docs[3].doc[DATA_COLLECTION_NUMBER] == 0 + assert start_docs[0].doc[DATA_SESSION] == 0 + assert start_docs[1].doc[DATA_SESSION] == 0 + assert start_docs[2].doc[DATA_SESSION] == 0 + assert start_docs[3].doc[DATA_SESSION] == 0 assert_all_detectors_used_collection_numbers(docs, detectors, [0, 0, 0, 0]) @@ -185,9 +185,9 @@ def test_nested_run_with_metadata( ) start_docs = find_start_docs(docs) assert len(start_docs) == 3 - assert start_docs[0].doc[DATA_COLLECTION_NUMBER] == 0 - assert start_docs[1].doc[DATA_COLLECTION_NUMBER] == 1 - assert start_docs[2].doc[DATA_COLLECTION_NUMBER] == 2 + assert start_docs[0].doc[DATA_SESSION] == 0 + assert start_docs[1].doc[DATA_SESSION] == 1 + assert start_docs[2].doc[DATA_SESSION] == 2 assert_all_detectors_used_collection_numbers(docs, detectors, [1, 2]) @@ -203,9 +203,9 @@ def test_nested_run_without_metadata( ) start_docs = find_start_docs(docs) assert len(start_docs) == 3 - assert start_docs[0].doc[DATA_COLLECTION_NUMBER] == 0 - assert start_docs[1].doc[DATA_COLLECTION_NUMBER] == 1 - assert start_docs[2].doc[DATA_COLLECTION_NUMBER] == 2 + assert start_docs[0].doc[DATA_SESSION] == 0 + assert start_docs[1].doc[DATA_SESSION] == 1 + assert start_docs[2].doc[DATA_SESSION] == 2 assert_all_detectors_used_collection_numbers(docs, detectors, [1, 2])
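
Note: the test updates above exercise the renamed DATA_SESSION key by attaching it as run
metadata through bluesky's run_wrapper. A minimal usage sketch of that pattern follows for
reference only; the detector argument and the session value are illustrative assumptions,
not taken from this patch.

    from bluesky import plan_stubs as bps
    from bluesky.preprocessors import run_wrapper, stage_wrapper

    from blueapi.core import MsgGenerator
    from blueapi.plugins.data_writing import DATA_SESSION


    def tagged_count(detectors) -> MsgGenerator:
        """Stage the detectors, then take a single reading inside a run labelled
        with a data session number, mirroring tests/plugins/test_data_writing.py."""

        def inner() -> MsgGenerator:
            # DATA_SESSION is recorded in the run's start document; the value 1
            # here is an arbitrary example.
            yield from run_wrapper(bps.one_shot(detectors), md={DATA_SESSION: 1})

        yield from stage_wrapper(inner(), detectors)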