Ruff and Pyright linting changes
DiamondJoseph committed Apr 17, 2024
1 parent 6b554f2 commit e2f8317
Showing 21 changed files with 70 additions and 83 deletions.
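
Most of this diff applies one mechanical rewrite, likely driven by ruff's flake8-comprehensions rule C408 (unnecessary `dict` call): keyword-argument `dict(...)` calls become equivalent dict literals. A minimal illustrative sketch of the pattern (not taken from the diff itself):

# Before: flagged as an unnecessary dict() call
descriptor = dict(source="sim://x", dtype="integer", shape=[])
# After: the equivalent literal, as used throughout this commit
descriptor = {"source": "sim://x", "dtype": "integer", "shape": []}
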
8 changes: 3 additions & 5 deletions src/ophyd_async/core/sim_signal_backend.py
@@ -41,7 +41,7 @@ def descriptor(self, source: str, value) -> Descriptor:
type(value) in primitive_dtypes
), f"invalid converter for value of type {type(value)}"
dtype = primitive_dtypes[type(value)]
return dict(source=source, dtype=dtype, shape=[])
return {"source": source, "dtype": dtype, "shape": []}

def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
if datatype is None:
@@ -52,7 +52,7 @@ def make_initial_value(self, datatype: Optional[Type[T]]) -> T:

class SimArrayConverter(SimConverter):
def descriptor(self, source: str, value) -> Descriptor:
return dict(source=source, dtype="array", shape=[len(value)])
return {"source": source, "dtype": "array", "shape": [len(value)]}

def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
if datatype is None:
@@ -76,9 +76,7 @@ def write_value(self, value: Union[Enum, str]) -> Enum:

def descriptor(self, source: str, value) -> Descriptor:
choices = [e.value for e in self.enum_class]
return dict(
source=source, dtype="string", shape=[], choices=choices
) # type: ignore
return {"source": source, "dtype": "string", "shape": [], "choices": choices} # type: ignore

def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
if datatype is None:
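
For context on the descriptors built by the sim converters above: each one maps a Python value to a Bluesky-style descriptor dict. A minimal sketch of that mapping, assuming a primitive_dtypes table like the one referenced in sim_signal_backend.py (the exact table is an assumption here):

from typing import Any, Dict, List, Union

# Assumed type-to-dtype table, mirroring the primitive_dtypes lookup above
primitive_dtypes: Dict[type, str] = {bool: "boolean", int: "integer", float: "number", str: "string"}

def describe(source: str, value: Any) -> Dict[str, Union[str, List[int]]]:
    # Sequences are reported as arrays, with their length as the shape
    if isinstance(value, (list, tuple)):
        return {"source": source, "dtype": "array", "shape": [len(value)]}
    # Scalars get a primitive dtype and an empty shape
    return {"source": source, "dtype": primitive_dtypes[type(value)], "shape": []}
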
2 changes: 1 addition & 1 deletion src/ophyd_async/core/utils.py
@@ -132,7 +132,7 @@ def get_unique(values: Dict[str, T], types: str) -> T:


async def merge_gathered_dicts(
coros: Iterable[Awaitable[Dict[str, T]]]
coros: Iterable[Awaitable[Dict[str, T]]],
) -> Dict[str, T]:
"""Merge dictionaries produced by a sequence of coroutines.
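
The only change to merge_gathered_dicts is a trailing comma after its single parameter (formatter style). For readers unfamiliar with the helper, a minimal sketch of what a function with this signature and docstring could look like; this is an assumption, not the library's actual implementation:

import asyncio
from typing import Awaitable, Dict, Iterable, TypeVar

T = TypeVar("T")

async def merge_gathered_dicts(
    coros: Iterable[Awaitable[Dict[str, T]]],
) -> Dict[str, T]:
    """Merge dictionaries produced by a sequence of coroutines."""
    merged: Dict[str, T] = {}
    # Await all coroutines concurrently, then fold their results into one dict
    for result in await asyncio.gather(*coros):
        merged.update(result)
    return merged
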
16 changes: 8 additions & 8 deletions src/ophyd_async/epics/_backend/_aioca.py
@@ -52,14 +52,14 @@ def value(self, value: AugmentedValue):
return value

def reading(self, value: AugmentedValue):
return dict(
value=self.value(value),
timestamp=value.timestamp,
alarm_severity=-1 if value.severity > 2 else value.severity,
)
return {
"value": self.value(value),
"timestamp": value.timestamp,
"alarm_severity": -1 if value.severity > 2 else value.severity,
}

def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
return dict(source=source, dtype=dbr_to_dtype[value.datatype], shape=[])
return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}


class CaLongStrConverter(CaConverter):
@@ -74,7 +74,7 @@ def write_value(self, value: str):

class CaArrayConverter(CaConverter):
def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
return dict(source=source, dtype="array", shape=[len(value)])
return {"source": source, "dtype": "array", "shape": [len(value)]}


@dataclass
@@ -92,7 +92,7 @@ def value(self, value: AugmentedValue):

def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
choices = [e.value for e in self.enum_class]
return dict(source=source, dtype="string", shape=[], choices=choices)
return {"source": source, "dtype": "string", "shape": [], "choices": choices}


class DisconnectedCaConverter(CaConverter):
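
The reading dicts in _aioca.py clamp the alarm severity before publishing it. A small illustrative helper, assuming the standard EPICS severities (0 NO_ALARM, 1 MINOR, 2 MAJOR, 3 INVALID):

def clamp_severity(severity: int) -> int:
    # Anything beyond MAJOR (e.g. INVALID) is reported as -1, i.e. "unknown"
    return -1 if severity > 2 else severity

assert [clamp_severity(s) for s in (0, 1, 2, 3)] == [0, 1, 2, -1]
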
26 changes: 13 additions & 13 deletions src/ophyd_async/epics/_backend/_p4p.py
@@ -49,15 +49,15 @@ def value(self, value):
def reading(self, value):
ts = value["timeStamp"]
sv = value["alarm"]["severity"]
return dict(
value=self.value(value),
timestamp=ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
alarm_severity=-1 if sv > 2 else sv,
)
return {
"value": self.value(value),
"timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
"alarm_severity": -1 if sv > 2 else sv,
}

def descriptor(self, source: str, value) -> Descriptor:
dtype = specifier_to_dtype[value.type().aspy("value")]
return dict(source=source, dtype=dtype, shape=[])
return {"source": source, "dtype": dtype, "shape": []}

def metadata_fields(self) -> List[str]:
"""
@@ -74,7 +74,7 @@ def value_fields(self) -> List[str]:

class PvaArrayConverter(PvaConverter):
def descriptor(self, source: str, value) -> Descriptor:
return dict(source=source, dtype="array", shape=[len(value["value"])])
return {"source": source, "dtype": "array", "shape": [len(value["value"])]}


class PvaNDArrayConverter(PvaConverter):
@@ -98,7 +98,7 @@ def value(self, value):

def descriptor(self, source: str, value) -> Descriptor:
dims = self._get_dimensions(value)
return dict(source=source, dtype="array", shape=dims)
return {"source": source, "dtype": "array", "shape": dims}

def write_value(self, value):
# No clear use-case for writing directly to an NDArray, and some
@@ -122,15 +122,15 @@ def value(self, value):

def descriptor(self, source: str, value) -> Descriptor:
choices = [e.value for e in self.enum_class]
return dict(source=source, dtype="string", shape=[], choices=choices)
return {"source": source, "dtype": "string", "shape": [], "choices": choices}


class PvaEnumBoolConverter(PvaConverter):
def value(self, value):
return value["value"]["index"]

def descriptor(self, source: str, value) -> Descriptor:
return dict(source=source, dtype="integer", shape=[])
return {"source": source, "dtype": "integer", "shape": []}


class PvaTableConverter(PvaConverter):
@@ -139,15 +139,15 @@ def value(self, value):

def descriptor(self, source: str, value) -> Descriptor:
# This is wrong, but defer until we know how to actually describe a table
return dict(source=source, dtype="object", shape=[]) # type: ignore
return {"source": source, "dtype": "object", "shape": []} # type: ignore


class PvaDictConverter(PvaConverter):
def reading(self, value):
ts = time.time()
value = value.todict()
# Alarm severity is vacuously 0 for a table
return dict(value=value, timestamp=ts, alarm_severity=0)
return {"value": value, "timestamp": ts, "alarm_severity": 0}

def value(self, value: Value):
return value.todict()
@@ -279,7 +279,7 @@ async def put(self, value: Optional[T], wait=True, timeout=None):
write_value = self.initial_values[self.write_pv]
else:
write_value = self.converter.write_value(value)
coro = self.ctxt.put(self.write_pv, dict(value=write_value), wait=wait)
coro = self.ctxt.put(self.write_pv, {"value": write_value}, wait=wait)
try:
await asyncio.wait_for(coro, timeout)
except asyncio.TimeoutError as exc:
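
The _p4p.py readings combine the PVA timeStamp fields into one float before the dict is built. A worked example of that conversion (values are illustrative):

def pva_timestamp(ts: dict) -> float:
    # secondsPastEpoch holds whole seconds; nanoseconds holds the sub-second part
    return ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9

# {"secondsPastEpoch": 1713355200, "nanoseconds": 250_000_000} -> 1713355200.25
print(pva_timestamp({"secondsPastEpoch": 1713355200, "nanoseconds": 250_000_000}))
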
4 changes: 1 addition & 3 deletions src/ophyd_async/epics/_backend/common.py
@@ -15,6 +15,4 @@ def get_supported_enum_class(
choices = tuple(v.value for v in datatype)
if set(choices).difference(pv_choices):
raise TypeError(f"{pv} has choices {pv_choices}: not all in {choices}")
return Enum(
"GeneratedChoices", {x or "_": x for x in pv_choices}, type=str
) # type: ignore
return Enum("GeneratedChoices", {x or "_": x for x in pv_choices}, type=str) # type: ignore
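
The single-line return in common.py uses the functional Enum API with a str mix-in, so generated members compare equal to their PV choice strings; the `x or "_"` guard gives an empty choice a usable member name. A small sketch with made-up choices:

from enum import Enum

pv_choices = ("", "Armed", "Done")
# An empty string cannot be a member name, so it is mapped to "_"
GeneratedChoices = Enum("GeneratedChoices", {x or "_": x for x in pv_choices}, type=str)

assert GeneratedChoices.Armed == "Armed"  # str mix-in: members compare as strings
assert GeneratedChoices("").name == "_"   # the empty choice is still representable
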
8 changes: 4 additions & 4 deletions src/ophyd_async/epics/areadetector/writers/_hdffile.py
@@ -44,10 +44,10 @@ def stream_resources(self) -> Iterator[StreamResource]:
def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
# Indices are relative to resource
if indices_written > self._last_emitted:
indices = dict(
start=self._last_emitted,
stop=indices_written,
)
indices = {
"start": self._last_emitted,
"stop": indices_written,
}
self._last_emitted = indices_written
for bundle in self._bundles:
yield bundle.compose_stream_datum(indices)
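
stream_data emits StreamDatum slices relative to the resource: each call covers only the frames written since the previous emission. A compact sketch of that bookkeeping (class and method names here are assumptions, not the writer's real API):

from typing import Dict, Iterator

class IndexTracker:
    def __init__(self) -> None:
        self._last_emitted = 0

    def slices(self, indices_written: int) -> Iterator[Dict[str, int]]:
        # Emit only when new frames have appeared since the last call
        if indices_written > self._last_emitted:
            start, self._last_emitted = self._last_emitted, indices_written
            yield {"start": start, "stop": indices_written}

tracker = IndexTracker()
print(list(tracker.slices(3)))  # [{'start': 0, 'stop': 3}]
print(list(tracker.slices(7)))  # [{'start': 3, 'stop': 7}]
print(list(tracker.slices(7)))  # [] - nothing new to emit
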
7 changes: 3 additions & 4 deletions src/ophyd_async/epics/pvi/pvi.py
@@ -89,7 +89,8 @@ def _verify_common_blocks(entry: PVIEntry, common_device: Type[Device]):
_verify_common_blocks(sub_sub_entry, sub_device) # type: ignore
else:
_verify_common_blocks(
entry.sub_entries[sub_name], sub_device # type: ignore
entry.sub_entries[sub_name],
sub_device, # type: ignore
)


@@ -238,9 +239,7 @@ async def _get_pvi_entries(entry: PVIEntry, timeout=DEFAULT_TIMEOUT):
sub_number_split = 1 if sub_number_split is None else sub_number_split
if sub_name_split not in entry.sub_entries:
entry.sub_entries[sub_name_split] = {}
entry.sub_entries[sub_name_split][
sub_number_split
] = sub_entry # type: ignore
entry.sub_entries[sub_name_split][sub_number_split] = sub_entry # type: ignore
else:
entry.sub_entries[sub_name] = sub_entry

1 change: 0 additions & 1 deletion src/ophyd_async/panda/trigger.py
@@ -13,7 +13,6 @@ class SeqTableInfo:


class StaticSeqTableTriggerLogic(TriggerLogic[SeqTableInfo]):

def __init__(self, seq: SeqBlock) -> None:
self.seq = seq

4 changes: 1 addition & 3 deletions src/ophyd_async/panda/writers/hdf_writer.py
@@ -63,9 +63,8 @@ class CaptureSignalWrapper:
# This should return a dictionary which contains a dict, containing the Capture
# signal object, and the value of that signal
async def get_signals_marked_for_capture(
capture_signals: Dict[str, SignalR]
capture_signals: Dict[str, SignalR],
) -> Dict[str, CaptureSignalWrapper]:

# Read signals to see if they should be captured
do_read = [signal.get_value() for signal in capture_signals.values()]

@@ -79,7 +78,6 @@ async def get_signals_marked_for_capture(
for signal_path, signal_object, signal_value in zip(
capture_signals.keys(), capture_signals.values(), signal_values
):

signal_path = signal_path.replace("_capture", "")
if (signal_value.value in iter(Capture)) and (signal_value.value != Capture.No):
signals_to_capture[signal_path] = CaptureSignalWrapper(
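
get_signals_marked_for_capture reads every *_capture signal and keeps only those whose value is a capture mode other than Capture.No. A condensed sketch of that flow; the concurrent gather step and the "No" stand-in value are assumptions, since the excerpt above elides them:

import asyncio

async def read_capture_flags(capture_signals):
    # Read all *_capture signals concurrently
    values = await asyncio.gather(*(sig.get_value() for sig in capture_signals.values()))
    captured = {}
    for path, sig, value in zip(capture_signals.keys(), capture_signals.values(), values):
        if value != "No":  # stands in for Capture.No in the real code
            # Strip the "_capture" suffix so the key names the underlying signal
            captured[path.replace("_capture", "")] = (sig, value)
    return captured
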
8 changes: 4 additions & 4 deletions src/ophyd_async/panda/writers/panda_hdf_file.py
@@ -49,10 +49,10 @@ def stream_resources(self) -> Iterator[StreamResource]:
def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
# Indices are relative to resource
if indices_written > self._last_emitted:
indices = dict(
start=self._last_emitted,
stop=indices_written,
)
indices = {
"start": self._last_emitted,
"stop": indices_written,
}
self._last_emitted = indices_written
for bundle in self._bundles:
yield bundle.compose_stream_datum(indices)
1 change: 0 additions & 1 deletion src/ophyd_async/planstubs/prepare_trigger_and_dets.py
@@ -19,7 +19,6 @@ def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
repeats: int = 1,
period: float = 0.0,
):

trigger_info = TriggerInfo(
num=num * repeats,
trigger=DetectorTrigger.constant_gate,
6 changes: 3 additions & 3 deletions tests/conftest.py
@@ -1,5 +1,5 @@
import os
import asyncio
import os
import subprocess
import sys
import time
@@ -29,8 +29,8 @@ def pytest_exception_interact(call):
@pytest.hookimpl(tryfirst=True)
def pytest_internalerror(excinfo):
raise excinfo.value


@pytest.fixture(scope="function")
def RE(request):
loop = asyncio.new_event_loop()
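
The conftest.py change only reorders the top imports alphabetically (likely ruff's isort-compatible rule I001). The accepted ordering:

import asyncio  # previously listed after "import os"
import os
import subprocess
import sys
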
4 changes: 1 addition & 3 deletions tests/core/test_device.py
@@ -51,9 +51,7 @@ def test_device_children(parent: DummyDeviceGroup):
def test_device_vector_children():
parent = DummyDeviceGroup("root")

device_vector_children = [
(name, child) for name, child in parent.dict_with_children.children()
]
device_vector_children = list(parent.dict_with_children.children())
assert device_vector_children == [("123", parent.dict_with_children[123])]


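
The test_device.py change drops a comprehension that merely re-packed (name, child) pairs in favour of a direct list() call (likely ruff's flake8-comprehensions rule C416). The shape of the rewrite, shown on a stand-in dict:

children = {"123": "child-device"}
# Flagged: the comprehension adds nothing over the iterator it wraps
pairs = [(name, child) for name, child in children.items()]
# Preferred: identical result, clearer intent
pairs = list(children.items())
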
8 changes: 4 additions & 4 deletions tests/core/test_flyer.py
@@ -97,10 +97,10 @@ async def collect_stream_docs(
yield "stream_resource", self._file.stream_resource_doc

if indices_written >= self._last_emitted:
indices = dict(
start=self._last_emitted,
stop=indices_written,
)
indices = {
"start": self._last_emitted,
"stop": indices_written,
}
self._last_emitted = indices_written
self._last_flush = time.monotonic()
yield "stream_datum", self._file.compose_stream_datum(indices)
10 changes: 5 additions & 5 deletions tests/core/test_sim.py
@@ -18,23 +18,23 @@ class MyEnum(str, Enum):


def integer_d(value):
return dict(dtype="integer", shape=[])
return {"dtype": "integer", "shape": []}


def number_d(value):
return dict(dtype="number", shape=[])
return {"dtype": "number", "shape": []}


def string_d(value):
return dict(dtype="string", shape=[])
return {"dtype": "string", "shape": []}


def enum_d(value):
return dict(dtype="string", shape=[], choices=["Aaa", "Bbb", "Ccc"])
return {"dtype": "string", "shape": [], "choices": ["Aaa", "Bbb", "Ccc"]}


def waveform_d(value):
return dict(dtype="array", shape=[len(value)])
return {"dtype": "array", "shape": [len(value)]}


class MonitorQueue:
9 changes: 5 additions & 4 deletions tests/core/test_utils.py
@@ -139,9 +139,11 @@ async def test_error_handling_value_errors(caplog):

# This should fail since the error is a ValueError
with pytest.raises(NotConnected) as e:
await dummy_device_two_working_one_timeout_two_value_error.connect(
timeout=0.01
),
(
await dummy_device_two_working_one_timeout_two_value_error.connect(
timeout=0.01
),
)
assert str(e.value) == str(TWO_WORKING_TWO_TIMEOUT_TWO_VALUE_ERROR_OUTPUT)

logs = caplog.get_records("call")
@@ -213,7 +215,6 @@ def test_not_connected_error_output():


async def test_combining_top_level_signal_and_child_device():

dummy_device1 = DummyDeviceCombiningTopLevelSignalAndSubDevice()
with pytest.raises(NotConnected) as e:
await dummy_device1.connect(timeout=0.01)
12 changes: 6 additions & 6 deletions tests/epics/test_signals.py
@@ -157,23 +157,23 @@ class MyEnum(str, Enum):


def integer_d(value):
return dict(dtype="integer", shape=[])
return {"dtype": "integer", "shape": []}


def number_d(value):
return dict(dtype="number", shape=[])
return {"dtype": "number", "shape": []}


def string_d(value):
return dict(dtype="string", shape=[])
return {"dtype": "string", "shape": []}


def enum_d(value):
return dict(dtype="string", shape=[], choices=["Aaa", "Bbb", "Ccc"])
return {"dtype": "string", "shape": [], "choices": ["Aaa", "Bbb", "Ccc"]}


def waveform_d(value):
return dict(dtype="array", shape=[len(value)])
return {"dtype": "array", "shape": [len(value)]}


ls1 = "a string that is just longer than forty characters"
@@ -389,7 +389,7 @@ async def test_pva_table(ioc: IOC) -> None:
enum=[MyEnum.c, MyEnum.b],
)
# TODO: what should this be for a variable length table?
descriptor = dict(dtype="object", shape=[])
descriptor = {"dtype": "object", "shape": []}
# Make and connect the backend
for t, i, p in [(MyTable, initial, put), (None, put, initial)]:
backend = await ioc.make_backend(t, "table")