
Commit 2802e7e

Merge branch 'main' into 1663-i09-energy
2 parents 42beac5 + c4df0a0 commit 2802e7e

16 files changed: +123 -147 lines changed

pyproject.toml

Lines changed: 3 additions & 2 deletions
@@ -14,7 +14,7 @@ description = "Ophyd devices and other utils that could be used across DLS beaml
 dependencies = [
     "click",
     "ophyd",
-    "ophyd-async[ca,pva]>=0.13.2",
+    "ophyd-async[ca,pva]>=0.13.5",
     "bluesky>=1.14.5",
     "pyepics",
     "dataclasses-json",
@@ -54,7 +54,8 @@ dev = [
     "pre-commit",
     "psutil",
     "pydata-sphinx-theme>=0.12",
-    "pyright",
+    # Pin to previous pyright until https://github.com/microsoft/pyright/issues/11060 is fixed
+    "pyright==1.1.406",
     "pytest",
     "pytest-asyncio",
     "pytest-cov",

src/dodal/beamlines/i19_2.py

Lines changed: 22 additions & 0 deletions
@@ -1,12 +1,17 @@
+from pathlib import Path
+
+from ophyd_async.fastcs.eiger import EigerDetector
 from ophyd_async.fastcs.panda import HDFPanda
 
 from dodal.common.beamlines.beamline_utils import (
     device_factory,
     get_path_provider,
+    set_path_provider,
 )
 from dodal.common.beamlines.beamline_utils import (
     set_beamline as set_utils_beamline,
 )
+from dodal.common.visit import StaticVisitPathProvider
 from dodal.devices.i19.access_controlled.blueapi_device import HutchState
 from dodal.devices.i19.access_controlled.shutter import AccessControlledShutter
 from dodal.devices.i19.backlight import BacklightPosition
@@ -31,6 +36,13 @@
 set_log_beamline(BL)
 set_utils_beamline(BL)
 
+set_path_provider(
+    StaticVisitPathProvider(
+        BL,
+        Path("/dls/i19-2/data/2025/cm40639-4/"),
+    )
+)
+
 
 I19_2_ZEBRA_MAPPING = ZebraMapping(
     outputs=ZebraTTLOutputs(),
@@ -105,3 +117,13 @@ def panda() -> HDFPanda:
         prefix=f"{PREFIX.beamline_prefix}-EA-PANDA-01:",
         path_provider=get_path_provider(),
     )
+
+
+@device_factory()
+def eiger() -> EigerDetector:
+    return EigerDetector(
+        prefix=PREFIX.beamline_prefix,
+        path_provider=get_path_provider(),
+        drv_suffix="-EA-EIGER-01:",
+        hdf_suffix="-EA-EIGER-01:OD:",
+    )
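
The new eiger factory mirrors the existing panda factory. A minimal sketch of exercising it with a mock connection, assuming the factory returns an unconnected EigerDetector that can then be connected via ophyd-async's ensure_connected plan stub (the same mechanism the CLI below uses for all devices):

from bluesky.run_engine import RunEngine
from ophyd_async.plan_stubs import ensure_connected

from dodal.beamlines.i19_2 import eiger

# Sketch: build the device from the factory, then connect it in mock mode
# so that no real EPICS PVs are needed.
run_engine = RunEngine()
detector = eiger()
run_engine(ensure_connected(detector, mock=True))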

src/dodal/cli.py

Lines changed: 3 additions & 3 deletions
@@ -4,7 +4,7 @@
 
 import click
 from bluesky.run_engine import RunEngine
-from ophyd_async.core import NotConnected, StaticPathProvider, UUIDFilenameProvider
+from ophyd_async.core import NotConnectedError, StaticPathProvider, UUIDFilenameProvider
 from ophyd_async.plan_stubs import ensure_connected
 
 from dodal.beamlines import all_beamline_names, module_name_for_beamline
@@ -79,7 +79,7 @@ def connect(beamline: str, all: bool, sim_backend: bool) -> None:
     # If exceptions have occurred, this will print details of the relevant PVs
     exceptions = {**instance_exceptions, **connect_exceptions}
     if len(exceptions) > 0:
-        raise NotConnected(exceptions)
+        raise NotConnectedError(exceptions)
 
 
 def _report_successful_devices(
@@ -113,7 +113,7 @@ def _connect_devices(
     # Connect ophyd-async devices
     try:
         run_engine(ensure_connected(*ophyd_async_devices.values(), mock=sim_backend))
-    except NotConnected as ex:
+    except NotConnectedError as ex:
         exceptions = {**exceptions, **ex.sub_errors}
 
     # Only return the subset of devices that haven't raised an exception
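
The exception is simply renamed (NotConnected becomes NotConnectedError), which appears to go hand in hand with the ophyd-async bump in pyproject.toml above. For downstream code the change is mechanical; a minimal sketch of the new handling pattern:

from bluesky.run_engine import RunEngine
from ophyd_async.core import NotConnectedError
from ophyd_async.plan_stubs import ensure_connected


def try_connect(run_engine: RunEngine, *devices) -> dict:
    """Connect devices, returning any per-device connection errors."""
    try:
        run_engine(ensure_connected(*devices))
    except NotConnectedError as ex:
        # sub_errors maps device names to their individual connection
        # exceptions, as used by _connect_devices above.
        return dict(ex.sub_errors)
    return {}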

src/dodal/devices/i22/nxsas.py

Lines changed: 5 additions & 24 deletions
@@ -1,11 +1,9 @@
-import asyncio
-from collections.abc import Awaitable, Iterable
 from dataclasses import dataclass, fields
 from typing import TypeVar
 
 from bluesky.protocols import Reading
 from event_model.documents.event_descriptor import DataKey
-from ophyd_async.core import PathProvider
+from ophyd_async.core import PathProvider, merge_gathered_dicts
 from ophyd_async.epics.adaravis import AravisDetector
 from ophyd_async.epics.adcore import NDPluginBaseIO
 from ophyd_async.epics.adpilatus import PilatusDetector
@@ -14,23 +12,6 @@
 T = TypeVar("T")
 
 
-# TODO: Remove this file as part of github.com/DiamondLightSource/dodal/issues/595
-# Until which, temporarily duplicated non-public method from ophyd_async
-async def _merge_gathered_dicts(
-    coros: Iterable[Awaitable[dict[str, T]]],
-) -> dict[str, T]:
-    """Merge dictionaries produced by a sequence of coroutines.
-
-    Can be used for merging ``read()`` or ``describe``. For instance::
-
-        combined_read = await merge_gathered_dicts(s.read() for s in signals)
-    """
-    ret: dict[str, T] = {}
-    for result in await asyncio.gather(*coros):
-        ret.update(result)
-    return ret
-
-
 @dataclass
 class MetadataHolder:
     # TODO: just in case this is useful more widely...
@@ -124,7 +105,7 @@ def __init__(
         self._metadata_holder = metadata_holder
 
     async def read_configuration(self) -> dict[str, Reading]:
-        return await _merge_gathered_dicts(
+        return await merge_gathered_dicts(
             r
             for r in (
                 super().read_configuration(),
@@ -133,7 +114,7 @@ async def read_configuration(self) -> dict[str, Reading]:
         )
 
     async def describe_configuration(self) -> dict[str, DataKey]:
-        return await _merge_gathered_dicts(
+        return await merge_gathered_dicts(
             r
             for r in (
                 super().describe_configuration(),
@@ -167,7 +148,7 @@ def __init__(
         self._metadata_holder = metadata_holder
 
     async def read_configuration(self) -> dict[str, Reading]:
-        return await _merge_gathered_dicts(
+        return await merge_gathered_dicts(
             r
             for r in (
                 super().read_configuration(),
@@ -176,7 +157,7 @@ async def read_configuration(self) -> dict[str, Reading]:
         )
 
     async def describe_configuration(self) -> dict[str, DataKey]:
-        return await _merge_gathered_dicts(
+        return await merge_gathered_dicts(
             r
             for r in (
                 super().describe_configuration(),
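
The temporarily vendored helper is dropped in favour of merge_gathered_dicts, now imported from ophyd_async.core. Usage is unchanged; a sketch based on the docstring of the removed copy:

from ophyd_async.core import merge_gathered_dicts


async def combined_read(signals):
    # Merge the read() results of several signals into one dict;
    # the same pattern works for describe()/describe_configuration().
    return await merge_gathered_dicts(s.read() for s in signals)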

tests/common/beamlines/test_device_instantiation.py

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 from typing import Any
 
 import pytest
-from ophyd_async.core import NotConnected
+from ophyd_async.core import NotConnectedError
 
 from dodal.beamlines import all_beamline_modules
 from dodal.utils import BLUESKY_PROTOCOLS, make_all_devices
@@ -23,7 +23,7 @@ def test_device_creation(module_and_devices_for_beamline):
     """
     _, devices, exceptions = module_and_devices_for_beamline
     if len(exceptions) > 0:
-        raise NotConnected(exceptions)
+        raise NotConnectedError(exceptions)
     devices_not_following_bluesky_protocols = [
         name
         for name, device in devices.items()

tests/devices/current_amplifier/test_femto.py

Lines changed: 9 additions & 17 deletions
@@ -1,4 +1,4 @@
-from collections import defaultdict
+from collections.abc import Mapping
 from enum import Enum
 from unittest import mock
 from unittest.mock import AsyncMock, Mock
@@ -235,9 +235,10 @@ class MockFemto3xxRaiseTime(float, Enum):
     ],
 )
 async def test_femto_struck_scaler_read(
+    run_engine: RunEngine,
+    run_engine_documents: Mapping[str, list[dict]],
     mock_femto: FemtoDDPCA,
     mock_femto_struck_scaler_detector,
-    run_engine: RunEngine,
     gain,
     raw_voltage,
     expected_current,
@@ -246,16 +247,12 @@ async def test_femto_struck_scaler_read(
     set_mock_value(mock_femto_struck_scaler_detector.counter().count_time, 1)
     set_mock_value(mock_femto_struck_scaler_detector.counter().readout, raw_voltage)
     set_mock_value(mock_femto_struck_scaler_detector.auto_mode, False)
-    docs = defaultdict(list)
     mock_femto_struck_scaler_detector.current_amp().raise_timetable = (
         MockFemto3xxRaiseTime
     )
 
-    def capture_emitted(name, doc):
-        docs[name].append(doc)
-
-    run_engine(count([mock_femto_struck_scaler_detector]), capture_emitted)
-    assert docs["event"][0]["data"][
+    run_engine(count([mock_femto_struck_scaler_detector]))
+    assert run_engine_documents["event"][0]["data"][
         "mock_femto_struck_scaler_detector-current"
     ] == pytest.approx(expected_current)
 
@@ -274,9 +271,10 @@ def capture_emitted(name, doc):
     ],
 )
 async def test_femto_struck_scaler_read_with_auto_gain(
+    run_engine: RunEngine,
+    run_engine_documents: Mapping[str, list[dict]],
     mock_femto: FemtoDDPCA,
     mock_femto_struck_scaler_detector,
-    run_engine: RunEngine,
     gain,
     raw_voltage,
     expected_current,
@@ -302,13 +300,7 @@ def set_mock_counter():
         mock_femto_struck_scaler_detector.counter().trigger_start,
         lambda *_, **__: set_mock_counter(),
     )
-
-    docs = defaultdict(list)
-
-    def capture_emitted(name, doc):
-        docs[name].append(doc)
-
-    run_engine(count([mock_femto_struck_scaler_detector]), capture_emitted)
-    assert docs["event"][0]["data"][
+    run_engine(count([mock_femto_struck_scaler_detector]))
+    assert run_engine_documents["event"][0]["data"][
         "mock_femto_struck_scaler_detector-current"
     ] == pytest.approx(expected_current, rel=1e-14)
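
Both tests now take a shared run_engine_documents fixture instead of building a local capture_emitted callback. The fixture itself is not part of this diff (it presumably lives in a conftest.py); a hypothetical sketch doing the same job as the removed boilerplate:

from collections import defaultdict
from collections.abc import Mapping

import pytest
from bluesky.run_engine import RunEngine


@pytest.fixture
def run_engine_documents(run_engine: RunEngine) -> Mapping[str, list[dict]]:
    # Hypothetical sketch: collect every document the RunEngine emits,
    # keyed by document name ("start", "descriptor", "event", ...).
    docs: dict[str, list[dict]] = defaultdict(list)
    run_engine.subscribe(lambda name, doc: docs[name].append(doc))
    return docs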

tests/devices/current_amplifier/test_sr570.py

Lines changed: 11 additions & 20 deletions
@@ -1,4 +1,4 @@
-from collections import defaultdict
+from collections.abc import Mapping
 from enum import Enum
 from unittest import mock
 from unittest.mock import AsyncMock, Mock
@@ -215,7 +215,7 @@ class MockSR570RaiseTimeTable(float, Enum):
 
 
 @pytest.mark.parametrize(
-    "gain,raw_count, expected_current",
+    "gain, raw_count, expected_current",
     [
         ("SEN_1", 0.51e5, 0.51e-3),
         ("SEN_3", -10e5, -2e-3),
@@ -226,8 +226,9 @@ class MockSR570RaiseTimeTable(float, Enum):
     ],
 )
 async def test_sr570_struck_scaler_read(
-    mock_sr570_struck_scaler_detector,
     run_engine: RunEngine,
+    run_engine_documents: Mapping[str, list[dict]],
+    mock_sr570_struck_scaler_detector,
     gain,
     raw_count,
     expected_current,
@@ -241,19 +242,14 @@ async def test_sr570_struck_scaler_read(
     mock_sr570_struck_scaler_detector.current_amp().raise_timetable = (
         MockSR570RaiseTimeTable
     )
-    docs = defaultdict(list)
-
-    def capture_emitted(name, doc):
-        docs[name].append(doc)
-
-    run_engine(count([mock_sr570_struck_scaler_detector]), capture_emitted)
-    assert docs["event"][0]["data"][
+    run_engine(count([mock_sr570_struck_scaler_detector]))
+    assert run_engine_documents["event"][0]["data"][
         "mock_sr570_struck_scaler_detector-current"
     ] == pytest.approx(expected_current)
 
 
 @pytest.mark.parametrize(
-    "gain,raw_count, expected_current",
+    "gain, raw_count, expected_current",
     [
         (
             "SEN_10",
@@ -273,8 +269,9 @@ def capture_emitted(name, doc):
     ],
 )
 async def test_sr570_struck_scaler_read_with_autogain(
-    mock_sr570_struck_scaler_detector,
     run_engine: RunEngine,
+    run_engine_documents: Mapping[str, list[dict]],
+    mock_sr570_struck_scaler_detector,
     gain,
     raw_count,
     expected_current,
@@ -301,14 +298,8 @@ def set_mock_counter():
         mock_sr570_struck_scaler_detector.counter().trigger_start,
         lambda *_, **__: set_mock_counter(),
    )
-
-    docs = defaultdict(list)
-
-    def capture_emitted(name, doc):
-        docs[name].append(doc)
-
     run_engine(prepare(mock_sr570_struck_scaler_detector, 1))
-    run_engine(count([mock_sr570_struck_scaler_detector]), capture_emitted)
-    assert docs["event"][0]["data"][
+    run_engine(count([mock_sr570_struck_scaler_detector]))
+    assert run_engine_documents["event"][0]["data"][
         "mock_sr570_struck_scaler_detector-current"
     ] == pytest.approx(expected_current, rel=1e-14)

tests/devices/i04/test_transfocator.py

Lines changed: 6 additions & 2 deletions
@@ -2,6 +2,7 @@
 from unittest.mock import AsyncMock, patch
 
 import pytest
+from bluesky.protocols import Reading
 from ophyd_async.core import (
     init_devices,
     wait_for_value,
@@ -19,10 +20,13 @@ async def fake_transfocator() -> Transfocator:
 
 
 def given_predicted_lenses_is_half_of_beamsize(transfocator: Transfocator):
-    def lens_number_is_half_beamsize(value, *args, **kwargs):
+    def lens_number_is_half_beamsize(
+        reading: dict[str, Reading[float]], *args, **kwargs
+    ):
+        value = reading[transfocator.beamsize_set_microns.name]["value"]
         set_mock_value(transfocator.predicted_vertical_num_lenses, int(value / 2))
 
-    transfocator.beamsize_set_microns.subscribe_value(lens_number_is_half_beamsize)
+    transfocator.beamsize_set_microns.subscribe_reading(lens_number_is_half_beamsize)
 
 
 async def set_beamsize_to_same_value_as_mock_signal(
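
The callback moves from subscribe_value to subscribe_reading, so it now receives a mapping of signal name to Reading rather than the bare value. A minimal sketch of the two callback shapes, following the API as used in the test above:

from bluesky.protocols import Reading


# Previous style: the callback receives the plain value.
def on_value(value: float, *args, **kwargs) -> None:
    print("new value:", value)


# New style: the callback receives {signal_name: Reading}; the value
# is unpacked from the Reading dict, as the updated test does.
def on_reading(reading: dict[str, Reading[float]], *args, **kwargs) -> None:
    value = next(iter(reading.values()))["value"]
    print("new value:", value)


# signal.subscribe_value(on_value)      # old API usage
# signal.subscribe_reading(on_reading)  # usage after this change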
