From 8eae376120754d094b4e2821c39f81ef43bf5ff2 Mon Sep 17 00:00:00 2001 From: gkalua Date: Fri, 13 Mar 2026 16:19:28 +0000 Subject: [PATCH 01/30] WIP-Updates-some-imports-and-names --- pyproject.toml | 2 +- src/dodal/beamlines/adsim.py | 4 +- src/dodal/beamlines/b01_1.py | 8 +- src/dodal/beamlines/i13_1.py | 4 +- src/dodal/beamlines/i22.py | 10 +- src/dodal/beamlines/i23.py | 4 +- src/dodal/beamlines/p38.py | 12 +- src/dodal/beamlines/p45.py | 8 +- src/dodal/beamlines/training_rig.py | 4 +- src/dodal/devices/beamlines/b16/detector.py | 19 +-- .../devices/beamlines/i10/diagnostics.py | 6 +- src/dodal/devices/beamlines/i11/mythen.py | 116 +++++++++++------- src/dodal/devices/beamlines/i13_1/merlin.py | 36 +++--- .../beamlines/i13_1/merlin_controller.py | 75 +++++++---- src/dodal/devices/beamlines/i22/nxsas.py | 16 +-- .../beamlines/i24/commissioning_jungfrau.py | 5 +- .../devices/beamlines/p99/andor2_point.py | 4 +- src/dodal/devices/controllers.py | 19 +-- .../electron_analyser/base/base_controller.py | 4 +- src/dodal/devices/single_trigger_detector.py | 44 +++++++ src/dodal/devices/tetramm.py | 24 ++-- tests/devices/beamlines/i11/test_mythen.py | 6 +- tests/devices/beamlines/i13_1/test_merlin.py | 13 +- tests/devices/test_tetramm.py | 29 ++--- uv.lock | 8 +- 25 files changed, 289 insertions(+), 191 deletions(-) create mode 100644 src/dodal/devices/single_trigger_detector.py diff --git a/pyproject.toml b/pyproject.toml index d78ba68e3b5..32ee3af1d26 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ description = "Ophyd devices and other utils that could be used across DLS beaml dependencies = [ "click", "ophyd", - "ophyd-async[ca,pva]>=v0.16.0", + "ophyd-async[ca,pva]>=v0.17a1", "bluesky>=1.14.5", "pyepics", "pillow", diff --git a/src/dodal/beamlines/adsim.py b/src/dodal/beamlines/adsim.py index a33a3dc8d66..c5b8667c3b2 100644 --- a/src/dodal/beamlines/adsim.py +++ b/src/dodal/beamlines/adsim.py @@ -83,6 +83,6 @@ def det(path_provider) -> 
SimDetector: return SimDetector( f"{PREFIX.beamline_prefix}-DI-CAM-01:", path_provider=path_provider, - drv_suffix=DET_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=DET_SUFFIX, + writer_suffix=HDF5_SUFFIX, ) diff --git a/src/dodal/beamlines/b01_1.py b/src/dodal/beamlines/b01_1.py index 8e0b19f3059..58d74686cfd 100644 --- a/src/dodal/beamlines/b01_1.py +++ b/src/dodal/beamlines/b01_1.py @@ -88,8 +88,8 @@ def spectroscopy_detector(path_provider: PathProvider) -> AravisDetector: return AravisDetector( pv_prefix, path_provider=path_provider, - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, plugins={ "roistat": NDROIStatIO(f"{pv_prefix}ROISTAT:", num_channels=3), }, @@ -108,8 +108,8 @@ def imaging_detector(path_provider: PathProvider) -> AravisDetector: return AravisDetector( f"{PREFIX.beamline_prefix}-DI-DCAM-01:", path_provider=path_provider, - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, ) diff --git a/src/dodal/beamlines/i13_1.py b/src/dodal/beamlines/i13_1.py index 4ce43ccc8f5..230b4e16669 100644 --- a/src/dodal/beamlines/i13_1.py +++ b/src/dodal/beamlines/i13_1.py @@ -42,8 +42,8 @@ def sample_xyz_lab_fa_stage() -> XYZStage: def side_camera() -> AravisDetector: return AravisDetector( prefix=f"{PREFIX}-OP-FLOAT-03:", - drv_suffix="CAM:", - fileio_suffix="HDF5:", + driver_suffix="CAM:", + writer_suffix="HDF5:", path_provider=get_path_provider(), ) diff --git a/src/dodal/beamlines/i22.py b/src/dodal/beamlines/i22.py index 2373220569c..3df11ddecb1 100644 --- a/src/dodal/beamlines/i22.py +++ b/src/dodal/beamlines/i22.py @@ -7,7 +7,7 @@ UUIDFilenameProvider, ) from ophyd_async.epics.adaravis import AravisDetector -from ophyd_async.epics.adcore import NDPluginBaseIO, NDPluginStatsIO +from ophyd_async.epics.adcore import NDPluginBaseIO, NDStatsIO from ophyd_async.epics.adpilatus import PilatusDetector from ophyd_async.fastcs.panda import 
HDFPanda @@ -70,9 +70,7 @@ def saxs(path_provider: PathProvider) -> PilatusDetector: fileio_suffix=HDF5_SUFFIX, metadata_holder=metadata_holder, plugins={ - "stats": NDPluginStatsIO( - prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-01:STAT:" - ) + "stats": NDStatsIO(prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-01:STAT:") }, ) @@ -100,9 +98,7 @@ def waxs(path_provider: PathProvider) -> PilatusDetector: fileio_suffix=HDF5_SUFFIX, metadata_holder=metadata_holder, plugins={ - "stats": NDPluginStatsIO( - prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-03:STAT:" - ) + "stats": NDStatsIO(prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-03:STAT:") }, ) diff --git a/src/dodal/beamlines/i23.py b/src/dodal/beamlines/i23.py index 260be4c5fca..a234208a0c5 100644 --- a/src/dodal/beamlines/i23.py +++ b/src/dodal/beamlines/i23.py @@ -90,8 +90,8 @@ def pilatus(path_provider: PathProvider) -> PilatusDetector: return PilatusDetector( prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-01:", path_provider=path_provider, - drv_suffix="cam1:", - fileio_suffix=HDF5_SUFFIX, + driver_suffix="cam1:", + writer_suffix=HDF5_SUFFIX, ) diff --git a/src/dodal/beamlines/p38.py b/src/dodal/beamlines/p38.py index c4fc10f0a31..1147c0e5c1b 100644 --- a/src/dodal/beamlines/p38.py +++ b/src/dodal/beamlines/p38.py @@ -51,8 +51,8 @@ def d3() -> AravisDetector: return AravisDetector( f"{PREFIX.beamline_prefix}-DI-DCAM-01:", path_provider=get_path_provider(), - drv_suffix="DET:", - fileio_suffix=HDF5_SUFFIX, + driver_suffix="DET:", + writer_suffix=HDF5_SUFFIX, ) @@ -62,8 +62,8 @@ def d11() -> AravisDetector: return AravisDetector( f"{PREFIX.beamline_prefix}-DI-DCAM-03:", path_provider=get_path_provider(), - drv_suffix="DET:", - fileio_suffix=HDF5_SUFFIX, + driver_suffix="DET:", + writer_suffix=HDF5_SUFFIX, ) @@ -72,8 +72,8 @@ def d12() -> AravisDetector: return AravisDetector( f"{PREFIX.beamline_prefix}-DI-DCAM-04:", path_provider=get_path_provider(), - drv_suffix="DET:", - fileio_suffix=HDF5_SUFFIX, + driver_suffix="DET:", + 
writer_suffix=HDF5_SUFFIX, ) diff --git a/src/dodal/beamlines/p45.py b/src/dodal/beamlines/p45.py index ddbb51d4a06..e31628e4c3f 100644 --- a/src/dodal/beamlines/p45.py +++ b/src/dodal/beamlines/p45.py @@ -45,8 +45,8 @@ def det() -> AravisDetector: return AravisDetector( f"{PREFIX.beamline_prefix}-EA-MAP-01:", path_provider=get_path_provider(), - drv_suffix=DET_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=DET_SUFFIX, + writer_suffix=HDF5_SUFFIX, ) @@ -56,8 +56,8 @@ def diff() -> AravisDetector: return AravisDetector( f"{PREFIX.beamline_prefix}-EA-DIFF-01:", path_provider=get_path_provider(), - drv_suffix=DET_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=DET_SUFFIX, + writer_suffix=HDF5_SUFFIX, ) diff --git a/src/dodal/beamlines/training_rig.py b/src/dodal/beamlines/training_rig.py index 42c80de74d0..b8b9462df58 100644 --- a/src/dodal/beamlines/training_rig.py +++ b/src/dodal/beamlines/training_rig.py @@ -47,8 +47,8 @@ def det(path_provider: PathProvider) -> AravisDetector: return AravisDetector( f"{PREFIX.beamline_prefix}-EA-DET-01:", path_provider=path_provider, - drv_suffix=DET_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=DET_SUFFIX, + writer_suffix=HDF5_SUFFIX, ) diff --git a/src/dodal/devices/beamlines/b16/detector.py b/src/dodal/devices/beamlines/b16/detector.py index 2c00beae06e..e4f058194ca 100644 --- a/src/dodal/devices/beamlines/b16/detector.py +++ b/src/dodal/devices/beamlines/b16/detector.py @@ -1,12 +1,13 @@ from ophyd_async.epics.adcore import ( + ADArmLogic, ADBaseIO, - ADTIFFWriter, + ADWriterType, AreaDetector, ) from dodal.common.beamlines.beamline_utils import get_path_provider from dodal.common.beamlines.device_helpers import CAM_SUFFIX, TIFF_SUFFIX -from dodal.devices.controllers import ConstantDeadTimeController +from dodal.devices.controllers import ConstantDeadTimeTriggerLogic def software_triggered_tiff_area_detector(prefix: str, deadtime: float = 0.0): @@ -14,11 +15,13 @@ def 
software_triggered_tiff_area_detector(prefix: str, deadtime: float = 0.0): and a TIFF file writer. Most detectors in B16 could be configured like this. """ + driver = ADBaseIO(prefix + CAM_SUFFIX) return AreaDetector( - writer=ADTIFFWriter.with_io( - prefix=prefix, path_provider=get_path_provider(), fileio_suffix=TIFF_SUFFIX - ), - controller=ConstantDeadTimeController( - driver=ADBaseIO(prefix + CAM_SUFFIX), deadtime=deadtime - ), + prefix=prefix, + driver=driver, + arm_logic=ADArmLogic(driver), + trigger_logic=ConstantDeadTimeTriggerLogic(driver, deadtime), + path_provider=get_path_provider(), + writer_type=ADWriterType.TIFF, + writer_suffix=TIFF_SUFFIX, ) diff --git a/src/dodal/devices/beamlines/i10/diagnostics.py b/src/dodal/devices/beamlines/i10/diagnostics.py index c790b6e9d75..5d6e70ca2b2 100644 --- a/src/dodal/devices/beamlines/i10/diagnostics.py +++ b/src/dodal/devices/beamlines/i10/diagnostics.py @@ -5,8 +5,9 @@ ) from ophyd_async.core import StandardReadableFormat as Format from ophyd_async.core._device import DeviceConnector -from ophyd_async.epics.adaravis import AravisDriverIO -from ophyd_async.epics.adcore import SingleTriggerDetector +from ophyd_async.epics.adaravis import ( + AravisDriverIO, +) from ophyd_async.epics.core import ( epics_signal_r, epics_signal_rw, @@ -21,6 +22,7 @@ StruckScaler, ) from dodal.devices.positioner import create_positioner +from dodal.devices.single_trigger_detector import SingleTriggerDetector class D3Position(StrictEnum): diff --git a/src/dodal/devices/beamlines/i11/mythen.py b/src/dodal/devices/beamlines/i11/mythen.py index 8ea4ac466b5..647ab3e5129 100644 --- a/src/dodal/devices/beamlines/i11/mythen.py +++ b/src/dodal/devices/beamlines/i11/mythen.py @@ -2,17 +2,15 @@ from typing import Annotated as A from ophyd_async.core import ( - DetectorTrigger, + DetectorTriggerLogic, PathProvider, SignalRW, StrictEnum, - TriggerInfo, ) from ophyd_async.epics.adcore import ( - ADBaseController, - ADHDFWriter, + ADArmLogic, 
ADImageMode, - ADWriter, + ADWriterType, AreaDetector, NDArrayBaseIO, ) @@ -98,42 +96,65 @@ class Mythen3Driver(NDArrayBaseIO): _BIT_DEPTH = 24 -class Mythen3Controller(ADBaseController): - """ADBaseController` for a Mythen3.""" +class Mythen3TriggerLogic(DetectorTriggerLogic): + """Trigger logic for a Mythen3.""" def __init__(self, driver: Mythen3Driver): self._driver = driver - super().__init__(driver=self._driver) + # super().__init__(driver=self._driver) - def get_deadtime(self, exposure: float | None) -> float: + def get_deadtime(self, config_values) -> float: return _DEADTIMES[_BIT_DEPTH] - async def prepare(self, trigger_info: TriggerInfo) -> None: - if (exposure := trigger_info.livetime) is not None: - await self._driver.acquire_time.set(exposure) - - if trigger_info.trigger is DetectorTrigger.INTERNAL: - await self._driver.trigger_mode.set(Mythen3TriggerMode.INTERNAL) - elif trigger_info.trigger in { - DetectorTrigger.CONSTANT_GATE, - DetectorTrigger.EDGE_TRIGGER, - DetectorTrigger.VARIABLE_GATE, - }: - await self._driver.trigger_mode.set(Mythen3TriggerMode.EXTERNAL) - else: - raise ValueError(f"Mythen3 does not support {trigger_info.trigger}") - - if trigger_info.total_number_of_exposures == 0: - image_mode = ADImageMode.CONTINUOUS - else: - image_mode = ADImageMode.MULTIPLE + # async def prepare(self, trigger_info: TriggerInfo) -> None: + # if (exposure := trigger_info.livetime) is not None: + # await self._driver.acquire_time.set(exposure) + + # if trigger_info.trigger is DetectorTrigger.INTERNAL: + # await self._driver.trigger_mode.set(Mythen3TriggerMode.INTERNAL) + # elif trigger_info.trigger in { + # DetectorTrigger.EXTERNAL_LEVEL, + # DetectorTrigger.EXTERNAL_EDGE, + # DetectorTrigger.EXTERNAL_LEVEL, + # }: + # await self._driver.trigger_mode.set(Mythen3TriggerMode.EXTERNAL) + # else: + # raise ValueError(f"Mythen3 does not support {trigger_info.trigger}") + + # if trigger_info.number_of_exposures == 0: + # image_mode = ADImageMode.CONTINUOUS + # 
else: + # image_mode = ADImageMode.MULTIPLE + # await asyncio.gather( + # self._driver.num_images.set(trigger_info.number_of_exposures), + # self._driver.image_mode.set(image_mode), + # ) + + async def prepare_internal(self, num: int, livetime: float, deadtime: float): + if livetime: + await self._driver.acquire_time.set(livetime) + await self._driver.trigger_mode.set(Mythen3TriggerMode.INTERNAL) + image_mode = ADImageMode.CONTINUOUS if num == 0 else ADImageMode.MULTIPLE await asyncio.gather( - self._driver.num_images.set(trigger_info.total_number_of_exposures), + self._driver.num_images.set(num), self._driver.image_mode.set(image_mode), ) + async def prepare_edge(self, num: int, livetime: float): + if livetime: + await self._driver.acquire_time.set(livetime) + await self._driver.trigger_mode.set(Mythen3TriggerMode.EXTERNAL) + image_mode = ADImageMode.CONTINUOUS if num == 0 else ADImageMode.MULTIPLE + await asyncio.gather( + self._driver.num_images.set(num), + self._driver.image_mode.set(image_mode), + ) + + async def prepare_level(self, num: int): + await self.prepare_edge(num, 0.0) -class Mythen3(AreaDetector[Mythen3Controller]): + +class Mythen3(AreaDetector[Mythen3Driver]): """The detector may be configured for an external trigger on a GPIO port, which must be done prior to preparing the detector. 
""" @@ -143,22 +164,31 @@ def __init__( prefix: str, path_provider: PathProvider, drv_suffix: str = DET_SUFFIX, - writer_cls: type[ADWriter] = ADHDFWriter, - fileio_suffix: str | None = "HDF:", + writer_type: ADWriterType = ADWriterType.HDF, + writer_suffix: str | None = "HDF:", name: str = "", ): self.driver = Mythen3Driver(prefix + drv_suffix) - self.controller = Mythen3Controller(driver=self.driver) - - self.writer = writer_cls.with_io( - prefix, - path_provider, - dataset_source=self.driver, - fileio_suffix=fileio_suffix, - ) + # self.writer = writer_cls.with_io( + # prefix, + # path_provider, + # dataset_source=self.driver, + # fileio_suffix=fileio_suffix, + # ) + + # super().__init__( + # controller=self.controller, + # writer=self.writer, + # name=name, + # ) # plugins=plugins # config_sigs=config_sigs super().__init__( - controller=self.controller, - writer=self.writer, + prefix=prefix, + driver=self.driver, + arm_logic=ADArmLogic(self.driver), + trigger_logic=Mythen3TriggerLogic(self.driver), + path_provider=path_provider, + writer_type=writer_type, + writer_suffix=writer_suffix, name=name, - ) # plugins=plugins # config_sigs=config_sigs + ) diff --git a/src/dodal/devices/beamlines/i13_1/merlin.py b/src/dodal/devices/beamlines/i13_1/merlin.py index 5057d2dbcbd..f88d2e59445 100644 --- a/src/dodal/devices/beamlines/i13_1/merlin.py +++ b/src/dodal/devices/beamlines/i13_1/merlin.py @@ -1,32 +1,32 @@ -from ophyd_async.core import PathProvider, StandardDetector +from ophyd_async.core import PathProvider from ophyd_async.epics import adcore from dodal.common.beamlines.device_helpers import CAM_SUFFIX, HDF5_SUFFIX -from dodal.devices.beamlines.i13_1.merlin_controller import MerlinController +from dodal.devices.beamlines.i13_1.merlin_controller import ( + MerlinArmLogic, + MerlinTriggerLogic, +) -class Merlin(StandardDetector): - _controller: MerlinController - _writer: adcore.ADHDFWriter - +class Merlin(adcore.AreaDetector): def __init__( self, prefix: str, 
path_provider: PathProvider, - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, name: str = "", ): - self.drv = adcore.ADBaseIO(prefix + drv_suffix) - self.hdf = adcore.NDFileHDFIO(prefix + fileio_suffix) - + self.driver = adcore.ADBaseIO(prefix + driver_suffix) + self.hdf = adcore.NDFileHDF5IO(prefix + writer_suffix) super().__init__( - MerlinController(self.drv), - adcore.ADHDFWriter( - fileio=self.hdf, - path_provider=path_provider, - dataset_describer=adcore.ADBaseDatasetDescriber(self.drv), - ), - config_sigs=(self.drv.acquire_period, self.drv.acquire_time), + prefix=prefix, + driver=self.driver, + arm_logic=MerlinArmLogic(self.driver), + trigger_logic=MerlinTriggerLogic(self.driver), + path_provider=path_provider, + writer_suffix=writer_suffix, + writer_type=adcore.ADWriterType.HDF, + config_sigs=(self.driver.acquire_period, self.driver.acquire_time), name=name, ) diff --git a/src/dodal/devices/beamlines/i13_1/merlin_controller.py b/src/dodal/devices/beamlines/i13_1/merlin_controller.py index d8297b75270..a312f4f2610 100644 --- a/src/dodal/devices/beamlines/i13_1/merlin_controller.py +++ b/src/dodal/devices/beamlines/i13_1/merlin_controller.py @@ -1,51 +1,72 @@ -import asyncio -import logging - from ophyd_async.core import ( DEFAULT_TIMEOUT, AsyncStatus, - TriggerInfo, + DetectorArmLogic, ) from ophyd_async.epics.adcore import ( - DEFAULT_GOOD_STATES, + # DEFAULT_GOOD_STATES, ADBaseIO, - ADImageMode, - ADState, + # ADImageMode, + # ADState, + prepare_exposures, ) from ophyd_async.epics.core import stop_busy_record -from dodal.devices.controllers import ConstantDeadTimeController +from dodal.devices.controllers import ConstantDeadTimeTriggerLogic -class MerlinController(ConstantDeadTimeController): +class MerlinTriggerLogic(ConstantDeadTimeTriggerLogic): def __init__( self, driver: ADBaseIO, - good_states: frozenset[ADState] = DEFAULT_GOOD_STATES, + # good_states: frozenset[ADState] = 
DEFAULT_GOOD_STATES, ) -> None: self.driver = driver - self.good_states = good_states - self.frame_timeout: float = 0 - self._arm_status: AsyncStatus | None = None - for drv_child in self.driver.children(): - logging.debug(drv_child) + # self.good_states = good_states + + # self.frame_timeout: float = 0 + # self._arm_status: AsyncStatus | None = None + # for drv_child in self.driver.children(): + # logging.debug(drv_child) + + # super().__init__(driver, 0.002) + + # async def prepare(self, trigger_info: TriggerInfo): + # self.frame_timeout = ( + # DEFAULT_TIMEOUT + await self.driver.acquire_time.get_value() + # ) + # await asyncio.gather( + # self.driver.num_images.set(trigger_info.number_of_exposures), + # self.driver.image_mode.set(ADImageMode.MULTIPLE), + # ) + + # async def wait_for_idle(self): + # if self._arm_status: + # await self._arm_status - super().__init__(driver, 0.002) + # async def disarm(self): + # # We can't use caput callback as we already used it in arm() and we can't have + # # 2 or they will deadlock + # await stop_busy_record(self.driver.acquire, timeout=1) + + def get_deadtime(self, config_values) -> float: + return 0.002 + + async def prepare_internal(self, num: int, livetime: float, deadtime: float): + await prepare_exposures(self.driver, num, livetime, deadtime) + + +class MerlinArmLogic(DetectorArmLogic): + def __init__(self, driver: ADBaseIO) -> None: + self.driver = driver + self._arm_status: AsyncStatus | None = None - async def prepare(self, trigger_info: TriggerInfo): - self.frame_timeout = ( - DEFAULT_TIMEOUT + await self.driver.acquire_time.get_value() - ) - await asyncio.gather( - self.driver.num_images.set(trigger_info.total_number_of_exposures), - self.driver.image_mode.set(ADImageMode.MULTIPLE), - ) + async def arm(self): + self._arm_status = await self.driver.acquire.set(True, timeout=DEFAULT_TIMEOUT) async def wait_for_idle(self): if self._arm_status: await self._arm_status async def disarm(self): - # We can't use caput 
callback as we already used it in arm() and we can't have - # 2 or they will deadlock - await stop_busy_record(self.driver.acquire, False, timeout=1) + await stop_busy_record(self.driver.acquire, timeout=1) diff --git a/src/dodal/devices/beamlines/i22/nxsas.py b/src/dodal/devices/beamlines/i22/nxsas.py index a7531d754d8..389d29f06a8 100644 --- a/src/dodal/devices/beamlines/i22/nxsas.py +++ b/src/dodal/devices/beamlines/i22/nxsas.py @@ -83,8 +83,8 @@ def __init__( self, prefix: str, path_provider: PathProvider, - drv_suffix: str, - fileio_suffix: str, + driver_suffix: str, + writer_suffix: str, metadata_holder: NXSasMetadataHolder, name: str = "", plugins: dict[str, NDPluginBaseIO] | None = None, @@ -98,8 +98,8 @@ def __init__( super().__init__( prefix, path_provider, - drv_suffix=drv_suffix, - fileio_suffix=fileio_suffix, + driver_suffix=driver_suffix, + writer_suffix=writer_suffix, name=name, ) self._metadata_holder = metadata_holder @@ -128,8 +128,8 @@ def __init__( self, prefix: str, path_provider: PathProvider, - drv_suffix: str, - fileio_suffix: str, + driver_suffix: str, + writer_suffix: str, metadata_holder: NXSasMetadataHolder, name: str = "", ): @@ -142,8 +142,8 @@ def __init__( super().__init__( prefix, path_provider, - drv_suffix=drv_suffix, - fileio_suffix=fileio_suffix, + driver_suffix=driver_suffix, + writer_suffix=writer_suffix, name=name, ) self._metadata_holder = metadata_holder diff --git a/src/dodal/devices/beamlines/i24/commissioning_jungfrau.py b/src/dodal/devices/beamlines/i24/commissioning_jungfrau.py index 5cb0ec4a304..f1fe64dfc4b 100644 --- a/src/dodal/devices/beamlines/i24/commissioning_jungfrau.py +++ b/src/dodal/devices/beamlines/i24/commissioning_jungfrau.py @@ -6,7 +6,7 @@ from event_model import DataKey # type: ignore from ophyd_async.core import ( AsyncStatus, - DetectorWriter, + DetectorDataLogic, PathProvider, StandardDetector, StandardReadable, @@ -16,13 +16,14 @@ wait_for_value, ) from ophyd_async.epics.core import 
epics_signal_r, epics_signal_rw, epics_signal_rw_rbv +from ophyd_async.fastcs.jungfrau import JungfrauDriverIO from ophyd_async.fastcs.jungfrau._controller import JungfrauController from ophyd_async.fastcs.jungfrau._signals import JungfrauDriverIO from dodal.log import LOGGER -class JungfrauCommissioningWriter(DetectorWriter, StandardReadable): +class JungfrauCommissioningWriter(DetectorDataLogic, StandardReadable): """Implementation of the temporary filewriter used for Jungfrau commissioning on i24. The PVs on this device are responsible for writing files of a specified name diff --git a/src/dodal/devices/beamlines/p99/andor2_point.py b/src/dodal/devices/beamlines/p99/andor2_point.py index 134e7dff10d..89d222e1bb9 100644 --- a/src/dodal/devices/beamlines/p99/andor2_point.py +++ b/src/dodal/devices/beamlines/p99/andor2_point.py @@ -2,9 +2,11 @@ StandardReadableFormat, ) from ophyd_async.epics.adandor import Andor2DriverIO -from ophyd_async.epics.adcore import NDPluginBaseIO, SingleTriggerDetector +from ophyd_async.epics.adcore import NDPluginBaseIO from ophyd_async.epics.core import epics_signal_r +from dodal.devices.single_trigger_detector import SingleTriggerDetector + class Andor2Point(SingleTriggerDetector): """Using the andor2 as if it is a massive point detector, read the read uncached diff --git a/src/dodal/devices/controllers.py b/src/dodal/devices/controllers.py index 59b588ee265..21739509db6 100644 --- a/src/dodal/devices/controllers.py +++ b/src/dodal/devices/controllers.py @@ -1,23 +1,26 @@ from typing import TypeVar -from ophyd_async.epics.adcore import ADBaseController, ADBaseIO, ADImageMode +from ophyd_async.core import DetectorTriggerLogic, SignalDict +from ophyd_async.epics.adcore import ADBaseIO, ADImageMode, prepare_exposures ADBaseIOT = TypeVar("ADBaseIOT", bound=ADBaseIO) -class ConstantDeadTimeController(ADBaseController[ADBaseIOT]): - """ADBaseController with a configured constant deadtime for a driver of type - ADBaseIO. 
- """ +class ConstantDeadTimeTriggerLogic(DetectorTriggerLogic): + """DetectorTriggerLogic with a configured constant deadtime.""" def __init__( self, - driver: ADBaseIOT, + driver: ADBaseIO, deadtime: float, image_mode: ADImageMode = ADImageMode.MULTIPLE, ): - super().__init__(driver, image_mode=image_mode) + # super().__init__(driver, image_mode=image_mode) + self.driver = driver self.deadtime = deadtime - def get_deadtime(self, exposure: float | None) -> float: + def get_deadtime(self, config_values: SignalDict) -> float: return self.deadtime + + async def prepare_internal(self, num: int, livetime: float, deadtime: float): + await prepare_exposures(self.driver, num, livetime, deadtime) diff --git a/src/dodal/devices/electron_analyser/base/base_controller.py b/src/dodal/devices/electron_analyser/base/base_controller.py index b942159de01..ce839dd9b34 100644 --- a/src/dodal/devices/electron_analyser/base/base_controller.py +++ b/src/dodal/devices/electron_analyser/base/base_controller.py @@ -3,7 +3,7 @@ from ophyd_async.core import TriggerInfo from ophyd_async.epics.adcore import ADImageMode -from dodal.devices.controllers import ConstantDeadTimeController +from dodal.devices.controllers import ConstantDeadTimeTriggerLogic from dodal.devices.electron_analyser.base.base_driver_io import ( GenericAnalyserDriverIO, TAbstractAnalyserDriverIO, @@ -18,7 +18,7 @@ class ElectronAnalyserController( - ConstantDeadTimeController[TAbstractAnalyserDriverIO], + ConstantDeadTimeTriggerLogic[TAbstractAnalyserDriverIO], Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], ): """Specialised controller for the electron analysers to provide additional setup diff --git a/src/dodal/devices/single_trigger_detector.py b/src/dodal/devices/single_trigger_detector.py new file mode 100644 index 00000000000..5fe1395293a --- /dev/null +++ b/src/dodal/devices/single_trigger_detector.py @@ -0,0 +1,44 @@ +import asyncio +from collections.abc import Sequence + +from bluesky.protocols import 
Triggerable +from ophyd_async.core import AsyncStatus, SignalR, StandardReadable +from ophyd_async.core import StandardReadableFormat as Format +from ophyd_async.epics.adcore import ADBaseIO, ADImageMode, NDPluginBaseIO + + +class SingleTriggerDetector(StandardReadable, Triggerable): + """A minimal triggerable detector that takes a single image and reads plugin + statistics. Does not write files. Replaces the removed ophyd-async class of + the same name. + """ + + def __init__( + self, + drv: ADBaseIO, + read_uncached: Sequence[SignalR] = (), + name: str = "", + plugins: dict[str, NDPluginBaseIO] | None = None, + ) -> None: + self.drv = drv + if plugins is not None: + for k, v in plugins.items(): + setattr(self, k, v) + self.add_readables( + [self.drv.array_counter, *read_uncached], + Format.HINTED_UNCACHED_SIGNAL, + ) + self.add_readables([self.drv.acquire_time], Format.CONFIG_SIGNAL) + super().__init__(name=name) + + @AsyncStatus.wrap + async def stage(self) -> None: + await asyncio.gather( + self.drv.image_mode.set(ADImageMode.SINGLE), + self.drv.wait_for_plugins.set(True), + ) + await super().stage() + + @AsyncStatus.wrap + async def trigger(self) -> None: + await self.drv.acquire.set(True) diff --git a/src/dodal/devices/tetramm.py b/src/dodal/devices/tetramm.py index 40403471312..0597322aad6 100644 --- a/src/dodal/devices/tetramm.py +++ b/src/dodal/devices/tetramm.py @@ -5,7 +5,6 @@ from ophyd_async.core import ( DEFAULT_TIMEOUT, AsyncStatus, - DatasetDescriber, DetectorController, DetectorTrigger, PathProvider, @@ -19,9 +18,10 @@ wait_for_value, ) from ophyd_async.epics.adcore import ( - ADHDFWriter, + ADHDFDataLogic, NDArrayBaseIO, - NDFileHDFIO, + NDArrayDescription, + NDFileHDF5IO, NDPluginBaseIO, ) from ophyd_async.epics.core import PvSuffix, epics_signal_r, stop_busy_record @@ -78,9 +78,9 @@ class TetrammController(DetectorController): """Controller for a TetrAMM current monitor.""" _supported_trigger_types = { - DetectorTrigger.EDGE_TRIGGER: 
TetrammTrigger.EXT_TRIGGER, - DetectorTrigger.CONSTANT_GATE: TetrammTrigger.EXT_TRIGGER, - DetectorTrigger.VARIABLE_GATE: TetrammTrigger.EXT_TRIGGER, + DetectorTrigger.EXTERNAL_EDGE: TetrammTrigger.EXT_TRIGGER, + DetectorTrigger.EXTERNAL_LEVEL: TetrammTrigger.EXT_TRIGGER, + DetectorTrigger.EXTERNAL_LEVEL: TetrammTrigger.EXT_TRIGGER, } """"On the TetrAMM ASCII mode requires a minimum value of ValuesPerRead of 500, [...] binary mode the minimum value of ValuesPerRead is 5." @@ -90,7 +90,7 @@ class TetrammController(DetectorController): """The TetrAMM always digitizes at 100 kHz""" _base_sample_rate: int = 100_000 - def __init__(self, driver: TetrammDriver, file_io: NDFileHDFIO) -> None: + def __init__(self, driver: TetrammDriver, file_io: NDFileHDF5IO) -> None: self.driver = driver self._file_io = file_io self._arm_status: AsyncStatus | None = None @@ -122,7 +122,7 @@ async def prepare(self, trigger_info: TriggerInfo) -> None: await asyncio.gather( self.set_exposure(trigger_info.livetime), - self._file_io.num_capture.set(trigger_info.total_number_of_exposures), + self._file_io.num_capture.set(trigger_info.number_of_exposures), ) # raise an error if asked to trigger faster than the max. @@ -154,7 +154,7 @@ async def disarm(self): # We can't use caput callback as we already used it in arm() and we can't have # 2 or they will deadlock. Therefore must use stop_busy_record LOGGER.info("Disarming TetrAMM") - await stop_busy_record(self.driver.acquire, False, timeout=DEFAULT_TIMEOUT) + await stop_busy_record(self.driver.acquire, timeout=DEFAULT_TIMEOUT) async def set_exposure(self, exposure: float) -> None: """Set the exposure time and acquire period. 
@@ -208,7 +208,7 @@ async def complete_acquisition() -> None: return AsyncStatus(complete_acquisition()) -class TetrammDatasetDescriber(DatasetDescriber): +class TetrammDatasetDescriber(NDArrayDescription): def __init__(self, driver: TetrammDriver) -> None: self._driver = driver @@ -235,7 +235,7 @@ def __init__( type: str | None = None, ): self.driver = TetrammDriver(prefix + drv_suffix) - self.file_io = NDFileHDFIO(prefix + fileio_suffix) + self.file_io = NDFileHDF5IO(prefix + fileio_suffix) controller = TetrammController(self.driver, self.file_io) self.current1 = epics_signal_r(float, prefix + "Cur1:MeanValue_RBV") @@ -251,7 +251,7 @@ def __init__( self.pos_x = epics_signal_r(float, prefix + "PosX:MeanValue_RBV") self.pos_y = epics_signal_r(float, prefix + "PosY:MeanValue_RBV") - writer = ADHDFWriter( + writer = ADHDFDataLogic( fileio=self.file_io, path_provider=path_provider, dataset_describer=TetrammDatasetDescriber(self.driver), diff --git a/tests/devices/beamlines/i11/test_mythen.py b/tests/devices/beamlines/i11/test_mythen.py index ed6a1877d3f..791ebacf471 100644 --- a/tests/devices/beamlines/i11/test_mythen.py +++ b/tests/devices/beamlines/i11/test_mythen.py @@ -29,7 +29,7 @@ async def i11_mythen() -> Mythen3: prefix="BL11I-EA-DET-07:", path_provider=get_path_provider(), drv_suffix="DET", - fileio_suffix="HDF:", + writer_suffix="HDF:", ) return i11_mythen @@ -59,7 +59,7 @@ async def test_mythen_prepare_when_det_trig_internal(i11_mythen: Mythen3) -> Non async def test_mythen_prepare_when_det_trig_external(i11_mythen: Mythen3) -> None: trigger_info = TriggerInfo( number_of_events=1, - trigger=DetectorTrigger.CONSTANT_GATE, + trigger=DetectorTrigger.EXTERNAL_LEVEL, deadtime=1, livetime=10.0, exposure_timeout=30.0, @@ -75,7 +75,7 @@ async def test_mythen_prepare_when_det_trig_external(i11_mythen: Mythen3) -> Non async def test_mythen_prepare_when_continous_exposure(i11_mythen: Mythen3) -> None: trigger_info = TriggerInfo( number_of_events=0, - 
trigger=DetectorTrigger.CONSTANT_GATE, + trigger=DetectorTrigger.EXTERNAL_LEVEL, deadtime=1, livetime=10.0, exposure_timeout=30.0, diff --git a/tests/devices/beamlines/i13_1/test_merlin.py b/tests/devices/beamlines/i13_1/test_merlin.py index 9ffbfa5670d..49aeb9e895d 100644 --- a/tests/devices/beamlines/i13_1/test_merlin.py +++ b/tests/devices/beamlines/i13_1/test_merlin.py @@ -16,11 +16,10 @@ @pytest.fixture def one_shot_trigger_info() -> TriggerInfo: return TriggerInfo( - exposure_timeout=None, number_of_events=1, trigger=DetectorTrigger.INTERNAL, deadtime=0.0, - livetime=None, + livetime=0.0, ) @@ -48,7 +47,7 @@ async def test_trigger( await merlin.prepare(one_shot_trigger_info) await merlin._controller.arm() - assert await merlin.drv.acquire.get_value() + assert await merlin.driver.acquire.get_value() await merlin._controller.wait_for_idle() @@ -59,8 +58,8 @@ async def test_can_collect( one_shot_trigger_info: TriggerInfo, ): set_mock_value(merlin.hdf.file_path_exists, True) - set_mock_value(merlin.drv.array_size_x, 10) - set_mock_value(merlin.drv.array_size_y, 20) + set_mock_value(merlin.driver.array_size_x, 10) + set_mock_value(merlin.driver.array_size_y, 20) set_mock_value(merlin.hdf.num_frames_chunks, 1) await merlin.stage() @@ -89,8 +88,8 @@ async def test_can_collect( async def test_can_decribe_collect(merlin: Merlin, one_shot_trigger_info: TriggerInfo): set_mock_value(merlin.hdf.file_path_exists, True) - set_mock_value(merlin.drv.array_size_x, 10) - set_mock_value(merlin.drv.array_size_y, 20) + set_mock_value(merlin.driver.array_size_x, 10) + set_mock_value(merlin.driver.array_size_y, 20) assert (await merlin.describe_collect()) == {} await merlin.stage() diff --git a/tests/devices/test_tetramm.py b/tests/devices/test_tetramm.py index 683b9299733..e7c4d0d35ea 100644 --- a/tests/devices/test_tetramm.py +++ b/tests/devices/test_tetramm.py @@ -56,10 +56,10 @@ async def sample_time_from_values(value: int): def supported_trigger_info() -> TriggerInfo: return 
TriggerInfo( number_of_events=1, - trigger=DetectorTrigger.CONSTANT_GATE, + trigger=DetectorTrigger.EXTERNAL_LEVEL, deadtime=1e-4, livetime=1, - exposure_timeout=None, + exposure_timeout=0.0, ) @@ -98,7 +98,7 @@ async def test_set_invalid_exposure_for_number_of_values_per_reading( await tetramm_controller.prepare( TriggerInfo( number_of_events=0, - trigger=DetectorTrigger.EDGE_TRIGGER, + trigger=DetectorTrigger.EXTERNAL_EDGE, livetime=4e-5, ) ) @@ -115,9 +115,8 @@ async def test_arm_raises_value_error_for_invalid_trigger_type( trigger_type: DetectorTrigger, ): accepted_types = [ - "EDGE_TRIGGER", - "CONSTANT_GATE", - "VARIABLE_GATE", + "EXTERNAL_EDGE", + "EXTERNAL_LEVEL", ] with pytest.raises( TypeError, @@ -140,8 +139,8 @@ async def test_arm_raises_value_error_for_invalid_trigger_type( @pytest.mark.parametrize( "trigger_type", [ - DetectorTrigger.EDGE_TRIGGER, - DetectorTrigger.CONSTANT_GATE, + DetectorTrigger.EXTERNAL_EDGE, + DetectorTrigger.EXTERNAL_LEVEL, ], ) async def test_arm_sets_signals_correctly_given_valid_inputs( @@ -167,7 +166,7 @@ async def test_disarm_disarms_driver( await tetramm.prepare( TriggerInfo( number_of_events=0, - trigger=DetectorTrigger.EDGE_TRIGGER, + trigger=DetectorTrigger.EXTERNAL_EDGE, livetime=VALID_TEST_EXPOSURE_TIME, deadtime=VALID_TEST_DEADTIME, ) @@ -192,10 +191,9 @@ async def test_prepare_with_too_low_a_deadtime_raises_error( await tetramm.prepare( TriggerInfo( number_of_events=5, - trigger=DetectorTrigger.EDGE_TRIGGER, + trigger=DetectorTrigger.EXTERNAL_EDGE, deadtime=1.0 / 100_000.0, livetime=VALID_TEST_EXPOSURE_TIME, - exposure_timeout=None, ) ) @@ -206,10 +204,9 @@ async def test_prepare_arms_tetramm( await tetramm.prepare( TriggerInfo( number_of_events=5, - trigger=DetectorTrigger.EDGE_TRIGGER, + trigger=DetectorTrigger.EXTERNAL_EDGE, deadtime=0.1, livetime=VALID_TEST_EXPOSURE_TIME, - exposure_timeout=None, ) ) await assert_armed(tetramm.driver) @@ -255,7 +252,7 @@ async def test_stage_sets_up_accurate_describe_output( 
async def test_error_if_armed_without_exposure(tetramm_controller: TetrammController): with pytest.raises(ValueError): await tetramm_controller.prepare( - TriggerInfo(number_of_events=10, trigger=DetectorTrigger.CONSTANT_GATE) + TriggerInfo(number_of_events=10, trigger=DetectorTrigger.EXTERNAL_LEVEL) ) @@ -266,7 +263,7 @@ async def test_tetramm_controller( await tetramm_controller.prepare( TriggerInfo( number_of_events=1, - trigger=DetectorTrigger.CONSTANT_GATE, + trigger=DetectorTrigger.EXTERNAL_LEVEL, livetime=VALID_TEST_EXPOSURE_TIME, deadtime=VALID_TEST_DEADTIME, ) @@ -294,7 +291,7 @@ async def test_tetramm_prepare_when_freerunning(tetramm_controller: TetrammContr await tetramm_controller.prepare( TriggerInfo( number_of_events=1, - trigger=DetectorTrigger.CONSTANT_GATE, + trigger=DetectorTrigger.EXTERNAL_LEVEL, livetime=VALID_TEST_EXPOSURE_TIME, deadtime=VALID_TEST_DEADTIME, ) diff --git a/uv.lock b/uv.lock index 57e41e6ea29..a2405c04cdc 100644 --- a/uv.lock +++ b/uv.lock @@ -768,7 +768,7 @@ requires-dist = [ { name = "numpy" }, { name = "opencv-python-headless" }, { name = "ophyd" }, - { name = "ophyd-async", extras = ["ca", "pva"], specifier = ">=0.16.0" }, + { name = "ophyd-async", extras = ["ca", "pva"], specifier = ">=0.17a1" }, { name = "pillow" }, { name = "pydantic", specifier = ">=2.0" }, { name = "pyepics" }, @@ -2089,7 +2089,7 @@ wheels = [ [[package]] name = "ophyd-async" -version = "0.16" +version = "0.17a1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "bluesky" }, @@ -2103,9 +2103,9 @@ dependencies = [ { name = "stamina" }, { name = "velocity-profile" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/2d/cd178f31c4efb7f2a1b2787900d62b70b478111c5db51a625307f5fb9b15/ophyd_async-0.16.tar.gz", hash = "sha256:c8a3671c704da77c7a7b7c5343b972230f743b1029a100f6c5780123fb0df33d", size = 545367, upload-time = "2026-02-17T16:39:37.897Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/5e/da/daa89b14b50d332619cd2144c64b58d49152fb16997b42fb5a1a68567865/ophyd_async-0.17a1.tar.gz", hash = "sha256:2c5677c825e6a096043be3b9a7342838377fae1dc404b4d7bbc1aa82e628fff6", size = 545385, upload-time = "2026-03-05T17:41:25.798Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/ab/0c92e9824c9e54df5a06b5759957fc23c30489c2aabef5653dd9057b3f61/ophyd_async-0.16-py3-none-any.whl", hash = "sha256:017d837767b63cdc20af1851275495b6bb0db195a887e0bd989dc7a17e0f0c79", size = 208499, upload-time = "2026-02-17T16:39:36.542Z" }, + { url = "https://files.pythonhosted.org/packages/69/d8/a845b2f2f622ab88b521688e58925b9e19cfccb235e2c2b9ad19d83b2214/ophyd_async-0.17a1-py3-none-any.whl", hash = "sha256:4579866f0a957f2c072382aa14c9b4d8dea5dc46e3072516bc17fc7af56a1692", size = 205987, upload-time = "2026-03-05T17:41:24.499Z" }, ] [package.optional-dependencies] From 5032bdf43a6df40829e3ea7adba547b94406b017 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Mon, 16 Mar 2026 11:07:51 +0000 Subject: [PATCH 02/30] Update trigger logic --- src/dodal/devices/beamlines/b16/detector.py | 4 ++-- src/dodal/devices/controllers.py | 26 +++++++++------------ 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/dodal/devices/beamlines/b16/detector.py b/src/dodal/devices/beamlines/b16/detector.py index e4f058194ca..38cc9089745 100644 --- a/src/dodal/devices/beamlines/b16/detector.py +++ b/src/dodal/devices/beamlines/b16/detector.py @@ -7,7 +7,7 @@ from dodal.common.beamlines.beamline_utils import get_path_provider from dodal.common.beamlines.device_helpers import CAM_SUFFIX, TIFF_SUFFIX -from dodal.devices.controllers import ConstantDeadTimeTriggerLogic +from dodal.devices.controllers import ConfigurableImageModeTriggerLogic def software_triggered_tiff_area_detector(prefix: str, deadtime: float = 0.0): @@ -20,7 +20,7 @@ def software_triggered_tiff_area_detector(prefix: str, deadtime: float = 0.0): prefix=prefix, driver=driver, 
arm_logic=ADArmLogic(driver), - trigger_logic=ConstantDeadTimeTriggerLogic(driver, deadtime), + trigger_logic=ConfigurableImageModeTriggerLogic(driver), path_provider=get_path_provider(), writer_type=ADWriterType.TIFF, writer_suffix=TIFF_SUFFIX, diff --git a/src/dodal/devices/controllers.py b/src/dodal/devices/controllers.py index 21739509db6..73655d977ad 100644 --- a/src/dodal/devices/controllers.py +++ b/src/dodal/devices/controllers.py @@ -1,26 +1,22 @@ -from typing import TypeVar +from typing import Generic, TypeVar -from ophyd_async.core import DetectorTriggerLogic, SignalDict -from ophyd_async.epics.adcore import ADBaseIO, ADImageMode, prepare_exposures +from ophyd_async.core import DetectorTriggerLogic +from ophyd_async.epics.adcore import ( + ADBaseIO, + ADImageMode, + prepare_exposures, +) ADBaseIOT = TypeVar("ADBaseIOT", bound=ADBaseIO) -class ConstantDeadTimeTriggerLogic(DetectorTriggerLogic): - """DetectorTriggerLogic with a configured constant deadtime.""" - +class ConfigurableImageModeTriggerLogic(DetectorTriggerLogic, Generic[ADBaseIOT]): def __init__( - self, - driver: ADBaseIO, - deadtime: float, - image_mode: ADImageMode = ADImageMode.MULTIPLE, + self, driver: ADBaseIOT, image_mode: ADImageMode = ADImageMode.MULTIPLE ): - # super().__init__(driver, image_mode=image_mode) self.driver = driver - self.deadtime = deadtime - - def get_deadtime(self, config_values: SignalDict) -> float: - return self.deadtime + self.image_mode = image_mode async def prepare_internal(self, num: int, livetime: float, deadtime: float): + await self.driver.image_mode.set(self.image_mode) await prepare_exposures(self.driver, num, livetime, deadtime) From 04bf4bc58d9e481f7ca45dccd921c5d572db84ef Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Mon, 16 Mar 2026 11:44:03 +0000 Subject: [PATCH 03/30] Remove Merlin device as it is now in ophyd-async --- src/dodal/beamlines/i13_1.py | 10 +- src/dodal/devices/beamlines/i13_1/merlin.py | 32 ------ 
.../beamlines/i13_1/merlin_controller.py | 72 ------------ tests/devices/beamlines/i13_1/test_merlin.py | 105 ------------------ 4 files changed, 5 insertions(+), 214 deletions(-) delete mode 100644 src/dodal/devices/beamlines/i13_1/merlin.py delete mode 100644 src/dodal/devices/beamlines/i13_1/merlin_controller.py delete mode 100644 tests/devices/beamlines/i13_1/test_merlin.py diff --git a/src/dodal/beamlines/i13_1.py b/src/dodal/beamlines/i13_1.py index 230b4e16669..7182d679274 100644 --- a/src/dodal/beamlines/i13_1.py +++ b/src/dodal/beamlines/i13_1.py @@ -1,6 +1,7 @@ from pathlib import Path from ophyd_async.epics.adaravis import AravisDetector +from ophyd_async.epics.admerlin import MerlinDetector from dodal.common.beamlines.beamline_utils import ( device_factory, @@ -9,7 +10,6 @@ ) from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline from dodal.common.visit import LocalDirectoryServiceClient, StaticVisitPathProvider -from dodal.devices.beamlines.i13_1.merlin import Merlin from dodal.devices.motors import XYZStage from dodal.log import set_beamline as set_log_beamline from dodal.utils import BeamlinePrefix, get_beamline_name @@ -49,10 +49,10 @@ def side_camera() -> AravisDetector: @device_factory() -def merlin() -> Merlin: - return Merlin( +def merlin() -> MerlinDetector: + return MerlinDetector( prefix=f"{PREFIX}-EA-DET-04:", - drv_suffix="CAM:", - fileio_suffix="HDF5:", path_provider=get_path_provider(), + driver_suffix="CAM:", + writer_suffix="HDF5:", ) diff --git a/src/dodal/devices/beamlines/i13_1/merlin.py b/src/dodal/devices/beamlines/i13_1/merlin.py deleted file mode 100644 index f88d2e59445..00000000000 --- a/src/dodal/devices/beamlines/i13_1/merlin.py +++ /dev/null @@ -1,32 +0,0 @@ -from ophyd_async.core import PathProvider -from ophyd_async.epics import adcore - -from dodal.common.beamlines.device_helpers import CAM_SUFFIX, HDF5_SUFFIX -from dodal.devices.beamlines.i13_1.merlin_controller import ( - MerlinArmLogic, - 
MerlinTriggerLogic, -) - - -class Merlin(adcore.AreaDetector): - def __init__( - self, - prefix: str, - path_provider: PathProvider, - driver_suffix=CAM_SUFFIX, - writer_suffix=HDF5_SUFFIX, - name: str = "", - ): - self.driver = adcore.ADBaseIO(prefix + driver_suffix) - self.hdf = adcore.NDFileHDF5IO(prefix + writer_suffix) - super().__init__( - prefix=prefix, - driver=self.driver, - arm_logic=MerlinArmLogic(self.driver), - trigger_logic=MerlinTriggerLogic(self.driver), - path_provider=path_provider, - writer_suffix=writer_suffix, - writer_type=adcore.ADWriterType.HDF, - config_sigs=(self.driver.acquire_period, self.driver.acquire_time), - name=name, - ) diff --git a/src/dodal/devices/beamlines/i13_1/merlin_controller.py b/src/dodal/devices/beamlines/i13_1/merlin_controller.py deleted file mode 100644 index a312f4f2610..00000000000 --- a/src/dodal/devices/beamlines/i13_1/merlin_controller.py +++ /dev/null @@ -1,72 +0,0 @@ -from ophyd_async.core import ( - DEFAULT_TIMEOUT, - AsyncStatus, - DetectorArmLogic, -) -from ophyd_async.epics.adcore import ( - # DEFAULT_GOOD_STATES, - ADBaseIO, - # ADImageMode, - # ADState, - prepare_exposures, -) -from ophyd_async.epics.core import stop_busy_record - -from dodal.devices.controllers import ConstantDeadTimeTriggerLogic - - -class MerlinTriggerLogic(ConstantDeadTimeTriggerLogic): - def __init__( - self, - driver: ADBaseIO, - # good_states: frozenset[ADState] = DEFAULT_GOOD_STATES, - ) -> None: - self.driver = driver - # self.good_states = good_states - - # self.frame_timeout: float = 0 - # self._arm_status: AsyncStatus | None = None - # for drv_child in self.driver.children(): - # logging.debug(drv_child) - - # super().__init__(driver, 0.002) - - # async def prepare(self, trigger_info: TriggerInfo): - # self.frame_timeout = ( - # DEFAULT_TIMEOUT + await self.driver.acquire_time.get_value() - # ) - # await asyncio.gather( - # self.driver.num_images.set(trigger_info.number_of_exposures), - # 
self.driver.image_mode.set(ADImageMode.MULTIPLE), - # ) - - # async def wait_for_idle(self): - # if self._arm_status: - # await self._arm_status - - # async def disarm(self): - # # We can't use caput callback as we already used it in arm() and we can't have - # # 2 or they will deadlock - # await stop_busy_record(self.driver.acquire, timeout=1) - - def get_deadtime(self, config_values) -> float: - return 0.002 - - async def prepare_internal(self, num: int, livetime: float, deadtime: float): - await prepare_exposures(self.driver, num, livetime, deadtime) - - -class MerlinArmLogic(DetectorArmLogic): - def __init__(self, driver: ADBaseIO) -> None: - self.driver = driver - self._arm_status: AsyncStatus | None = None - - async def arm(self): - self._arm_status = await self.driver.acquire.set(True, timeout=DEFAULT_TIMEOUT) - - async def wait_for_idle(self): - if self._arm_status: - await self._arm_status - - async def disarm(self): - await stop_busy_record(self.driver.acquire, timeout=1) diff --git a/tests/devices/beamlines/i13_1/test_merlin.py b/tests/devices/beamlines/i13_1/test_merlin.py deleted file mode 100644 index 49aeb9e895d..00000000000 --- a/tests/devices/beamlines/i13_1/test_merlin.py +++ /dev/null @@ -1,105 +0,0 @@ -from typing import cast - -import pytest -from event_model import StreamDatum, StreamResource -from ophyd_async.core import ( - DetectorTrigger, - PathProvider, - TriggerInfo, - init_devices, - set_mock_value, -) - -from dodal.devices.beamlines.i13_1.merlin import Merlin - - -@pytest.fixture -def one_shot_trigger_info() -> TriggerInfo: - return TriggerInfo( - number_of_events=1, - trigger=DetectorTrigger.INTERNAL, - deadtime=0.0, - livetime=0.0, - ) - - -@pytest.fixture -async def merlin(static_path_provider: PathProvider) -> Merlin: - async with init_devices(mock=True): - merlin = Merlin( - prefix="BL13J-EA-DET-04", - # name="merlin", - # drv_suffix="CAM:", - # fileio_suffix="HDF5:", - path_provider=static_path_provider, - ) - - return merlin - - 
-async def test_trigger( - merlin: Merlin, - one_shot_trigger_info: TriggerInfo, -): - set_mock_value(merlin.hdf.file_path_exists, True) - - await merlin.stage() - await merlin.prepare(one_shot_trigger_info) - await merlin._controller.arm() - - assert await merlin.driver.acquire.get_value() - - await merlin._controller.wait_for_idle() - - -async def test_can_collect( - merlin: Merlin, - static_path_provider: PathProvider, - one_shot_trigger_info: TriggerInfo, -): - set_mock_value(merlin.hdf.file_path_exists, True) - set_mock_value(merlin.driver.array_size_x, 10) - set_mock_value(merlin.driver.array_size_y, 20) - set_mock_value(merlin.hdf.num_frames_chunks, 1) - - await merlin.stage() - await merlin.prepare(one_shot_trigger_info) - docs = [(name, doc) async for name, doc in merlin.collect_asset_docs(1)] - assert len(docs) == 2 - assert docs[0][0] == "stream_resource" - stream_resource = cast(StreamResource, docs[0][1]) - sr_uid = stream_resource["uid"] - assert stream_resource["data_key"] == "merlin" - expected_path = static_path_provider(merlin.name) - assert ( - stream_resource["uri"] - == f"file://localhost{expected_path.directory_path}/{expected_path.filename}.h5" - ) - assert stream_resource["parameters"] == { - "dataset": "/entry/data/data", - "chunk_shape": (1, 20, 10), - } - assert docs[1][0] == "stream_datum" - stream_datum = cast(StreamDatum, docs[1][1]) - assert stream_datum["stream_resource"] == sr_uid - assert stream_datum["seq_nums"] == {"start": 0, "stop": 0} - assert stream_datum["indices"] == {"start": 0, "stop": 1} - - -async def test_can_decribe_collect(merlin: Merlin, one_shot_trigger_info: TriggerInfo): - set_mock_value(merlin.hdf.file_path_exists, True) - set_mock_value(merlin.driver.array_size_x, 10) - set_mock_value(merlin.driver.array_size_y, 20) - - assert (await merlin.describe_collect()) == {} - await merlin.stage() - await merlin.prepare(one_shot_trigger_info) - assert (await merlin.describe_collect()) == { - "merlin": { - "source": 
"mock+ca://BL13J-EA-DET-04HDF5:FullFileName_RBV", - "shape": [1, 20, 10], - "dtype": "array", - "dtype_numpy": "|i1", - "external": "STREAM:", - } - } From 7e123b401b9ed3268eb125c262c1d713411bd20d Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Mon, 16 Mar 2026 15:12:54 +0000 Subject: [PATCH 04/30] Fix b16 --- src/dodal/devices/beamlines/b16/detector.py | 21 +++++++++- src/dodal/devices/controllers.py | 22 ---------- tests/beamlines/test_b16.py | 46 +++++++++++---------- 3 files changed, 43 insertions(+), 46 deletions(-) delete mode 100644 src/dodal/devices/controllers.py diff --git a/src/dodal/devices/beamlines/b16/detector.py b/src/dodal/devices/beamlines/b16/detector.py index 38cc9089745..fe47860c3cc 100644 --- a/src/dodal/devices/beamlines/b16/detector.py +++ b/src/dodal/devices/beamlines/b16/detector.py @@ -1,13 +1,30 @@ +from typing import Generic, TypeVar + +from ophyd_async.core import DetectorTriggerLogic, SignalDict from ophyd_async.epics.adcore import ( ADArmLogic, ADBaseIO, ADWriterType, AreaDetector, + prepare_exposures, ) from dodal.common.beamlines.beamline_utils import get_path_provider from dodal.common.beamlines.device_helpers import CAM_SUFFIX, TIFF_SUFFIX -from dodal.devices.controllers import ConfigurableImageModeTriggerLogic + +ADBaseIOT = TypeVar("ADBaseIOT", bound=ADBaseIO) + + +class TiffTriggerLogic(DetectorTriggerLogic, Generic[ADBaseIOT]): + def __init__(self, driver: ADBaseIOT, deadtime: float): + self.driver = driver + self.deadtime = deadtime + + async def prepare_internal(self, num: int, livetime: float, deadtime: float): + await prepare_exposures(self.driver, num, livetime, deadtime) + + def get_deadtime(self, config_values: SignalDict) -> float: + return self.deadtime def software_triggered_tiff_area_detector(prefix: str, deadtime: float = 0.0): @@ -20,7 +37,7 @@ def software_triggered_tiff_area_detector(prefix: str, deadtime: float = 0.0): prefix=prefix, driver=driver, arm_logic=ADArmLogic(driver), - 
trigger_logic=ConfigurableImageModeTriggerLogic(driver), + trigger_logic=TiffTriggerLogic(driver, deadtime), path_provider=get_path_provider(), writer_type=ADWriterType.TIFF, writer_suffix=TIFF_SUFFIX, diff --git a/src/dodal/devices/controllers.py b/src/dodal/devices/controllers.py deleted file mode 100644 index 73655d977ad..00000000000 --- a/src/dodal/devices/controllers.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Generic, TypeVar - -from ophyd_async.core import DetectorTriggerLogic -from ophyd_async.epics.adcore import ( - ADBaseIO, - ADImageMode, - prepare_exposures, -) - -ADBaseIOT = TypeVar("ADBaseIOT", bound=ADBaseIO) - - -class ConfigurableImageModeTriggerLogic(DetectorTriggerLogic, Generic[ADBaseIOT]): - def __init__( - self, driver: ADBaseIOT, image_mode: ADImageMode = ADImageMode.MULTIPLE - ): - self.driver = driver - self.image_mode = image_mode - - async def prepare_internal(self, num: int, livetime: float, deadtime: float): - await self.driver.image_mode.set(self.image_mode) - await prepare_exposures(self.driver, num, livetime, deadtime) diff --git a/tests/beamlines/test_b16.py b/tests/beamlines/test_b16.py index 3b768de46b8..72903b7ef33 100644 --- a/tests/beamlines/test_b16.py +++ b/tests/beamlines/test_b16.py @@ -1,5 +1,7 @@ from unittest.mock import MagicMock, patch +from ophyd_async.epics.adcore import ADWriterType + from dodal.common.beamlines.device_helpers import CAM_SUFFIX, TIFF_SUFFIX from dodal.devices.beamlines.b16.detector import software_triggered_tiff_area_detector @@ -9,28 +11,26 @@ def test_software_triggered_tiff_area_detector_calls_with_io_correctly(): default_deadtime = 0.0 with ( - patch( - "dodal.devices.beamlines.b16.detector.ADTIFFWriter.with_io" - ) as mock_writer_with_io, patch( "dodal.devices.beamlines.b16.detector.AreaDetector" ) as mock_area_detector, patch( - "dodal.devices.beamlines.b16.detector.ConstantDeadTimeController" - ) as mock_controller, + "dodal.devices.beamlines.b16.detector.TiffTriggerLogic" + ) as 
mock_tiff_trigger_logic, patch( "dodal.devices.beamlines.b16.detector.get_path_provider" ) as mock_get_path_provider, patch("dodal.devices.beamlines.b16.detector.ADBaseIO") as mock_adbase_io, + patch("dodal.devices.beamlines.b16.detector.ADArmLogic") as mock_arm_logic, ): - mock_writer = MagicMock(name="Writer") - mock_writer_with_io.return_value = mock_writer + mock_arm_logic_instance = MagicMock(name="ADArmLogic") + mock_arm_logic.return_value = mock_arm_logic_instance mock_path_provider = MagicMock(name="PathProvider") mock_get_path_provider.return_value = mock_path_provider - mock_controller_instance = MagicMock(name="Controller") - mock_controller.return_value = mock_controller_instance + mock_tiff_trigger_logic_instance = MagicMock(name="TriggerLogic") + mock_tiff_trigger_logic.return_value = mock_tiff_trigger_logic_instance mock_driver_instance = MagicMock(name="Driver") mock_adbase_io.return_value = mock_driver_instance @@ -40,26 +40,28 @@ def test_software_triggered_tiff_area_detector_calls_with_io_correctly(): result = software_triggered_tiff_area_detector(prefix) # default deadtime - # Assert with_io called with correct arguments - mock_writer_with_io.assert_called_once_with( - prefix=prefix, - path_provider=mock_path_provider, - fileio_suffix=TIFF_SUFFIX, - ) - # Assert ADBaseIO called with correct prefix + suffix mock_adbase_io.assert_called_once_with(prefix + CAM_SUFFIX) - # Assert ConstantDeadTimeController called with driver and deadtime - mock_controller.assert_called_once_with( - driver=mock_driver_instance, - deadtime=default_deadtime, + # Assert correct TriggerLogic used. 
+ mock_arm_logic.assert_called_once_with(mock_driver_instance) + + # Assert TiffTriggerLogic called with driver and deadtime + mock_tiff_trigger_logic.assert_called_once_with( + mock_driver_instance, + default_deadtime, ) # Assert AreaDetector constructed with correct arguments mock_area_detector.assert_called_once_with( - writer=mock_writer, - controller=mock_controller_instance, + prefix=prefix, + driver=mock_driver_instance, + # writer=mock_writer, + trigger_logic=mock_tiff_trigger_logic_instance, + path_provider=mock_path_provider, + arm_logic=mock_arm_logic_instance, + writer_type=ADWriterType.TIFF, + writer_suffix=TIFF_SUFFIX, ) # The function should return the AreaDetector instance From f4905df06cab177e52e1272a390c768b2158e7e2 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Mon, 16 Mar 2026 15:14:00 +0000 Subject: [PATCH 05/30] Shorten imports --- src/dodal/beamlines/b16.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/dodal/beamlines/b16.py b/src/dodal/beamlines/b16.py index a5857fb2b0c..ac3fb6a3034 100644 --- a/src/dodal/beamlines/b16.py +++ b/src/dodal/beamlines/b16.py @@ -1,19 +1,12 @@ from pathlib import Path -from ophyd_async.epics.adcore import ( - AreaDetector, -) +from ophyd_async.epics.adcore import AreaDetector from ophyd_async.epics.motor import Motor -from dodal.common.beamlines.beamline_utils import ( - device_factory, - set_path_provider, -) +from dodal.common.beamlines.beamline_utils import device_factory, set_path_provider from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline from dodal.common.visit import RemoteDirectoryServiceClient, StaticVisitPathProvider -from dodal.devices.beamlines.b16.detector import ( - software_triggered_tiff_area_detector, -) +from dodal.devices.beamlines.b16.detector import software_triggered_tiff_area_detector from dodal.devices.motors import XYZStage from dodal.log import set_beamline as set_log_beamline from dodal.utils import 
BeamlinePrefix, get_beamline_name From b4e30c5434eff02d0954d4021f57b0180a6ed6a2 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 17 Mar 2026 10:55:11 +0000 Subject: [PATCH 06/30] Convert Tetramm detector --- src/dodal/devices/tetramm.py | 242 ++++++++++-------------------- tests/devices/test_controllers.py | 16 -- tests/devices/test_tetramm.py | 160 +++++++++++--------- 3 files changed, 168 insertions(+), 250 deletions(-) delete mode 100644 tests/devices/test_controllers.py diff --git a/src/dodal/devices/tetramm.py b/src/dodal/devices/tetramm.py index 0597322aad6..264516b41be 100644 --- a/src/dodal/devices/tetramm.py +++ b/src/dodal/devices/tetramm.py @@ -3,28 +3,27 @@ from typing import Annotated as A from ophyd_async.core import ( - DEFAULT_TIMEOUT, AsyncStatus, - DetectorController, - DetectorTrigger, + # DetectorController, + DetectorTriggerLogic, PathProvider, + SignalDict, SignalR, SignalRW, StandardDetector, StrictEnum, TriggerInfo, - set_and_wait_for_value, - soft_signal_r_and_setter, - wait_for_value, + derived_signal_r, ) from ophyd_async.epics.adcore import ( + ADArmLogic, + ADBaseIO, ADHDFDataLogic, - NDArrayBaseIO, NDArrayDescription, NDFileHDF5IO, NDPluginBaseIO, ) -from ophyd_async.epics.core import PvSuffix, epics_signal_r, stop_busy_record +from ophyd_async.epics.core import PvSuffix, epics_signal_r from dodal.log import LOGGER @@ -57,7 +56,7 @@ class TetrammGeometry(StrictEnum): SQUARE = "Square" -class TetrammDriver(NDArrayBaseIO): +class TetrammDriver(ADBaseIO): range = A[SignalRW[TetrammRange], PvSuffix.rbv("Range")] sample_time: A[SignalR[float], PvSuffix("SampleTime_RBV")] values_per_reading: A[SignalRW[int], PvSuffix.rbv("ValuesPerRead")] @@ -74,138 +73,45 @@ class TetrammDriver(NDArrayBaseIO): read_format: A[SignalRW[bool], PvSuffix.rbv("ReadFormat")] -class TetrammController(DetectorController): - """Controller for a TetrAMM current monitor.""" - - _supported_trigger_types = { - DetectorTrigger.EXTERNAL_EDGE: 
TetrammTrigger.EXT_TRIGGER, - DetectorTrigger.EXTERNAL_LEVEL: TetrammTrigger.EXT_TRIGGER, - DetectorTrigger.EXTERNAL_LEVEL: TetrammTrigger.EXT_TRIGGER, - } - """"On the TetrAMM ASCII mode requires a minimum value of ValuesPerRead of 500, - [...] binary mode the minimum value of ValuesPerRead is 5." - https://millenia.cars.aps.anl.gov/software/epics/quadEMDoc.html - """ +class TetrammTriggerLogic(DetectorTriggerLogic): + _base_sample_rate = 100_000 _minimal_values_per_reading = {0: 5, 1: 500} - """The TetrAMM always digitizes at 100 kHz""" - _base_sample_rate: int = 100_000 - def __init__(self, driver: TetrammDriver, file_io: NDFileHDF5IO) -> None: + def __init__(self, driver: TetrammDriver, file_io: NDFileHDF5IO): self.driver = driver - self._file_io = file_io - self._arm_status: AsyncStatus | None = None + self.file_io = file_io - def get_deadtime(self, exposure: float | None) -> float: - # 2 internal clock cycles. Best effort approximation + def get_deadtime(self, config_values) -> float: return 2 / self._base_sample_rate - async def prepare(self, trigger_info: TriggerInfo) -> None: - if trigger_info.trigger not in self._supported_trigger_types: - raise TypeError( - f"{self.__class__.__name__} only supports the following trigger " - f"types: {[k.name for k in self._supported_trigger_types]} but was asked to " - f"use {trigger_info.trigger}" - ) - if trigger_info.livetime is None: - raise ValueError(f"{self.__class__.__name__} requires that livetime is set") - - current_trig_status = await self.driver.trigger_mode.get_value() - - if current_trig_status == TetrammTrigger.FREE_RUN: # if freerun turn off first - LOGGER.info("Disarming TetrAMM from free run") - await self.disarm() - - # trigger mode must be set first and on its own! 
- await self.driver.trigger_mode.set( - self._supported_trigger_types[trigger_info.trigger] - ) + async def prepare_edge(self, num: int, livetime: float): + await self.driver.trigger_mode.set(TetrammTrigger.EXT_TRIGGER) await asyncio.gather( - self.set_exposure(trigger_info.livetime), - self._file_io.num_capture.set(trigger_info.number_of_exposures), + self.set_exposure(livetime), + self.file_io.num_capture.set(num), ) - # raise an error if asked to trigger faster than the max. - # possible speed for a tetramm - self._validate_deadtime(trigger_info) - - def _validate_deadtime(self, value: TriggerInfo) -> None: - minimum_deadtime = self.get_deadtime(value.livetime) - if minimum_deadtime > value.deadtime: - msg = ( - f"Tetramm {self} needs at least {minimum_deadtime}s " - f"deadtime, but trigger logic provides only {value.deadtime}s" - ) - raise ValueError(msg) - - async def arm(self): - self._arm_status = await self.start_acquiring_driver_and_ensure_status() - - async def wait_for_idle(self): - # tetramm never goes idle really, actually it is always acquiring - # so need to wait for the capture to finish instead - await wait_for_value(self._file_io.acquire, False, timeout=None) + async def prepare_level(self, num: int): + await self.driver.trigger_mode.set(TetrammTrigger.EXT_TRIGGER) + await self.file_io.num_capture.set(num) - async def unstage(self): - LOGGER.info("Unstaging TetrAMM") - await self._file_io.acquire.set(False) - - async def disarm(self): - # We can't use caput callback as we already used it in arm() and we can't have - # 2 or they will deadlock. Therefore must use stop_busy_record - LOGGER.info("Disarming TetrAMM") - await stop_busy_record(self.driver.acquire, timeout=DEFAULT_TIMEOUT) - - async def set_exposure(self, exposure: float) -> None: - """Set the exposure time and acquire period. 
- - As during the exposure time, the device must collect an integer number - of readings, in the case where the exposure is not a multiple of the base - sample rate, it will be lowered to the prior multiple ot ensure triggers - are not missed. - - Args: - exposure (float): Desired exposure time. - """ + async def set_exposure(self, exposure: float): sample_time = await self.driver.sample_time.get_value() + minimum_samples = self._minimal_values_per_reading[ await self.driver.read_format.get_value() ] - samples_per_reading = int(exposure / sample_time) - if samples_per_reading < minimum_samples: + + samples = int(exposure / sample_time) + + if samples < minimum_samples: raise ValueError( "Tetramm exposure time must be at least " f"{minimum_samples * sample_time}s, asked to set it to {exposure}s" ) - await self.driver.averaging_time.set( - samples_per_reading * sample_time - ) # correct - - async def start_acquiring_driver_and_ensure_status(self) -> AsyncStatus: - """Start acquiring driver, raising ValueError if the detector is in a bad state. - - This sets driver.acquire to True, and waits for it to be True up to a timeout. - Then, it checks that the DetectorState PV is in DEFAULT_GOOD_STATES, - and otherwise raises a ValueError. - - Returns: - AsyncStatus: An AsyncStatus that can be awaited to set driver.acquire to - True and perform subsequent raising (if applicable) due to detector - state. - """ - status = await set_and_wait_for_value( - self.driver.acquire, - True, - timeout=DEFAULT_TIMEOUT, - wait_for_set_completion=False, - ) - - async def complete_acquisition() -> None: - # NOTE: possible race condition here between the callback from - # set_and_wait_for_value and the detector state updating. 
- await status - return AsyncStatus(complete_acquisition()) + await self.driver.averaging_time.set(samples * sample_time) class TetrammDatasetDescriber(NDArrayDescription): @@ -227,57 +133,71 @@ def __init__( self, prefix: str, path_provider: PathProvider, - drv_suffix: str = "DRV:", - fileio_suffix: str = "HDF5:", + drv_suffix="DRV:", + fileio_suffix="HDF5:", + plugins: Sequence[NDPluginBaseIO] = [], name: str = "", - plugins: dict[str, NDPluginBaseIO] | None = None, - config_sigs: Sequence[SignalR] = (), - type: str | None = None, ): self.driver = TetrammDriver(prefix + drv_suffix) self.file_io = NDFileHDF5IO(prefix + fileio_suffix) - controller = TetrammController(self.driver, self.file_io) + def _get_num_channels(num_channels: TetrammChannels) -> int: + return int(num_channels) + + self.num_channels = derived_signal_r( + _get_num_channels, num_channels=self.driver.num_channels + ) + + super().__init__(name=name) + + self.add_detector_logics( + ADHDFDataLogic( + writer=self.file_io, + driver=self.driver, + path_provider=path_provider, + description=NDArrayDescription( + shape_signals=[self.num_channels, self.driver.to_average], + data_type_signal=self.driver.data_type, + color_mode_signal=self.driver.color_mode, + ), + plugins=plugins, + ), + TetrammTriggerLogic(self.driver, self.file_io), + ADArmLogic(self.driver), + ) + + # currents self.current1 = epics_signal_r(float, prefix + "Cur1:MeanValue_RBV") self.current2 = epics_signal_r(float, prefix + "Cur2:MeanValue_RBV") self.current3 = epics_signal_r(float, prefix + "Cur3:MeanValue_RBV") self.current4 = epics_signal_r(float, prefix + "Cur4:MeanValue_RBV") - self.sum_x = epics_signal_r(float, prefix + "SumX:MeanValue_RBV") - self.sum_y = epics_signal_r(float, prefix + "SumY:MeanValue_RBV") - self.sum_all = epics_signal_r(float, prefix + "SumAll:MeanValue_RBV") - self.diff_x = epics_signal_r(float, prefix + "DiffX:MeanValue_RBV") - self.diff_y = epics_signal_r(float, prefix + "DiffY:MeanValue_RBV") - self.pos_x = 
epics_signal_r(float, prefix + "PosX:MeanValue_RBV") - self.pos_y = epics_signal_r(float, prefix + "PosY:MeanValue_RBV") - - writer = ADHDFDataLogic( - fileio=self.file_io, - path_provider=path_provider, - dataset_describer=TetrammDatasetDescriber(self.driver), - plugins=plugins, - ) - - config_sigs = [ + # configuration signals + self.add_config_signals( self.driver.values_per_reading, self.driver.averaging_time, self.driver.sample_time, - *config_sigs, - ] - - if type: - self.type, _ = soft_signal_r_and_setter(str, type) - config_sigs.append(self.type) - else: - self.type = None - - if plugins is not None: - for plugin_name, plugin in plugins.items(): - setattr(self, plugin_name, plugin) - - super().__init__( - controller=controller, - writer=writer, - name=name, - config_sigs=config_sigs, ) + + @AsyncStatus.wrap + async def prepare(self, value: TriggerInfo): + current_trig_status = await self.driver.trigger_mode.get_value() + if ( + current_trig_status == TetrammTrigger.FREE_RUN + and self._arm_logic is not None + ): # if freerun turn off first + LOGGER.info("Disarming TetrAMM from free run") + await self._arm_logic.disarm() + await super().prepare(value) + self._validate_deadtime(value) + + def _validate_deadtime(self, value: TriggerInfo) -> None: + if self._trigger_logic is None: + raise RuntimeError("") + minimum_deadtime = self._trigger_logic.get_deadtime(SignalDict()) + if minimum_deadtime > value.deadtime: + msg = ( + f"Tetramm {self} needs at least {minimum_deadtime}s " + f"deadtime, but trigger logic provides only {value.deadtime}s" + ) + raise ValueError(msg) diff --git a/tests/devices/test_controllers.py b/tests/devices/test_controllers.py deleted file mode 100644 index b2b139fd8ef..00000000000 --- a/tests/devices/test_controllers.py +++ /dev/null @@ -1,16 +0,0 @@ -from unittest.mock import Mock - -import pytest - -from dodal.devices.controllers import ( - ConstantDeadTimeController, -) - - -@pytest.mark.parametrize("exposure", [0.001, 0.01, 0.1, 1, 
10, 100]) -def test_constant_dead_time_controller_returns_constant(exposure: float): - deadtime = 0.7 - controller = ConstantDeadTimeController(driver=Mock(), deadtime=deadtime) - # Check that the exposure value given is ignored and used the configured constant - # value instead. - assert controller.get_deadtime(exposure) == deadtime diff --git a/tests/devices/test_tetramm.py b/tests/devices/test_tetramm.py index e7c4d0d35ea..d9737454e84 100644 --- a/tests/devices/test_tetramm.py +++ b/tests/devices/test_tetramm.py @@ -10,22 +10,16 @@ init_devices, set_mock_value, ) -from ophyd_async.epics.adcore import ADFileWriteMode +from ophyd_async.epics.adcore import ADBaseDataType, ADFileWriteMode from dodal.devices.tetramm import ( TetrammChannels, - TetrammController, TetrammDetector, TetrammDriver, TetrammTrigger, ) -@pytest.fixture -async def tetramm_controller(tetramm: TetrammDetector) -> TetrammController: - return tetramm._controller - - @pytest.fixture async def tetramm(static_path_provider: PathProvider) -> TetrammDetector: async with init_devices(mock=True): @@ -38,6 +32,7 @@ async def tetramm(static_path_provider: PathProvider) -> TetrammDetector: set_mock_value(tetramm.driver.averaged, 1) set_mock_value(tetramm.driver.num_channels, TetrammChannels.FOUR) set_mock_value(tetramm.driver.sample_time, 10e-6) + set_mock_value(tetramm.driver.data_type, ADBaseDataType.FLOAT64) async def sample_time_from_values(value: int): # https://millenia.cars.aps.anl.gov/software/epics/quadEMDoc.html @@ -59,7 +54,7 @@ def supported_trigger_info() -> TriggerInfo: trigger=DetectorTrigger.EXTERNAL_LEVEL, deadtime=1e-4, livetime=1, - exposure_timeout=0.0, + exposure_timeout=0.001, ) @@ -72,16 +67,15 @@ def supported_trigger_info() -> TriggerInfo: async def test_set_exposure_updates_values_per_reading( - tetramm_controller: TetrammController, tetramm: TetrammDetector, ): - await tetramm_controller.set_exposure(VALID_TEST_EXPOSURE_TIME) + await 
tetramm._trigger_logic.set_exposure(VALID_TEST_EXPOSURE_TIME) # type:ignore values_per_reading = await tetramm.driver.values_per_reading.get_value() assert values_per_reading == 5 async def test_set_invalid_exposure_for_number_of_values_per_reading( - tetramm_controller: TetrammController, + tetramm: TetrammDetector, ): """Test invalid exposure values. @@ -95,7 +89,7 @@ async def test_set_invalid_exposure_for_number_of_values_per_reading( ValueError, match="Tetramm exposure time must be at least 5e-05s, asked to set it to 4e-05s", ): - await tetramm_controller.prepare( + await tetramm.prepare( TriggerInfo( number_of_events=0, trigger=DetectorTrigger.EXTERNAL_EDGE, @@ -111,22 +105,20 @@ async def test_set_invalid_exposure_for_number_of_values_per_reading( ], ) async def test_arm_raises_value_error_for_invalid_trigger_type( - tetramm_controller: TetrammController, + tetramm: TetrammDetector, trigger_type: DetectorTrigger, ): - accepted_types = [ - "EXTERNAL_EDGE", - "EXTERNAL_LEVEL", - ] + accepted_types = ( + f"[{DetectorTrigger.EXTERNAL_EDGE.name}, {DetectorTrigger.EXTERNAL_LEVEL.name}]" + ) with pytest.raises( - TypeError, + ValueError, match=re.escape( - "TetrammController only supports the following trigger " - f"types: {accepted_types} but was asked to " - f"use {trigger_type}" + f"Trigger type {trigger_type} not supported by '{tetramm.name}'," + f" supported types are: {accepted_types}" ), ): - await tetramm_controller.prepare( + await tetramm.prepare( TriggerInfo( number_of_events=0, trigger=trigger_type, @@ -147,16 +139,15 @@ async def test_arm_sets_signals_correctly_given_valid_inputs( tetramm: TetrammDetector, trigger_type: DetectorTrigger, ): - await tetramm.prepare( - TriggerInfo( - number_of_events=0, - trigger=trigger_type, - livetime=VALID_TEST_EXPOSURE_TIME, - deadtime=VALID_TEST_DEADTIME, - ) + trigger_info = TriggerInfo( + number_of_events=0, + trigger=trigger_type, + livetime=VALID_TEST_EXPOSURE_TIME, + deadtime=VALID_TEST_DEADTIME, ) + await 
tetramm.prepare(trigger_info) - await assert_armed(tetramm.driver) + await assert_armed(tetramm.driver, trigger_info) async def test_disarm_disarms_driver( @@ -172,7 +163,7 @@ async def test_disarm_disarms_driver( ) ) assert (await tetramm.driver.acquire.get_value()) == 1 - await tetramm._controller.disarm() + await tetramm._arm_logic.disarm() # type: ignore assert (await tetramm.driver.acquire.get_value()) == 0 @@ -198,18 +189,16 @@ async def test_prepare_with_too_low_a_deadtime_raises_error( ) -async def test_prepare_arms_tetramm( - tetramm: TetrammDetector, -): - await tetramm.prepare( - TriggerInfo( - number_of_events=5, - trigger=DetectorTrigger.EXTERNAL_EDGE, - deadtime=0.1, - livetime=VALID_TEST_EXPOSURE_TIME, - ) +async def test_prepare_arms_tetramm(tetramm: TetrammDetector): + trigger_info = TriggerInfo( + number_of_events=5, + trigger=DetectorTrigger.EXTERNAL_EDGE, + deadtime=0.1, + livetime=VALID_TEST_EXPOSURE_TIME, ) - await assert_armed(tetramm.driver) + + await tetramm.prepare(trigger_info) + await assert_armed(tetramm.driver, trigger_info) async def test_prepare_sets_up_writer( @@ -227,20 +216,37 @@ async def test_prepare_sets_up_writer( async def test_stage_sets_up_accurate_describe_output( - tetramm: TetrammDetector, supported_trigger_info: TriggerInfo + tetramm: TetrammDetector, + static_path_provider: PathProvider, # , supported_trigger_info: TriggerInfo ): - assert await tetramm.describe() == {} + + trigger_info = TriggerInfo( + number_of_events=1, + trigger=DetectorTrigger.EXTERNAL_EDGE, + deadtime=1e-4, + livetime=1, + exposure_timeout=0.001, + ) + + # assert await tetramm.describe() == {} await tetramm.stage() - await tetramm.prepare(supported_trigger_info) + await tetramm.prepare(trigger_info) averaging_time = await tetramm.driver.averaging_time.get_value() averaging_time = round(averaging_time, 3) # avoid floating point issues assert averaging_time == 1.0 + path_info = static_path_provider(tetramm.name) + expected_path = ( + 
path_info.directory_path.joinpath(path_info.filename + ".h5") + .as_uri() + .replace("file:///", "file://localhost/") + ) + assert await tetramm.describe() == { "tetramm": { - "source": "mock+ca://MY-TETRAMM:HDF5:FullFileName_RBV", + "source": expected_path, "shape": [1, 4, averaging_time], "dtype_numpy": " None: +async def assert_armed(driver: TetrammDriver, trigger_info: TriggerInfo) -> None: sample_time = await driver.sample_time.get_value() samples_per_reading = int(VALID_TEST_EXPOSURE_TIME / sample_time) averaging_time = samples_per_reading * sample_time @@ -308,13 +320,15 @@ async def assert_armed(driver: TetrammDriver) -> None: assert (await driver.values_per_reading.get_value()) == 5 assert (await driver.acquire.get_value()) == 1 - assert (await driver.averaging_time.get_value()) == averaging_time + # Live time not used for EXTERNAL_LEVEL + if trigger_info.trigger is not DetectorTrigger.EXTERNAL_LEVEL: + assert (await driver.averaging_time.get_value()) == averaging_time -@patch("dodal.devices.tetramm.stop_busy_record") +@patch("ophyd_async.epics.adcore._arm_logic.stop_busy_record") async def test_tetramm_disarm_calls_stop_busy_recording( stop_busy_record_mock: MagicMock, - tetramm_controller: TetrammController, + tetramm: TetrammDetector, ): - await tetramm_controller.disarm() + await tetramm._arm_logic.disarm() # type: ignore stop_busy_record_mock.assert_called_once() From 18be4d3c60412a273c374b5dc8be663ff3e34ba1 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Thu, 19 Mar 2026 12:22:34 +0000 Subject: [PATCH 07/30] update electron analyser --- .../electron_analyser/base/__init__.py | 16 +- .../electron_analyser/base/base_controller.py | 13 +- .../electron_analyser/base/base_detector.py | 226 ++++++++---------- .../electron_analyser/specs/specs_detector.py | 28 ++- .../vgscienta/vgscienta_detector.py | 30 ++- 5 files changed, 161 insertions(+), 152 deletions(-) diff --git a/src/dodal/devices/electron_analyser/base/__init__.py 
b/src/dodal/devices/electron_analyser/base/__init__.py index a06633ee053..9d731a2f767 100644 --- a/src/dodal/devices/electron_analyser/base/__init__.py +++ b/src/dodal/devices/electron_analyser/base/__init__.py @@ -3,12 +3,12 @@ GenericElectronAnalyserController, ) from .base_detector import ( - BaseElectronAnalyserDetector, + # BaseElectronAnalyserDetector, ElectronAnalyserDetector, - ElectronAnalyserRegionDetector, + # ElectronAnalyserRegionDetector, GenericBaseElectronAnalyserDetector, - GenericElectronAnalyserDetector, - GenericElectronAnalyserRegionDetector, + # GenericElectronAnalyserDetector, + # GenericElectronAnalyserRegionDetector, ) from .base_driver_io import ( AbstractAnalyserDriverIO, @@ -32,12 +32,12 @@ __all__ = [ "ElectronAnalyserController", "GenericElectronAnalyserController", - "BaseElectronAnalyserDetector", + # "BaseElectronAnalyserDetector", "ElectronAnalyserDetector", - "ElectronAnalyserRegionDetector", + # "ElectronAnalyserRegionDetector", "GenericBaseElectronAnalyserDetector", - "GenericElectronAnalyserDetector", - "GenericElectronAnalyserRegionDetector", + # "GenericElectronAnalyserDetector", + # "GenericElectronAnalyserRegionDetector", "AbstractAnalyserDriverIO", "GenericAnalyserDriverIO", "TAbstractAnalyserDriverIO", diff --git a/src/dodal/devices/electron_analyser/base/base_controller.py b/src/dodal/devices/electron_analyser/base/base_controller.py index ce839dd9b34..5861bde3931 100644 --- a/src/dodal/devices/electron_analyser/base/base_controller.py +++ b/src/dodal/devices/electron_analyser/base/base_controller.py @@ -1,9 +1,8 @@ from typing import Generic, TypeVar -from ophyd_async.core import TriggerInfo +from ophyd_async.core import DetectorTriggerLogic, TriggerInfo from ophyd_async.epics.adcore import ADImageMode -from dodal.devices.controllers import ConstantDeadTimeTriggerLogic from dodal.devices.electron_analyser.base.base_driver_io import ( GenericAnalyserDriverIO, TAbstractAnalyserDriverIO, @@ -18,7 +17,7 @@ class 
ElectronAnalyserController( - ConstantDeadTimeTriggerLogic[TAbstractAnalyserDriverIO], + DetectorTriggerLogic, Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], ): """Specialised controller for the electron analysers to provide additional setup @@ -42,13 +41,12 @@ def __init__( energy_source: AbstractEnergySource, shutter: FastShutter | None = None, source_selector: SourceSelector | None = None, - deadtime: float = 0, - image_mode: ADImageMode = ADImageMode.SINGLE, ): self.energy_source = energy_source self.shutter = shutter self.source_selector = source_selector - super().__init__(driver, deadtime, image_mode) + self.driver = driver + # super().__init__(driver, deadtime, image_mode) async def setup_with_region(self, region: TAbstractBaseRegion) -> None: """Logic to set the driver with a region.""" @@ -69,12 +67,11 @@ async def prepare(self, trigger_info: TriggerInfo) -> None: # axis calculation. excitation_energy = await self.energy_source.energy.get_value() await self.driver.cached_excitation_energy.set(excitation_energy) + await self.driver.image_mode.set(ADImageMode.SINGLE) if self.shutter is not None: await self.shutter.set(self.shutter.open_state) - await super().prepare(trigger_info) - GenericElectronAnalyserController = ElectronAnalyserController[ GenericAnalyserDriverIO, GenericRegion diff --git a/src/dodal/devices/electron_analyser/base/base_detector.py b/src/dodal/devices/electron_analyser/base/base_detector.py index aed52fe418b..488f2e48a93 100644 --- a/src/dodal/devices/electron_analyser/base/base_detector.py +++ b/src/dodal/devices/electron_analyser/base/base_detector.py @@ -1,19 +1,19 @@ -from typing import Generic, TypeVar +from typing import Any, Generic -from bluesky.protocols import Reading, Stageable, Triggerable -from event_model import DataKey from ophyd_async.core import ( - AsyncConfigurable, - AsyncReadable, AsyncStatus, - Device, - TriggerInfo, + DetectorArmLogic, + DetectorTriggerLogic, + SignalDict, + SignalR, + StandardDetector, 
+ error_if_none, ) +from ophyd_async.epics.adcore import ADArmLogic, ADImageMode -from dodal.devices.electron_analyser.base.base_controller import ( - ElectronAnalyserController, -) +from dodal.devices.electron_analyser.base import TAbstractAnalyserDriverIO from dodal.devices.electron_analyser.base.base_driver_io import ( + AbstractAnalyserDriverIO, GenericAnalyserDriverIO, TAbstractAnalyserDriverIO, ) @@ -21,139 +21,120 @@ GenericRegion, TAbstractBaseRegion, ) +from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource +from dodal.devices.fast_shutter import FastShutter +from dodal.devices.selectable_source import SourceSelector -class BaseElectronAnalyserDetector( - Device, - Triggerable, - AsyncReadable, - AsyncConfigurable, - Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], -): - """Detector for data acquisition of electron analyser. Can only acquire using - settings already configured for the device. +async def _get_value(value: SignalR[bool] | bool) -> bool: + return await value.get_value() if isinstance(value, SignalR) else value - If possible, this should be changed to inherit from a StandardDetector. Currently, - StandardDetector forces you to use a file writer which doesn't apply here. 
- See issue https://github.com/bluesky/ophyd-async/issues/888 - """ + +class ShutterCoordinatorADArmLogic(ADArmLogic, Generic[TAbstractAnalyserDriverIO]): + """Or we do this inside a plan?""" def __init__( self, - controller: ElectronAnalyserController[ - TAbstractAnalyserDriverIO, TAbstractBaseRegion - ], - name: str = "", + driver: TAbstractAnalyserDriverIO, + shutter: FastShutter, + close_shutter_idle: SignalR[bool] | bool = True, + close_shutter_disarm: SignalR[bool] | bool = True, ): - self._controller = controller - super().__init__(name) - - @AsyncStatus.wrap - async def set(self, region: TAbstractBaseRegion) -> None: - await self._controller.setup_with_region(region) - - @AsyncStatus.wrap - async def trigger(self) -> None: - await self._controller.prepare(TriggerInfo()) - await self._controller.arm() - await self._controller.wait_for_idle() - - async def read(self) -> dict[str, Reading]: - return await self._controller.driver.read() - - async def describe(self) -> dict[str, DataKey]: - data = await self._controller.driver.describe() - # Correct the shape for image - prefix = self._controller.driver.name + "-" - energy_size = len(await self._controller.driver.energy_axis.get_value()) - angle_size = len(await self._controller.driver.angle_axis.get_value()) - data[prefix + "image"]["shape"] = [angle_size, energy_size] - return data - - async def read_configuration(self) -> dict[str, Reading]: - return await self._controller.driver.read_configuration() + self._shutter = shutter + self._close_shutter_idle = close_shutter_idle + self._close_shutter_disarm = close_shutter_disarm + super().__init__(driver) + + async def arm(self): + # Open shutter before data collection + self._shutter.set(self._shutter.open_state) + await super().arm() + + async def wait_for_idle(self): + await super().wait_for_idle() + # Optionally close shutters between regions + if await _get_value(self._close_shutter_idle): + self._shutter.set(self._shutter.close_state) + + async def 
disarm(self): + await super().disarm() + if await _get_value(self._close_shutter_disarm): + self._shutter.set(self._shutter.close_state) + + +class ElectronAnalayserTriggerLogic( + DetectorTriggerLogic, Generic[TAbstractAnalyserDriverIO] +): + def __init__( + self, driver: TAbstractAnalyserDriverIO, config_sigs: set[SignalR[Any]] + ): + self._driver = driver + self._config_sigs = self._config_sigs - async def describe_configuration(self) -> dict[str, DataKey]: - return await self._controller.driver.describe_configuration() + def config_sigs(self) -> set[SignalR[Any]]: + """Return the signals that should appear in read_configuration.""" + return self._config_sigs + def get_deadtime(self, config_values: SignalDict) -> float: + return 0.0 -GenericBaseElectronAnalyserDetector = BaseElectronAnalyserDetector[ - GenericAnalyserDriverIO, GenericRegion -] + async def prepare_internal(self, num: int, livetime: float, deadtime: float): + # set image mode, anything else? + await self._driver.image_mode.set(ADImageMode.SINGLE) -class ElectronAnalyserRegionDetector( - BaseElectronAnalyserDetector[TAbstractAnalyserDriverIO, TAbstractBaseRegion], - Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], -): - """Extends electron analyser detector to configure specific region settings before - data acquisition. It is designed to only exist inside a plan. 
- """ +class RegionLogic: + """Logic for wrapping electron analyser driver to correctly set region data.""" def __init__( self, - controller: ElectronAnalyserController[ - TAbstractAnalyserDriverIO, TAbstractBaseRegion - ], - region: TAbstractBaseRegion, - name: str = "", + driver: AbstractAnalyserDriverIO, + energy_source: AbstractEnergySource, + source_selector: SourceSelector | None = None, ): - self.region = region - super().__init__(controller, name) + self._driver = driver + self._energy_source = energy_source + self._source_selector = source_selector - @AsyncStatus.wrap - async def trigger(self) -> None: - # Configure region parameters on the driver first before data collection. - await self.set(self.region) - await super().trigger() + async def setup_with_region(self, region: TAbstractBaseRegion) -> None: + """Logic to correctly wrap the driver with a region.""" + if self._source_selector is not None: + await self._source_selector.set(region.excitation_energy_source) - -# Used in sm-bluesky, but will hopefully be removed along with -# ElectronAnalyserRegionDetector in future. Blocked by: -# https://github.com/bluesky/bluesky/pull/1978 -GenericElectronAnalyserRegionDetector = ElectronAnalyserRegionDetector[ - GenericAnalyserDriverIO, GenericRegion -] -TElectronAnalyserRegionDetector = TypeVar( - "TElectronAnalyserRegionDetector", - bound=ElectronAnalyserRegionDetector, -) + excitation_energy = await self._energy_source.energy.get_value() + epics_region = region.prepare_for_epics(excitation_energy) + await self._driver.set(epics_region) class ElectronAnalyserDetector( - BaseElectronAnalyserDetector[TAbstractAnalyserDriverIO, TAbstractBaseRegion], - Stageable, + StandardDetector, Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], ): - """Electron analyser detector with the additional functionality to load a sequence - file and create a list of temporary ElectronAnalyserRegionDetector objects. 
These - will setup configured region settings before data acquisition. + """Detector for data acquisition of electron analyser. Can only acquire using + settings already configured for the device. """ - @AsyncStatus.wrap - async def stage(self) -> None: - """Prepare the detector for use by ensuring it is idle and ready. - - This method asynchronously stages the detector by first disarming the controller - to ensure the detector is not actively acquiring data, then invokes the driver's - stage procedure. This ensures the detector is in a known, ready state - before use. - - Raises: - Any exceptions raised by the driver's stage or controller's disarm methods. - """ - await self._controller.disarm() + def __init__( + self, + arm_logic: DetectorArmLogic, + trigger_logic: DetectorTriggerLogic, + region_logic: RegionLogic, + # ToDo - Add data logic + name: str = "", + ): + self.add_detector_logics(arm_logic, trigger_logic) + # Custom logic for handling regions configured from sequence files. + self._region_logic = region_logic + super().__init__(name) @AsyncStatus.wrap - async def unstage(self) -> None: - """Disarm the detector.""" - await self._controller.disarm() + async def set(self, region: TAbstractBaseRegion) -> None: + """Method so detector can be configured with regions from plans.""" + await self._region_logic.setup_with_region(region) def create_region_detector_list( self, regions: list[TAbstractBaseRegion] - ) -> list[ - ElectronAnalyserRegionDetector[TAbstractAnalyserDriverIO, TAbstractBaseRegion] - ]: + ) -> list["ElectronAnalyserDetector"]: """This method can hopefully be dropped when this is merged and released. https://github.com/bluesky/bluesky/pull/1978. @@ -167,18 +148,21 @@ def create_region_detector_list( List of ElectronAnalyserRegionDetector, equal to the number of regions in the sequence file. 
""" + arm_logic = error_if_none(self._arm_logic, "arm_logic cannot be None.") + trigger_logic = error_if_none( + self._trigger_logic, "trigger_logic cannot be None." + ) return [ - ElectronAnalyserRegionDetector[ - TAbstractAnalyserDriverIO, TAbstractBaseRegion - ](self._controller, r, self.name + "_" + r.name) + ElectronAnalyserDetector( + arm_logic=arm_logic, + trigger_logic=trigger_logic, + region_logic=self._region_logic, + name=self.name + "_" + r.name, + ) for r in regions ] -GenericElectronAnalyserDetector = ElectronAnalyserDetector[ +GenericBaseElectronAnalyserDetector = ElectronAnalyserDetector[ GenericAnalyserDriverIO, GenericRegion ] -TElectronAnalyserDetector = TypeVar( - "TElectronAnalyserDetector", - bound=ElectronAnalyserDetector, -) diff --git a/src/dodal/devices/electron_analyser/specs/specs_detector.py b/src/dodal/devices/electron_analyser/specs/specs_detector.py index 0506f69e5aa..a2e3e749d39 100644 --- a/src/dodal/devices/electron_analyser/specs/specs_detector.py +++ b/src/dodal/devices/electron_analyser/specs/specs_detector.py @@ -1,9 +1,12 @@ from typing import Generic -from dodal.devices.electron_analyser.base.base_controller import ( - ElectronAnalyserController, +from dodal.devices.electron_analyser.base.base_detector import ( + ADArmLogic, + ElectronAnalayserTriggerLogic, + ElectronAnalyserDetector, + RegionLogic, + ShutterCoordinatorADArmLogic, ) -from dodal.devices.electron_analyser.base.base_detector import ElectronAnalyserDetector from dodal.devices.electron_analyser.base.base_region import TLensMode, TPsuMode from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource from dodal.devices.electron_analyser.specs.specs_driver_io import SpecsAnalyserDriverIO @@ -33,8 +36,19 @@ def __init__( self.driver = SpecsAnalyserDriverIO[TLensMode, TPsuMode]( prefix, lens_mode_type, psu_mode_type ) - controller = ElectronAnalyserController[ - SpecsAnalyserDriverIO[TLensMode, TPsuMode], SpecsRegion[TLensMode, TPsuMode] - 
](self.driver, energy_source, shutter, source_selector) + region_logic = RegionLogic(self.driver, energy_source, source_selector) + arm_logic = ( + ShutterCoordinatorADArmLogic(self.driver, shutter) + if shutter is not None + else ADArmLogic(self.driver) + ) + trigger_logic = ElectronAnalayserTriggerLogic( + self.driver, {self.driver.lens_mode, self.driver.pass_energy} + ) - super().__init__(controller, name) + super().__init__( + region_logic=region_logic, + arm_logic=arm_logic, + trigger_logic=trigger_logic, + name=name, + ) diff --git a/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py b/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py index 1475d327761..8a996f29134 100644 --- a/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py +++ b/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py @@ -1,9 +1,13 @@ from typing import Generic -from dodal.devices.electron_analyser.base.base_controller import ( - ElectronAnalyserController, +from ophyd_async.epics.adcore import ADArmLogic + +from dodal.devices.electron_analyser.base.base_detector import ( + ElectronAnalayserTriggerLogic, + ElectronAnalyserDetector, + RegionLogic, + ShutterCoordinatorADArmLogic, ) -from dodal.devices.electron_analyser.base.base_detector import ElectronAnalyserDetector from dodal.devices.electron_analyser.base.base_region import TLensMode, TPsuMode from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource from dodal.devices.electron_analyser.vgscienta.vgscienta_driver_io import ( @@ -39,9 +43,19 @@ def __init__( self.driver = VGScientaAnalyserDriverIO[TLensMode, TPsuMode, TPassEnergyEnum]( prefix, lens_mode_type, psu_mode_type, pass_energy_type ) - controller = ElectronAnalyserController[ - VGScientaAnalyserDriverIO[TLensMode, TPsuMode, TPassEnergyEnum], - VGScientaRegion[TLensMode, TPassEnergyEnum], - ](self.driver, energy_source, shutter, source_selector) + region_logic = RegionLogic(self.driver, 
energy_source, source_selector) + arm_logic = ( + ShutterCoordinatorADArmLogic(self.driver, shutter) + if shutter is not None + else ADArmLogic(self.driver) + ) + trigger_logic = ElectronAnalayserTriggerLogic( + self.driver, {self.driver.lens_mode, self.driver.pass_energy} + ) - super().__init__(controller, name) + super().__init__( + region_logic=region_logic, + arm_logic=arm_logic, + trigger_logic=trigger_logic, + name=name, + ) From ff134e494fc165102d89e5db1a401a87bb9fa8c2 Mon Sep 17 00:00:00 2001 From: Jacob Williamson Date: Fri, 20 Mar 2026 13:23:50 +0000 Subject: [PATCH 08/30] Remove commissioning junfrau --- src/dodal/beamlines/i24.py | 15 +- .../beamlines/i24/commissioning_jungfrau.py | 129 ------------------ .../i24/test_commissioning_jungfrau.py | 69 ---------- 3 files changed, 7 insertions(+), 206 deletions(-) delete mode 100644 src/dodal/devices/beamlines/i24/commissioning_jungfrau.py delete mode 100644 tests/devices/beamlines/i24/test_commissioning_jungfrau.py diff --git a/src/dodal/beamlines/i24.py b/src/dodal/beamlines/i24.py index b17ae5043a5..90ce6c236b9 100644 --- a/src/dodal/beamlines/i24.py +++ b/src/dodal/beamlines/i24.py @@ -1,7 +1,9 @@ from functools import cache from pathlib import Path +from dodal.devices.beamlines.i24.commissioning_jungfrau import CommissioningJungfrau from ophyd_async.core import AutoMaxIncrementingPathProvider, PathProvider +from ophyd_async.fastcs.jungfrau import JungfrauDetector from dodal.common.beamlines.beamline_utils import BL from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline @@ -15,7 +17,6 @@ from dodal.devices.beamlines.i24.aperture import Aperture from dodal.devices.beamlines.i24.beam_center import DetectorBeamCenter from dodal.devices.beamlines.i24.beamstop import Beamstop -from dodal.devices.beamlines.i24.commissioning_jungfrau import CommissioningJungfrau from dodal.devices.beamlines.i24.dcm import DCM from dodal.devices.beamlines.i24.dual_backlight import DualBacklight from 
dodal.devices.beamlines.i24.focus_mirrors import FocusMirrorsMode @@ -144,16 +145,14 @@ def eiger_beam_center() -> DetectorBeamCenter: @devices.factory() -def commissioning_jungfrau( +def jungfrau( path_provider: PathProvider, -) -> CommissioningJungfrau: - """Get the commissionning Jungfrau 9M device, which uses a temporary filewriter - device in place of Odin while the detector is in commissioning. - """ - return CommissioningJungfrau( +) -> JungfrauDetector: + return JungfrauDetector( f"{PREFIX.beamline_prefix}-EA-JFRAU-01:", - f"{PREFIX.beamline_prefix}-JUNGFRAU-META:FD:", AutoMaxIncrementingPathProvider(path_provider), + "CAM:", + "OD:", ) diff --git a/src/dodal/devices/beamlines/i24/commissioning_jungfrau.py b/src/dodal/devices/beamlines/i24/commissioning_jungfrau.py deleted file mode 100644 index f1fe64dfc4b..00000000000 --- a/src/dodal/devices/beamlines/i24/commissioning_jungfrau.py +++ /dev/null @@ -1,129 +0,0 @@ -import asyncio -from collections.abc import AsyncGenerator, AsyncIterator -from pathlib import Path - -from bluesky.protocols import StreamAsset -from event_model import DataKey # type: ignore -from ophyd_async.core import ( - AsyncStatus, - DetectorDataLogic, - PathProvider, - StandardDetector, - StandardReadable, - TriggerInfo, - observe_value, - soft_signal_r_and_setter, - wait_for_value, -) -from ophyd_async.epics.core import epics_signal_r, epics_signal_rw, epics_signal_rw_rbv -from ophyd_async.fastcs.jungfrau import JungfrauDriverIO -from ophyd_async.fastcs.jungfrau._controller import JungfrauController -from ophyd_async.fastcs.jungfrau._signals import JungfrauDriverIO - -from dodal.log import LOGGER - - -class JungfrauCommissioningWriter(DetectorDataLogic, StandardReadable): - """Implementation of the temporary filewriter used for Jungfrau commissioning on i24. 
- - The PVs on this device are responsible for writing files of a specified name - to a specified path, marking itself as "ready to write", and having a counter of - frames written, which must be zero'd at the ophyd level. - """ - - def __init__( - self, - prefix, - path_provider: PathProvider, - name="", - ) -> None: - with self.add_children_as_readables(): - self._path_provider = path_provider - self.frame_counter = epics_signal_rw(int, f"{prefix}NumCaptured") - self.file_name = epics_signal_rw_rbv(str, f"{prefix}FileName") - self.file_path = epics_signal_rw_rbv(str, f"{prefix}FilePath") - self.writer_ready = epics_signal_r(int, f"{prefix}Ready_RBV") - self.expected_frames = epics_signal_rw(int, f"{prefix}NumCapture") - super().__init__(name) - - async def open(self, name: str, exposures_per_event: int = 1) -> dict[str, DataKey]: - self._exposures_per_event = exposures_per_event - _path_info = self._path_provider() - - requested_filepath = Path(_path_info.directory_path) / _path_info.filename - if requested_filepath.exists(): - raise FileExistsError( - f"Jungfrau was requested to write to {requested_filepath}, but this file already exists!" 
- ) - - await asyncio.gather( - self.file_name.set(_path_info.filename), - self.file_path.set(str(_path_info.directory_path)), - self.frame_counter.set(0), - ) - LOGGER.info( - f"Jungfrau writing to folder {_path_info.directory_path} with filename {_path_info.filename}" - ) - await wait_for_value(self.writer_ready, 1, timeout=10) - self.final_path = requested_filepath - return await self._describe() - - async def _describe(self) -> dict[str, DataKey]: - # Dummy function, doesn't actually describe the dataset - - return { - "data": DataKey( - source="Commissioning writer", - shape=[-1], - dtype="array", - dtype_numpy=" AsyncGenerator[int, None]: - timeout = timeout * 4 # This filewriter is very slow - async for num_captured in observe_value(self.frame_counter, timeout): - yield num_captured // (self._exposures_per_event) - - async def get_indices_written(self) -> int: - return await self.frame_counter.get_value() // self._exposures_per_event - - def collect_stream_docs( - self, name: str, indices_written: int - ) -> AsyncIterator[StreamAsset]: - raise NotImplementedError() - - async def close(self) -> None: ... - - -class CommissioningJungfrau( - StandardDetector[JungfrauController, JungfrauCommissioningWriter] -): - """Ophyd-async implementation of a Jungfrau 9M Detector, using a temporary - filewriter in place of Odin. 
- """ - - def __init__( - self, - prefix: str, - writer_prefix: str, - path_provider: PathProvider, - name="", - detector_id=124, - ): - self.drv = JungfrauDriverIO(prefix) - writer = JungfrauCommissioningWriter(writer_prefix, path_provider) - controller = JungfrauController(self.drv) - self.ispyb_detector_id, _ = soft_signal_r_and_setter( - int, - initial_value=detector_id, - ) - super().__init__(controller, writer, name=name) - - @AsyncStatus.wrap - async def prepare(self, value: TriggerInfo) -> None: - await super().prepare(value) - await self._writer.expected_frames.set(value.total_number_of_exposures) diff --git a/tests/devices/beamlines/i24/test_commissioning_jungfrau.py b/tests/devices/beamlines/i24/test_commissioning_jungfrau.py deleted file mode 100644 index 2f2df470299..00000000000 --- a/tests/devices/beamlines/i24/test_commissioning_jungfrau.py +++ /dev/null @@ -1,69 +0,0 @@ -import asyncio -from pathlib import Path, PurePath -from unittest.mock import MagicMock - -import pytest -from ophyd_async.core import ( - AutoIncrementingPathProvider, - StaticFilenameProvider, - StaticPathProvider, - TriggerInfo, - init_devices, - set_mock_value, -) - -from dodal.devices.beamlines.i24.commissioning_jungfrau import CommissioningJungfrau - - -@pytest.fixture -def jungfrau(tmpdir: Path) -> CommissioningJungfrau: - with init_devices(mock=True): - name = StaticFilenameProvider("jf_out") - path = AutoIncrementingPathProvider(name, PurePath(tmpdir)) - detector = CommissioningJungfrau("", "", path) - - return detector - - -async def test_jungfrau_with_temporary_writer( - jungfrau: CommissioningJungfrau, -): - set_mock_value(jungfrau._writer.writer_ready, 1) - set_mock_value(jungfrau._writer.frame_counter, 10) - jungfrau._writer._path_provider = MagicMock() - trigger_info = TriggerInfo(livetime=1e-3, exposures_per_event=5) - await jungfrau.prepare(trigger_info) - assert await jungfrau._writer.frame_counter.get_value() == 0 - assert ( - await 
jungfrau._writer.expected_frames.get_value() - == trigger_info.total_number_of_exposures - ) - await jungfrau.kickoff() - status = jungfrau.complete() - - async def _do_fake_writing(): - for frame in range(1, 5): - set_mock_value(jungfrau._writer.frame_counter, frame) - assert not status.done - set_mock_value(jungfrau._writer.frame_counter, 5) - - await asyncio.gather(status, _do_fake_writing()) - jungfrau._writer._path_provider.assert_called_once() - - -async def test_jungfrau_error_when_writing_to_existing_file(tmp_path: Path): - file_name = "test_file" - empty_file = tmp_path / file_name - empty_file.touch() - name = StaticFilenameProvider(file_name) - path = StaticPathProvider(name, PurePath(tmp_path)) - with init_devices(mock=True): - jungfrau = CommissioningJungfrau("", "", path) - set_mock_value(jungfrau._writer.writer_ready, 1) - with pytest.raises(FileExistsError): - await jungfrau.prepare(TriggerInfo(livetime=1e-3, exposures_per_event=5)) - - -def test_collect_stream_docs_raises_error(jungfrau: CommissioningJungfrau): - with pytest.raises(NotImplementedError): - jungfrau._writer.collect_stream_docs("jungfrau", 0) From f1898e48501dc35d65e6f32713058140a72c7251 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 12:16:12 +0000 Subject: [PATCH 09/30] Add electron analyser detector logic --- .../electron_analyser/base/__init__.py | 16 +- .../electron_analyser/base/base_controller.py | 81 ------ .../electron_analyser/base/base_detector.py | 95 +------ .../electron_analyser/base/detector_logic.py | 97 +++++++ .../electron_analyser/base/energy_sources.py | 3 - .../electron_analyser/specs/specs_detector.py | 6 +- .../vgscienta/vgscienta_detector.py | 7 +- .../electron_analyser/base/conftest.py | 18 +- .../base/test_base_controller.py | 119 --------- .../base/test_base_detector.py | 244 ++++++++---------- .../base/test_detector_logic.py | 201 +++++++++++++++ .../specs/test_specs_detector.py | 31 --- .../vgscienta/test_vgscienta_detector.py | 25 -- 13 
files changed, 421 insertions(+), 522 deletions(-) delete mode 100644 src/dodal/devices/electron_analyser/base/base_controller.py create mode 100644 src/dodal/devices/electron_analyser/base/detector_logic.py delete mode 100644 tests/devices/electron_analyser/base/test_base_controller.py create mode 100644 tests/devices/electron_analyser/base/test_detector_logic.py delete mode 100644 tests/devices/electron_analyser/specs/test_specs_detector.py delete mode 100644 tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py diff --git a/src/dodal/devices/electron_analyser/base/__init__.py b/src/dodal/devices/electron_analyser/base/__init__.py index 9d731a2f767..e32e1c9e512 100644 --- a/src/dodal/devices/electron_analyser/base/__init__.py +++ b/src/dodal/devices/electron_analyser/base/__init__.py @@ -1,12 +1,12 @@ -from .base_controller import ( - ElectronAnalyserController, - GenericElectronAnalyserController, -) +# from .base_controller import ( +# ElectronAnalyserController, +# GenericElectronAnalyserController, +# ) from .base_detector import ( # BaseElectronAnalyserDetector, ElectronAnalyserDetector, # ElectronAnalyserRegionDetector, - GenericBaseElectronAnalyserDetector, + GenericElectronAnalyserDetector, # GenericElectronAnalyserDetector, # GenericElectronAnalyserRegionDetector, ) @@ -30,12 +30,12 @@ from .energy_sources import AbstractEnergySource, DualEnergySource, EnergySource __all__ = [ - "ElectronAnalyserController", - "GenericElectronAnalyserController", + # "ElectronAnalyserController", + # "GenericElectronAnalyserController", # "BaseElectronAnalyserDetector", "ElectronAnalyserDetector", # "ElectronAnalyserRegionDetector", - "GenericBaseElectronAnalyserDetector", + "GenericElectronAnalyserDetector", # "GenericElectronAnalyserDetector", # "GenericElectronAnalyserRegionDetector", "AbstractAnalyserDriverIO", diff --git a/src/dodal/devices/electron_analyser/base/base_controller.py b/src/dodal/devices/electron_analyser/base/base_controller.py deleted 
file mode 100644 index 5861bde3931..00000000000 --- a/src/dodal/devices/electron_analyser/base/base_controller.py +++ /dev/null @@ -1,81 +0,0 @@ -from typing import Generic, TypeVar - -from ophyd_async.core import DetectorTriggerLogic, TriggerInfo -from ophyd_async.epics.adcore import ADImageMode - -from dodal.devices.electron_analyser.base.base_driver_io import ( - GenericAnalyserDriverIO, - TAbstractAnalyserDriverIO, -) -from dodal.devices.electron_analyser.base.base_region import ( - GenericRegion, - TAbstractBaseRegion, -) -from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource -from dodal.devices.fast_shutter import FastShutter -from dodal.devices.selectable_source import SourceSelector - - -class ElectronAnalyserController( - DetectorTriggerLogic, - Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], -): - """Specialised controller for the electron analysers to provide additional setup - logic such as selecting the energy source to use from requested region and giving - the driver the correct region parameters. - - Args: - driver (TAbstractAnalyserDriverIO): The electron analyser driver to wrap - around that holds the PV's. - energy_source (AbstractEnergySource): Device that holds the excitation - energy and ability to switch between sources. - deadtime (float, optional): For a given exposure, what is the safest minimum - time between exposures that can be determined without reading signals. - image_mode (ADImageMode, optional): The image mode to configure the driver - with before measuring. 
- """ - - def __init__( - self, - driver: TAbstractAnalyserDriverIO, - energy_source: AbstractEnergySource, - shutter: FastShutter | None = None, - source_selector: SourceSelector | None = None, - ): - self.energy_source = energy_source - self.shutter = shutter - self.source_selector = source_selector - self.driver = driver - # super().__init__(driver, deadtime, image_mode) - - async def setup_with_region(self, region: TAbstractBaseRegion) -> None: - """Logic to set the driver with a region.""" - if self.source_selector is not None: - await self.source_selector.set(region.excitation_energy_source) - - # Should this be moved to a VGScientController only? - if self.shutter is not None: - await self.shutter.set(self.shutter.close_state) - - excitation_energy = await self.energy_source.energy.get_value() - epics_region = region.prepare_for_epics(excitation_energy) - await self.driver.set(epics_region) - - async def prepare(self, trigger_info: TriggerInfo) -> None: - """Do all necessary steps to prepare the detector for triggers.""" - # Let the driver know the excitation energy before measuring for binding energy - # axis calculation. 
- excitation_energy = await self.energy_source.energy.get_value() - await self.driver.cached_excitation_energy.set(excitation_energy) - await self.driver.image_mode.set(ADImageMode.SINGLE) - - if self.shutter is not None: - await self.shutter.set(self.shutter.open_state) - - -GenericElectronAnalyserController = ElectronAnalyserController[ - GenericAnalyserDriverIO, GenericRegion -] -TElectronAnalyserController = TypeVar( - "TElectronAnalyserController", bound=ElectronAnalyserController -) diff --git a/src/dodal/devices/electron_analyser/base/base_detector.py b/src/dodal/devices/electron_analyser/base/base_detector.py index 488f2e48a93..da502248827 100644 --- a/src/dodal/devices/electron_analyser/base/base_detector.py +++ b/src/dodal/devices/electron_analyser/base/base_detector.py @@ -1,19 +1,14 @@ -from typing import Any, Generic +from typing import Generic from ophyd_async.core import ( AsyncStatus, DetectorArmLogic, DetectorTriggerLogic, - SignalDict, - SignalR, StandardDetector, error_if_none, ) -from ophyd_async.epics.adcore import ADArmLogic, ADImageMode -from dodal.devices.electron_analyser.base import TAbstractAnalyserDriverIO from dodal.devices.electron_analyser.base.base_driver_io import ( - AbstractAnalyserDriverIO, GenericAnalyserDriverIO, TAbstractAnalyserDriverIO, ) @@ -21,89 +16,7 @@ GenericRegion, TAbstractBaseRegion, ) -from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource -from dodal.devices.fast_shutter import FastShutter -from dodal.devices.selectable_source import SourceSelector - - -async def _get_value(value: SignalR[bool] | bool) -> bool: - return await value.get_value() if isinstance(value, SignalR) else value - - -class ShutterCoordinatorADArmLogic(ADArmLogic, Generic[TAbstractAnalyserDriverIO]): - """Or we do this inside a plan?""" - - def __init__( - self, - driver: TAbstractAnalyserDriverIO, - shutter: FastShutter, - close_shutter_idle: SignalR[bool] | bool = True, - close_shutter_disarm: SignalR[bool] | 
bool = True, - ): - self._shutter = shutter - self._close_shutter_idle = close_shutter_idle - self._close_shutter_disarm = close_shutter_disarm - super().__init__(driver) - - async def arm(self): - # Open shutter before data collection - self._shutter.set(self._shutter.open_state) - await super().arm() - - async def wait_for_idle(self): - await super().wait_for_idle() - # Optionally close shutters between regions - if await _get_value(self._close_shutter_idle): - self._shutter.set(self._shutter.close_state) - - async def disarm(self): - await super().disarm() - if await _get_value(self._close_shutter_disarm): - self._shutter.set(self._shutter.close_state) - - -class ElectronAnalayserTriggerLogic( - DetectorTriggerLogic, Generic[TAbstractAnalyserDriverIO] -): - def __init__( - self, driver: TAbstractAnalyserDriverIO, config_sigs: set[SignalR[Any]] - ): - self._driver = driver - self._config_sigs = self._config_sigs - - def config_sigs(self) -> set[SignalR[Any]]: - """Return the signals that should appear in read_configuration.""" - return self._config_sigs - - def get_deadtime(self, config_values: SignalDict) -> float: - return 0.0 - - async def prepare_internal(self, num: int, livetime: float, deadtime: float): - # set image mode, anything else? 
- await self._driver.image_mode.set(ADImageMode.SINGLE) - - -class RegionLogic: - """Logic for wrapping electron analyser driver to correctly set region data.""" - - def __init__( - self, - driver: AbstractAnalyserDriverIO, - energy_source: AbstractEnergySource, - source_selector: SourceSelector | None = None, - ): - self._driver = driver - self._energy_source = energy_source - self._source_selector = source_selector - - async def setup_with_region(self, region: TAbstractBaseRegion) -> None: - """Logic to correctly wrap the driver with a region.""" - if self._source_selector is not None: - await self._source_selector.set(region.excitation_energy_source) - - excitation_energy = await self._energy_source.energy.get_value() - epics_region = region.prepare_for_epics(excitation_energy) - await self._driver.set(epics_region) +from dodal.devices.electron_analyser.base.detector_logic import RegionLogic class ElectronAnalyserDetector( @@ -129,7 +42,7 @@ def __init__( @AsyncStatus.wrap async def set(self, region: TAbstractBaseRegion) -> None: - """Method so detector can be configured with regions from plans.""" + """Configure detector with regions from plans.""" await self._region_logic.setup_with_region(region) def create_region_detector_list( @@ -163,6 +76,6 @@ def create_region_detector_list( ] -GenericBaseElectronAnalyserDetector = ElectronAnalyserDetector[ +GenericElectronAnalyserDetector = ElectronAnalyserDetector[ GenericAnalyserDriverIO, GenericRegion ] diff --git a/src/dodal/devices/electron_analyser/base/detector_logic.py b/src/dodal/devices/electron_analyser/base/detector_logic.py new file mode 100644 index 00000000000..0e3fd069d89 --- /dev/null +++ b/src/dodal/devices/electron_analyser/base/detector_logic.py @@ -0,0 +1,97 @@ +from typing import Any, Generic + +from ophyd_async.core import ( + DetectorTriggerLogic, + SignalDict, + SignalR, +) +from ophyd_async.epics.adcore import ADArmLogic, ADImageMode + +from dodal.devices.electron_analyser.base.base_driver_io 
import ( + AbstractAnalyserDriverIO, + TAbstractAnalyserDriverIO, +) +from dodal.devices.electron_analyser.base.base_region import TAbstractBaseRegion +from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource +from dodal.devices.fast_shutter import FastShutter +from dodal.devices.selectable_source import SourceSelector + + +async def _get_value(value: SignalR[bool] | bool) -> bool: + return await value.get_value() if isinstance(value, SignalR) else value + + +class ShutterCoordinatorADArmLogic(ADArmLogic, Generic[TAbstractAnalyserDriverIO]): + """Extends the arm logic to coordinate opening shutters before acquisition with + optional configuration of when to close. + """ + + def __init__( + self, + driver: TAbstractAnalyserDriverIO, + shutter: FastShutter, + close_shutter_idle: SignalR[bool] | None = None, + ): + self._shutter = shutter + self._close_shutter_idle = close_shutter_idle + super().__init__(driver) + + async def arm(self): + # Open shutter before data collection + await self._shutter.set(self._shutter.open_state) + await super().arm() + + async def wait_for_idle(self): + await super().wait_for_idle() + # Optionally close shutters between regions + if ( + self._close_shutter_idle is not None + and await self._close_shutter_idle.get_value() + ): + await self._shutter.set(self._shutter.close_state) + + +class ElectronAnalayserTriggerLogic( + DetectorTriggerLogic, Generic[TAbstractAnalyserDriverIO] +): + """Simple trigger logic for electron analyser.""" + + def __init__( + self, driver: TAbstractAnalyserDriverIO, config_sigs: set[SignalR[Any]] + ): + self.driver = driver + self._config_sigs = config_sigs + + def config_sigs(self) -> set[SignalR[Any]]: + """Return the signals that should appear in read_configuration.""" + return self._config_sigs + + def get_deadtime(self, config_values: SignalDict) -> float: + return 0.0 + + async def prepare_internal(self, num: int, livetime: float, deadtime: float): + # Only set image mode to
single, num images and exposure is done with region. + await self.driver.image_mode.set(ADImageMode.SINGLE) + + +class RegionLogic: + """Logic for wrapping electron analyser driver to correctly set region data.""" + + def __init__( + self, + driver: AbstractAnalyserDriverIO, + energy_source: AbstractEnergySource, + source_selector: SourceSelector | None = None, + ): + self.driver = driver + self.energy_source = energy_source + self.source_selector = source_selector + + async def setup_with_region(self, region: TAbstractBaseRegion) -> None: + """Logic to correctly wrap the driver with a region.""" + if self.source_selector is not None: + await self.source_selector.set(region.excitation_energy_source) + + excitation_energy = await self.energy_source.energy.get_value() + epics_region = region.prepare_for_epics(excitation_energy) + await self.driver.set(epics_region) diff --git a/src/dodal/devices/electron_analyser/base/energy_sources.py b/src/dodal/devices/electron_analyser/base/energy_sources.py index c01555a123b..6404ac5dce3 100644 --- a/src/dodal/devices/electron_analyser/base/energy_sources.py +++ b/src/dodal/devices/electron_analyser/base/energy_sources.py @@ -18,9 +18,6 @@ class AbstractEnergySource(StandardReadable): via a energy signal. 
""" - def __init__(self, name: str = "") -> None: - super().__init__(name) - @property @abstractmethod def energy(self) -> SignalR[float]: diff --git a/src/dodal/devices/electron_analyser/specs/specs_detector.py b/src/dodal/devices/electron_analyser/specs/specs_detector.py index a2e3e749d39..a3f9e92975b 100644 --- a/src/dodal/devices/electron_analyser/specs/specs_detector.py +++ b/src/dodal/devices/electron_analyser/specs/specs_detector.py @@ -1,13 +1,13 @@ from typing import Generic -from dodal.devices.electron_analyser.base.base_detector import ( +from dodal.devices.electron_analyser.base.base_detector import ElectronAnalyserDetector +from dodal.devices.electron_analyser.base.base_region import TLensMode, TPsuMode +from dodal.devices.electron_analyser.base.detector_logic import ( ADArmLogic, ElectronAnalayserTriggerLogic, - ElectronAnalyserDetector, RegionLogic, ShutterCoordinatorADArmLogic, ) -from dodal.devices.electron_analyser.base.base_region import TLensMode, TPsuMode from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource from dodal.devices.electron_analyser.specs.specs_driver_io import SpecsAnalyserDriverIO from dodal.devices.electron_analyser.specs.specs_region import SpecsRegion diff --git a/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py b/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py index 8a996f29134..abbd196f598 100644 --- a/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py +++ b/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py @@ -2,13 +2,14 @@ from ophyd_async.epics.adcore import ADArmLogic -from dodal.devices.electron_analyser.base.base_detector import ( +from dodal.devices.electron_analyser.base.base_detector import ElectronAnalyserDetector +from dodal.devices.electron_analyser.base.base_region import TLensMode, TPsuMode +from dodal.devices.electron_analyser.base.detector_logic import ( + ADArmLogic, ElectronAnalayserTriggerLogic, - 
ElectronAnalyserDetector, RegionLogic, ShutterCoordinatorADArmLogic, ) -from dodal.devices.electron_analyser.base.base_region import TLensMode, TPsuMode from dodal.devices.electron_analyser.base.energy_sources import AbstractEnergySource from dodal.devices.electron_analyser.vgscienta.vgscienta_driver_io import ( VGScientaAnalyserDriverIO, diff --git a/tests/devices/electron_analyser/base/conftest.py b/tests/devices/electron_analyser/base/conftest.py index 2e8677610a4..e0d75ef3737 100644 --- a/tests/devices/electron_analyser/base/conftest.py +++ b/tests/devices/electron_analyser/base/conftest.py @@ -1,20 +1,8 @@ import pytest -from dodal.devices.beamlines import b07, b07_shared, i09 -from dodal.devices.electron_analyser.base import GenericElectronAnalyserDetector -from dodal.devices.electron_analyser.specs import SpecsDetector -from dodal.devices.electron_analyser.vgscienta import VGScientaDetector +from dodal.devices.electron_analyser.base import ElectronAnalyserDetector @pytest.fixture(params=["ew4000", "b07b_specs150"]) -def sim_detector( - request: pytest.FixtureRequest, - ew4000: VGScientaDetector[i09.LensMode, i09.PsuMode, i09.PassEnergy], - b07b_specs150: SpecsDetector[b07.LensMode, b07_shared.PsuMode], -) -> GenericElectronAnalyserDetector: - detectors = [ew4000, b07b_specs150] - for detector in detectors: - if detector.name == request.param: - return detector - - raise ValueError(f"Detector with name '{request.param}' not found") +def sim_detector(request: pytest.FixtureRequest) -> ElectronAnalyserDetector: + return request.getfixturevalue(request.param) diff --git a/tests/devices/electron_analyser/base/test_base_controller.py b/tests/devices/electron_analyser/base/test_base_controller.py deleted file mode 100644 index 94986943714..00000000000 --- a/tests/devices/electron_analyser/base/test_base_controller.py +++ /dev/null @@ -1,119 +0,0 @@ -from unittest.mock import AsyncMock, patch - -import pytest -from ophyd_async.core import TriggerInfo, get_mock_put - 
-from dodal.devices.electron_analyser.base import ( - AbstractAnalyserDriverIO, - AbstractBaseRegion, - ElectronAnalyserController, - GenericElectronAnalyserController, - GenericElectronAnalyserDetector, - GenericSequence, -) -from tests.devices.electron_analyser.helper_util import ( - TEST_SEQUENCE_REGION_NAMES, - get_test_sequence, -) - - -@pytest.fixture -def sequence(sim_detector: GenericElectronAnalyserDetector) -> GenericSequence: - return get_test_sequence(type(sim_detector)) - - -@pytest.fixture -def analyser_controller( - sim_detector: GenericElectronAnalyserDetector, -) -> GenericElectronAnalyserController: - return sim_detector._controller - - -async def test_controller_prepare_sets_excitation_energy( - analyser_controller: ElectronAnalyserController[ - AbstractAnalyserDriverIO, AbstractBaseRegion - ], -) -> None: - excitation_energy = await analyser_controller.energy_source.energy.get_value() - await analyser_controller.prepare(TriggerInfo()) - get_mock_put( - analyser_controller.driver.cached_excitation_energy - ).assert_awaited_once_with(excitation_energy) - - -@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) -async def test_controller_setup_with_region_sets_region_for_epics_and_sets_driver( - analyser_controller: ElectronAnalyserController[ - AbstractAnalyserDriverIO, AbstractBaseRegion - ], - region: AbstractBaseRegion, -) -> None: - sim_driver = analyser_controller.driver - sim_driver.set = AsyncMock() - - # Patch switch_energy_mode so we can check on calls, but still run the real function - with patch.object( - AbstractBaseRegion, - "prepare_for_epics", - side_effect=AbstractBaseRegion.prepare_for_epics, # run the real method - autospec=True, - ) as mock_prepare_for_epics: - await analyser_controller.setup_with_region(region) - - mock_prepare_for_epics.assert_called_once_with( - region, - await analyser_controller.energy_source.energy.get_value(), - ) - - source_selector = analyser_controller.source_selector - if 
source_selector is not None: - get_mock_put(source_selector.selected_source).assert_called_once_with( - region.excitation_energy_source - ) - # Check set was called with epics_region - epics_region = mock_prepare_for_epics.call_args[0][0].prepare_for_epics( - await analyser_controller.energy_source.energy.get_value(), - ) - sim_driver.set.assert_called_once_with(epics_region) - - -@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) -async def test_controller_setup_with_region_moves_selected_source_if_not_none( - analyser_controller: ElectronAnalyserController[ - AbstractAnalyserDriverIO, AbstractBaseRegion - ], - region: AbstractBaseRegion, -) -> None: - source_selector = analyser_controller.source_selector - - if source_selector is not None: - await analyser_controller.setup_with_region(region) - get_mock_put(source_selector.selected_source).assert_awaited_once_with( - region.excitation_energy_source - ) - - -@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) -async def test_controller_setup_with_region_moves_shutter_if_not_none( - analyser_controller: ElectronAnalyserController[ - AbstractAnalyserDriverIO, AbstractBaseRegion - ], - region: AbstractBaseRegion, -) -> None: - shutter = analyser_controller.shutter - if shutter is not None: - await analyser_controller.setup_with_region(region) - get_mock_put(shutter.shutter_state).assert_awaited_once_with( - shutter.close_state - ) - - -async def test_controller_prepare_moves_shutters_if_not_none( - analyser_controller: ElectronAnalyserController[ - AbstractAnalyserDriverIO, AbstractBaseRegion - ], -) -> None: - shutter = analyser_controller.shutter - if shutter is not None: - await analyser_controller.prepare(TriggerInfo()) - get_mock_put(shutter.shutter_state).assert_awaited_once_with(shutter.open_state) diff --git a/tests/devices/electron_analyser/base/test_base_detector.py b/tests/devices/electron_analyser/base/test_base_detector.py index 
2b0ffee8475..3d1c722ac13 100644 --- a/tests/devices/electron_analyser/base/test_base_detector.py +++ b/tests/devices/electron_analyser/base/test_base_detector.py @@ -3,14 +3,8 @@ import pytest from bluesky import plan_stubs as bps from bluesky.run_engine import RunEngine -from ophyd_async.core import TriggerInfo -from ophyd_async.testing import ( - assert_configuration, - assert_reading, -) from dodal.devices.electron_analyser.base import ( - GenericBaseElectronAnalyserDetector, GenericElectronAnalyserDetector, GenericSequence, ) @@ -23,52 +17,16 @@ def sequence(sim_detector: GenericElectronAnalyserDetector) -> GenericSequence: def test_base_analyser_detector_trigger( - sim_detector: GenericBaseElectronAnalyserDetector, + sim_detector: GenericElectronAnalyserDetector, run_engine: RunEngine, ) -> None: - sim_detector._controller.arm = AsyncMock() - sim_detector._controller.wait_for_idle = AsyncMock() + sim_detector._arm_logic.arm = AsyncMock() # type: ignore + sim_detector._arm_logic.wait_for_idle = AsyncMock() # type: ignore run_engine(bps.trigger(sim_detector, wait=True), wait=True) - sim_detector._controller.arm.assert_awaited_once() - sim_detector._controller.wait_for_idle.assert_awaited_once() - - -async def test_base_analyser_detector_read( - sim_detector: GenericBaseElectronAnalyserDetector, -) -> None: - driver_read = await sim_detector._controller.driver.read() - await assert_reading(sim_detector, driver_read) - - -async def test_base_analyser_describe( - sim_detector: GenericBaseElectronAnalyserDetector, -) -> None: - energy_array = await sim_detector._controller.driver.energy_axis.get_value() - angle_array = await sim_detector._controller.driver.angle_axis.get_value() - data = await sim_detector.describe() - assert data[f"{sim_detector._controller.driver.image.name}"]["shape"] == [ - len(angle_array), - len(energy_array), - ] - - -async def test_base_analyser_detector_configuration( - sim_detector: GenericBaseElectronAnalyserDetector, -) -> None: - 
driver_config = await sim_detector._controller.driver.read_configuration() - await assert_configuration(sim_detector, driver_config) - - -async def test_base_analyser_detector_describe_configuration( - sim_detector: GenericBaseElectronAnalyserDetector, -) -> None: - driver_describe_config = ( - await sim_detector._controller.driver.describe_configuration() - ) - - assert await sim_detector.describe_configuration() == driver_describe_config + sim_detector._arm_logic.arm.assert_awaited_once() # type: ignore + sim_detector._arm_logic.wait_for_idle.assert_awaited_once() # type: ignore def test_analyser_detector_loads_sequence_correctly( @@ -82,102 +40,102 @@ def test_analyser_detector_loads_sequence_correctly( async def test_analyser_detector_stage( sim_detector: GenericElectronAnalyserDetector, ) -> None: - sim_detector._controller.disarm = AsyncMock() + sim_detector._arm_logic.disarm = AsyncMock() # type: ignore await sim_detector.stage() - sim_detector._controller.disarm.assert_awaited_once() - - -async def test_analyser_detector_unstage( - sim_detector: GenericElectronAnalyserDetector, -) -> None: - sim_detector._controller.disarm = AsyncMock() - - await sim_detector.unstage() - - sim_detector._controller.disarm.assert_awaited_once() - - -def test_analyser_detector_creates_region_detectors( - sim_detector: GenericElectronAnalyserDetector, - sequence: GenericSequence, -) -> None: - region_detectors = sim_detector.create_region_detector_list( - sequence.get_enabled_regions() - ) - assert len(region_detectors) == len(sequence.get_enabled_regions()) - for det in region_detectors: - assert det.region.enabled is True - assert det.name == sim_detector.name + "_" + det.region.name - - -def test_analyser_detector_has_driver_as_child_and_region_detector_does_not( - sim_detector: GenericElectronAnalyserDetector, - sequence: GenericSequence, -) -> None: - # Remove parent name from driver name so it can be checked it exists in - # _child_devices dict - driver_name = 
sim_detector._controller.driver.name.replace( - sim_detector.name + "-", "" - ) - - assert sim_detector._controller.driver.parent == sim_detector - assert sim_detector._child_devices.get(driver_name) is not None - - region_detectors = sim_detector.create_region_detector_list( - sequence.get_enabled_regions() - ) - for det in region_detectors: - assert det._child_devices.get(driver_name) is None - assert det._controller.driver.parent == sim_detector - - -def test_analyser_detector_trigger_called_controller_prepare( - sim_detector: GenericElectronAnalyserDetector, - run_engine: RunEngine, -) -> None: - sim_detector._controller.prepare = AsyncMock() - sim_detector._controller.arm = AsyncMock() - sim_detector._controller.wait_for_idle = AsyncMock() - - run_engine(bps.trigger(sim_detector, wait=True), wait=True) - - sim_detector._controller.prepare.assert_awaited_once() - sim_detector._controller.arm.assert_awaited_once() - sim_detector._controller.wait_for_idle.assert_awaited_once() - - -def test_analyser_detector_set_called_controller_setup_with_region( - sim_detector: GenericElectronAnalyserDetector, - sequence: GenericSequence, - run_engine: RunEngine, -) -> None: - region = sequence.get_enabled_regions()[0] - sim_detector._controller.setup_with_region = AsyncMock() - run_engine(bps.mv(sim_detector, region), wait=True) - sim_detector._controller.setup_with_region.assert_awaited_once_with(region) - - -async def test_analyser_region_detector_trigger_sets_driver_with_region( - sim_detector: GenericElectronAnalyserDetector, - sequence: GenericSequence, - run_engine: RunEngine, -) -> None: - region_detectors = sim_detector.create_region_detector_list( - sequence.get_enabled_regions() - ) - trigger_info = TriggerInfo() - - for reg_det in region_detectors: - reg_det.set = AsyncMock() - reg_det._controller.prepare = AsyncMock() - reg_det._controller.arm = AsyncMock() - reg_det._controller.wait_for_idle = AsyncMock() - - run_engine(bps.trigger(reg_det, wait=True), wait=True) 
- - reg_det.set.assert_awaited_once_with(reg_det.region) - reg_det._controller.prepare.assert_awaited_once_with(trigger_info) - reg_det._controller.arm.assert_awaited_once() - reg_det._controller.wait_for_idle.assert_awaited_once() + sim_detector._arm_logic.disarm.assert_awaited_once() # type: ignore + + +# async def test_analyser_detector_unstage( +# sim_detector: GenericElectronAnalyserDetector, +# ) -> None: +# sim_detector._controller.disarm = AsyncMock() + +# await sim_detector.unstage() + +# sim_detector._controller.disarm.assert_awaited_once() + + +# def test_analyser_detector_creates_region_detectors( +# sim_detector: GenericElectronAnalyserDetector, +# sequence: GenericSequence, +# ) -> None: +# region_detectors = sim_detector.create_region_detector_list( +# sequence.get_enabled_regions() +# ) +# assert len(region_detectors) == len(sequence.get_enabled_regions()) +# for det in region_detectors: +# assert det.region.enabled is True +# assert det.name == sim_detector.name + "_" + det.region.name + + +# def test_analyser_detector_has_driver_as_child_and_region_detector_does_not( +# sim_detector: GenericElectronAnalyserDetector, +# sequence: GenericSequence, +# ) -> None: +# # Remove parent name from driver name so it can be checked it exists in +# # _child_devices dict +# driver_name = sim_detector._controller.driver.name.replace( +# sim_detector.name + "-", "" +# ) + +# assert sim_detector._controller.driver.parent == sim_detector +# assert sim_detector._child_devices.get(driver_name) is not None + +# region_detectors = sim_detector.create_region_detector_list( +# sequence.get_enabled_regions() +# ) +# for det in region_detectors: +# assert det._child_devices.get(driver_name) is None +# assert det._controller.driver.parent == sim_detector + + +# def test_analyser_detector_trigger_called_controller_prepare( +# sim_detector: GenericElectronAnalyserDetector, +# run_engine: RunEngine, +# ) -> None: +# sim_detector._controller.prepare = AsyncMock() +# 
sim_detector._controller.arm = AsyncMock() +# sim_detector._controller.wait_for_idle = AsyncMock() + +# run_engine(bps.trigger(sim_detector, wait=True), wait=True) + +# sim_detector._controller.prepare.assert_awaited_once() +# sim_detector._controller.arm.assert_awaited_once() +# sim_detector._controller.wait_for_idle.assert_awaited_once() + + +# def test_analyser_detector_set_called_controller_setup_with_region( +# sim_detector: GenericElectronAnalyserDetector, +# sequence: GenericSequence, +# run_engine: RunEngine, +# ) -> None: +# region = sequence.get_enabled_regions()[0] +# sim_detector._controller.setup_with_region = AsyncMock() +# run_engine(bps.mv(sim_detector, region), wait=True) +# sim_detector._controller.setup_with_region.assert_awaited_once_with(region) + + +# async def test_analyser_region_detector_trigger_sets_driver_with_region( +# sim_detector: GenericElectronAnalyserDetector, +# sequence: GenericSequence, +# run_engine: RunEngine, +# ) -> None: +# region_detectors = sim_detector.create_region_detector_list( +# sequence.get_enabled_regions() +# ) +# trigger_info = TriggerInfo() + +# for reg_det in region_detectors: +# reg_det.set = AsyncMock() +# reg_det._controller.prepare = AsyncMock() +# reg_det._controller.arm = AsyncMock() +# reg_det._controller.wait_for_idle = AsyncMock() + +# run_engine(bps.trigger(reg_det, wait=True), wait=True) + +# reg_det.set.assert_awaited_once_with(reg_det.region) +# reg_det._controller.prepare.assert_awaited_once_with(trigger_info) +# reg_det._controller.arm.assert_awaited_once() +# reg_det._controller.wait_for_idle.assert_awaited_once() diff --git a/tests/devices/electron_analyser/base/test_detector_logic.py b/tests/devices/electron_analyser/base/test_detector_logic.py new file mode 100644 index 00000000000..dc8623d9eb0 --- /dev/null +++ b/tests/devices/electron_analyser/base/test_detector_logic.py @@ -0,0 +1,201 @@ +from collections.abc import Callable +from typing import Any +from unittest.mock import ANY, 
AsyncMock, call, patch + +import pytest +from ophyd_async.core import ( + SignalDict, + StandardDetector, + get_mock_put, + init_devices, + soft_signal_rw, +) +from ophyd_async.epics.adcore import ADImageMode +from ophyd_async.testing import assert_configuration, partial_reading + +from dodal.devices.beamlines import b07, b07_shared, i09 +from dodal.devices.electron_analyser.base import ( + AbstractAnalyserDriverIO, + AbstractBaseRegion, + AbstractBaseSequence, + AbstractEnergySource, +) +from dodal.devices.electron_analyser.base.detector_logic import ( + ElectronAnalayserTriggerLogic, + RegionLogic, + ShutterCoordinatorADArmLogic, +) +from dodal.devices.electron_analyser.specs import SpecsAnalyserDriverIO +from dodal.devices.electron_analyser.vgscienta import VGScientaAnalyserDriverIO +from dodal.devices.fast_shutter import FastShutter +from dodal.devices.selectable_source import SourceSelector +from tests.devices.electron_analyser.helper_util import ( + TEST_SEQUENCE_REGION_NAMES, + get_test_sequence, +) + + +@pytest.fixture +def vgscienta_driver() -> VGScientaAnalyserDriverIO: + with init_devices(mock=True): + vgscienta_driver = VGScientaAnalyserDriverIO( + "TEST:", i09.LensMode, i09.PsuMode, i09.PassEnergy + ) + return vgscienta_driver + + +@pytest.fixture +def specs_driver() -> SpecsAnalyserDriverIO: + with init_devices(mock=True): + specs_driver = SpecsAnalyserDriverIO("TEST:", b07.LensMode, b07_shared.PsuMode) + return specs_driver + + +@pytest.fixture(params=["specs_driver", "vgscienta_driver"]) +def driver(request: pytest.FixtureRequest) -> AbstractAnalyserDriverIO: + return request.getfixturevalue(request.param) + + +@pytest.fixture(params=["shutter1", "dual_fast_shutter"]) +def shutter(request: pytest.FixtureRequest) -> FastShutter: + return request.getfixturevalue(request.param) + + +@pytest.mark.parametrize( + "close_shutter_idle, expected_shutter_calls", + [ + (True, lambda s: [call(s.open_state), call(s.close_state)]), + (False, lambda s: 
[call(s.open_state)]), + ], +) +async def test_shutter_arm_logic_opens_shutters( + driver: AbstractAnalyserDriverIO, + shutter: FastShutter, + close_shutter_idle: bool, + expected_shutter_calls: Callable[[FastShutter], list[Any]], +): + with init_devices(mock=True): + close_shutter_idle_signal = soft_signal_rw(bool, close_shutter_idle) + + shutter_arm_logic = ShutterCoordinatorADArmLogic( + driver, shutter, close_shutter_idle_signal + ) + + detector = StandardDetector() + detector.add_detector_logics(shutter_arm_logic) + + await detector.stage() + await detector.trigger() + await detector.unstage() + + # Test driver acquire expected number of times. + get_mock_put(driver.acquire).assert_has_awaits( + [call(False), call(True), call(False)] + ) + + # Test expected shutter calls. + shutter = shutter_arm_logic._shutter + get_mock_put(shutter.shutter_state).assert_has_awaits( + expected_shutter_calls(shutter) + ) + + +@pytest.fixture(params=["single_energy_source", "dual_energy_source"]) +def energy_source(request: pytest.FixtureRequest) -> AbstractEnergySource: + return request.getfixturevalue(request.param) + + +@pytest.fixture +def sequence(driver: AbstractAnalyserDriverIO) -> AbstractBaseSequence: + return get_test_sequence(type(driver)) + + +@pytest.fixture +def region_logic( + driver: AbstractAnalyserDriverIO, + energy_source: AbstractEnergySource, + source_selector: SourceSelector, +) -> RegionLogic: + return RegionLogic(driver, energy_source, source_selector) + + +@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) +async def test_region_logic_setup_with_region_sets_region_for_epics_and_sets_driver( + region: AbstractBaseRegion, + region_logic: RegionLogic, +) -> None: + + region_logic.driver.set = AsyncMock() + + # Patch switch_energy_mode so we can check on calls, but still run the real function + with patch.object( + AbstractBaseRegion, + "prepare_for_epics", + side_effect=AbstractBaseRegion.prepare_for_epics, # run the real method + 
autospec=True, + ) as mock_prepare_for_epics: + await region_logic.setup_with_region(region) + + mock_prepare_for_epics.assert_called_once_with( + region, + await region_logic.energy_source.energy.get_value(), + ) + + if region_logic.source_selector is not None: + get_mock_put( + region_logic.source_selector.selected_source + ).assert_called_once_with(region.excitation_energy_source) + # Check set was called with epics_region + epics_region = mock_prepare_for_epics.call_args[0][0].prepare_for_epics( + await region_logic.energy_source.energy.get_value(), + ) + region_logic.driver.set.assert_called_once_with(epics_region) + + +@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) +async def test_region_logic_setup_with_region_moves_selected_source_if_not_none( + region: AbstractBaseRegion, region_logic: RegionLogic +) -> None: + + if region_logic.source_selector is not None: + await region_logic.setup_with_region(region) + get_mock_put( + region_logic.source_selector.selected_source + ).assert_awaited_once_with(region.excitation_energy_source) + + +@pytest.fixture +def trigger_logic(driver: AbstractAnalyserDriverIO) -> ElectronAnalayserTriggerLogic: + return ElectronAnalayserTriggerLogic(driver, {driver.lens_mode, driver.psu_mode}) + + +async def test_electron_analyser_trigger_logic_prepare_internal( + trigger_logic: ElectronAnalayserTriggerLogic, +) -> None: + detector = StandardDetector() + detector.add_detector_logics(trigger_logic) + await detector.trigger() + get_mock_put(trigger_logic.driver.image_mode).assert_awaited_once_with( + ADImageMode.SINGLE + ) + + +async def test_electron_analyser_trigger_logic_config_sigs( + trigger_logic: ElectronAnalayserTriggerLogic, +) -> None: + detector = StandardDetector() + detector.add_detector_logics(trigger_logic) + + await assert_configuration( + detector, + { + trigger_logic.driver.lens_mode.name: partial_reading(ANY), + trigger_logic.driver.psu_mode.name: partial_reading(ANY), + }, + ) + + +async 
def test_electron_analyser_deadtime( + trigger_logic: ElectronAnalayserTriggerLogic, +) -> None: + assert trigger_logic.get_deadtime(SignalDict()) == 0.0 diff --git a/tests/devices/electron_analyser/specs/test_specs_detector.py b/tests/devices/electron_analyser/specs/test_specs_detector.py deleted file mode 100644 index ac91086a57a..00000000000 --- a/tests/devices/electron_analyser/specs/test_specs_detector.py +++ /dev/null @@ -1,31 +0,0 @@ -from ophyd_async.core import set_mock_value - -from dodal.devices.electron_analyser.specs import SpecsDetector - - -async def test_analyser_specs_detector_image_shape( - b07b_specs150: SpecsDetector, -) -> None: - driver = b07b_specs150.driver - prefix = driver.name + "-" - - low_energy = 1 - high_energy = 10 - slices = 4 - set_mock_value(driver.low_energy, low_energy) - set_mock_value(driver.high_energy, high_energy) - set_mock_value(driver.slices, slices) - - min_angle = 1 - max_angle = 10 - set_mock_value(driver.min_angle_axis, min_angle) - set_mock_value(driver.max_angle_axis, max_angle) - - angle_axis = await driver.angle_axis.get_value() - energy_axis = await driver.energy_axis.get_value() - - describe = await b07b_specs150.describe() - assert describe[f"{prefix}image"]["shape"] == [ - len(angle_axis), - len(energy_axis), - ] diff --git a/tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py b/tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py deleted file mode 100644 index 48ac964f0f6..00000000000 --- a/tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py +++ /dev/null @@ -1,25 +0,0 @@ -import numpy as np -from ophyd_async.core import set_mock_value - -from dodal.devices.beamlines.i09 import LensMode, PassEnergy, PsuMode -from dodal.devices.electron_analyser.vgscienta import ( - VGScientaDetector, -) - - -async def test_analyser_vgscienta_detector_image_shape( - ew4000: VGScientaDetector[LensMode, PsuMode, PassEnergy], -) -> None: - driver = ew4000.driver - prefix = 
driver.name + "-" - - energy_axis = np.array([1, 2, 3, 4, 5]) - angle_axis = np.array([1, 2]) - set_mock_value(driver.energy_axis, energy_axis) - set_mock_value(driver.angle_axis, angle_axis) - - describe = await ew4000.describe() - assert describe[f"{prefix}image"]["shape"] == [ - len(angle_axis), - len(energy_axis), - ] From 42d530ed8f1ddb25ba90453210afdaf8db8d4169 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 13:10:08 +0000 Subject: [PATCH 10/30] remove _get_value --- src/dodal/devices/electron_analyser/base/detector_logic.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/dodal/devices/electron_analyser/base/detector_logic.py b/src/dodal/devices/electron_analyser/base/detector_logic.py index 0e3fd069d89..795da56d21c 100644 --- a/src/dodal/devices/electron_analyser/base/detector_logic.py +++ b/src/dodal/devices/electron_analyser/base/detector_logic.py @@ -17,10 +17,6 @@ from dodal.devices.selectable_source import SourceSelector -async def _get_value(value: SignalR[bool] | bool) -> bool: - return await value.get_value() if isinstance(value, SignalR) else value - - class ShutterCoordinatorADArmLogic(ADArmLogic, Generic[TAbstractAnalyserDriverIO]): """Extends the arm logic to coordinate opening shutters before acqusition with optional configuration of when to close. 
From ec050212080ee4f551c591110452ee49307e4aa1 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 14:28:03 +0000 Subject: [PATCH 11/30] Finish up tests for electron analyser --- .../electron_analyser/base/base_detector.py | 48 +++++- .../electron_analyser/base/base_driver_io.py | 41 ----- .../electron_analyser/specs/specs_detector.py | 27 ++- .../vgscienta/vgscienta_detector.py | 32 +++- .../base/test_base_detector.py | 161 ++++++------------ tests/devices/electron_analyser/conftest.py | 3 + .../specs/test_specs_detector.py | 39 +++++ .../specs/test_specs_driver_io.py | 27 --- .../vgscienta/test_vgscienta_detector.py | 44 +++++ .../vgscienta/test_vgscienta_driver_io.py | 28 --- 10 files changed, 239 insertions(+), 211 deletions(-) create mode 100644 tests/devices/electron_analyser/specs/test_specs_detector.py create mode 100644 tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py diff --git a/src/dodal/devices/electron_analyser/base/base_detector.py b/src/dodal/devices/electron_analyser/base/base_detector.py index da502248827..3721ed3b33a 100644 --- a/src/dodal/devices/electron_analyser/base/base_detector.py +++ b/src/dodal/devices/electron_analyser/base/base_detector.py @@ -1,10 +1,13 @@ from typing import Generic +import numpy as np from ophyd_async.core import ( + Array1D, AsyncStatus, DetectorArmLogic, DetectorTriggerLogic, StandardDetector, + derived_signal_r, error_if_none, ) @@ -12,10 +15,12 @@ GenericAnalyserDriverIO, TAbstractAnalyserDriverIO, ) +from dodal.devices.electron_analyser.base.base_enums import EnergyMode from dodal.devices.electron_analyser.base.base_region import ( GenericRegion, TAbstractBaseRegion, ) +from dodal.devices.electron_analyser.base.base_util import to_binding_energy from dodal.devices.electron_analyser.base.detector_logic import RegionLogic @@ -23,9 +28,7 @@ class ElectronAnalyserDetector( StandardDetector, Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], ): - """Detector for data acquisition 
of electron analyser. Can only acquire using - settings already configured for the device. - """ + """Detector for data acquisition of electron analyser.""" def __init__( self, @@ -36,10 +39,47 @@ def __init__( name: str = "", ): self.add_detector_logics(arm_logic, trigger_logic) - # Custom logic for handling regions configured from sequence files. self._region_logic = region_logic + self.binding_energy_axis = derived_signal_r( + self._calculate_binding_energy_axis, + "eV", + energy_axis=self._region_logic.driver.energy_axis, + excitation_energy=self._region_logic.energy_source.energy, + energy_mode=self._region_logic.driver.energy_mode, + ) + self.add_config_signals(self.binding_energy_axis) super().__init__(name) + def _calculate_binding_energy_axis( + self, + energy_axis: Array1D[np.float64], + excitation_energy: float, + energy_mode: EnergyMode, + ) -> Array1D[np.float64]: + """Calculate the binding energy axis to calibrate the spectra data. Function for + a derived signal. + + Args: + energy_axis (Array1D[np.float64]): Array data of the original energy_axis + from epics. + excitation_energy (float): The excitation energy value used for the scan of + this region. + energy_mode (EnergyMode): The energy_mode of the region that was used for + the scan of this region. + + Returns: + Array that is the correct axis for the spectra data. 
+ """ + is_binding = energy_mode == EnergyMode.BINDING + return np.array( + [ + to_binding_energy(i_energy_axis, EnergyMode.KINETIC, excitation_energy) + if is_binding + else i_energy_axis + for i_energy_axis in energy_axis + ] + ) + @AsyncStatus.wrap async def set(self, region: TAbstractBaseRegion) -> None: """Configure detector with regions from plans.""" diff --git a/src/dodal/devices/electron_analyser/base/base_driver_io.py b/src/dodal/devices/electron_analyser/base/base_driver_io.py index 8bc55e7ddf9..7418755b619 100644 --- a/src/dodal/devices/electron_analyser/base/base_driver_io.py +++ b/src/dodal/devices/electron_analyser/base/base_driver_io.py @@ -28,7 +28,6 @@ TLensMode, TPassEnergy, ) -from dodal.devices.electron_analyser.base.base_util import to_binding_energy AnyPsuMode: TypeAlias = SupersetEnum | StrictEnum TPsuMode = TypeVar("TPsuMode", bound=AnyPsuMode) @@ -94,9 +93,6 @@ def __init__( self.energy_mode = soft_signal_rw( EnergyMode, initial_value=EnergyMode.KINETIC ) - self.cached_excitation_energy = soft_signal_rw( - float, initial_value=0, units="eV" - ) self.low_energy = epics_signal_rw(float, prefix + "LOW_ENERGY") self.centre_energy = epics_signal_rw(float, prefix + "CENTRE_ENERGY") self.high_energy = epics_signal_rw(float, prefix + "HIGH_ENERGY") @@ -118,13 +114,6 @@ def __init__( with self.add_children_as_readables(StandardReadableFormat.CONFIG_SIGNAL): # NOT used for setting up region data acquisition. 
self.energy_axis = self._create_energy_axis_signal(prefix) - self.binding_energy_axis = derived_signal_r( - self._calculate_binding_energy_axis, - "eV", - energy_axis=self.energy_axis, - excitation_energy=self.cached_excitation_energy, - energy_mode=self.energy_mode, - ) self.angle_axis = self._create_angle_axis_signal(prefix) self.total_steps = epics_signal_r(int, prefix + "TOTAL_POINTS_RBV") self.total_time = derived_signal_r( @@ -168,36 +157,6 @@ def _create_energy_axis_signal(self, prefix: str) -> SignalR[Array1D[np.float64] Signal that can give us energy axis array data. """ - def _calculate_binding_energy_axis( - self, - energy_axis: Array1D[np.float64], - excitation_energy: float, - energy_mode: EnergyMode, - ) -> Array1D[np.float64]: - """Calculate the binding energy axis to calibrate the spectra data. Function for - a derived signal. - - Args: - energy_axis (Array1D[np.float64]): Array data of the original energy_axis - from epics. - excitation_energy (float): The excitation energy value used for the scan of - this region. - energy_mode (EnergyMode): The energy_mode of the region that was used for - the scan of this region. - - Returns: - Array that is the correct axis for the spectra data. 
- """ - is_binding = energy_mode == EnergyMode.BINDING - return np.array( - [ - to_binding_energy(i_energy_axis, EnergyMode.KINETIC, excitation_energy) - if is_binding - else i_energy_axis - for i_energy_axis in energy_axis - ] - ) - def _calculate_total_time( self, total_steps: int, step_time: float, iterations: int ) -> float: diff --git a/src/dodal/devices/electron_analyser/specs/specs_detector.py b/src/dodal/devices/electron_analyser/specs/specs_detector.py index a3f9e92975b..20dc7419c78 100644 --- a/src/dodal/devices/electron_analyser/specs/specs_detector.py +++ b/src/dodal/devices/electron_analyser/specs/specs_detector.py @@ -1,5 +1,7 @@ from typing import Generic +from ophyd_async.core import soft_signal_rw + from dodal.devices.electron_analyser.base.base_detector import ElectronAnalyserDetector from dodal.devices.electron_analyser.base.base_region import TLensMode, TPsuMode from dodal.devices.electron_analyser.base.detector_logic import ( @@ -37,13 +39,34 @@ def __init__( prefix, lens_mode_type, psu_mode_type ) region_logic = RegionLogic(self.driver, energy_source, source_selector) + self.close_shutter_idle = soft_signal_rw(bool, initial_value=True) arm_logic = ( - ShutterCoordinatorADArmLogic(self.driver, shutter) + ShutterCoordinatorADArmLogic(self.driver, shutter, self.close_shutter_idle) if shutter is not None else ADArmLogic(self.driver) ) trigger_logic = ElectronAnalayserTriggerLogic( - self.driver, {self.driver.lens_mode, self.driver.pass_energy} + self.driver, + { + self.driver.region_name, + self.driver.energy_mode, + self.driver.acquisition_mode, + self.driver.lens_mode, + self.driver.low_energy, + self.driver.centre_energy, + self.driver.high_energy, + self.driver.energy_step, + self.driver.pass_energy, + self.driver.slices, + self.driver.acquire_time, + self.driver.iterations, + self.driver.total_steps, + self.driver.total_time, + self.driver.energy_axis, + self.driver.angle_axis, + self.driver.snapshot_values, + self.driver.psu_mode, + }, ) 
super().__init__( diff --git a/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py b/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py index abbd196f598..de120632392 100644 --- a/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py +++ b/src/dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py @@ -1,5 +1,6 @@ from typing import Generic +from ophyd_async.core import soft_signal_rw from ophyd_async.epics.adcore import ADArmLogic from dodal.devices.electron_analyser.base.base_detector import ElectronAnalyserDetector @@ -45,13 +46,40 @@ def __init__( prefix, lens_mode_type, psu_mode_type, pass_energy_type ) region_logic = RegionLogic(self.driver, energy_source, source_selector) + self.close_shutter_idle = soft_signal_rw(bool, initial_value=True) arm_logic = ( - ShutterCoordinatorADArmLogic(self.driver, shutter) + ShutterCoordinatorADArmLogic(self.driver, shutter, self.close_shutter_idle) if shutter is not None else ADArmLogic(self.driver) ) trigger_logic = ElectronAnalayserTriggerLogic( - self.driver, {self.driver.lens_mode, self.driver.pass_energy} + self.driver, + { + self.driver.region_name, + self.driver.energy_mode, + self.driver.acquisition_mode, + self.driver.lens_mode, + self.driver.low_energy, + self.driver.centre_energy, + self.driver.high_energy, + self.driver.energy_step, + self.driver.pass_energy, + self.driver.slices, + self.driver.iterations, + self.driver.total_steps, + self.driver.acquire_time, + self.driver.total_time, + self.driver.energy_axis, + self.driver.angle_axis, + self.driver.detector_mode, + self.driver.region_min_x, + self.driver.region_size_x, + self.driver.sensor_max_size_x, + self.driver.region_min_y, + self.driver.region_size_y, + self.driver.sensor_max_size_y, + self.driver.psu_mode, + }, ) super().__init__( diff --git a/tests/devices/electron_analyser/base/test_base_detector.py b/tests/devices/electron_analyser/base/test_base_detector.py index 3d1c722ac13..ecfd88c3fb1 100644 
--- a/tests/devices/electron_analyser/base/test_base_detector.py +++ b/tests/devices/electron_analyser/base/test_base_detector.py @@ -1,14 +1,21 @@ from unittest.mock import AsyncMock +import numpy as np import pytest from bluesky import plan_stubs as bps from bluesky.run_engine import RunEngine +from ophyd_async.testing import assert_value from dodal.devices.electron_analyser.base import ( + AbstractBaseRegion, + EnergyMode, GenericElectronAnalyserDetector, GenericSequence, ) -from tests.devices.electron_analyser.helper_util.sequence import get_test_sequence +from tests.devices.electron_analyser.helper_util import ( + TEST_SEQUENCE_REGION_NAMES, + get_test_sequence, +) @pytest.fixture @@ -16,17 +23,29 @@ def sequence(sim_detector: GenericElectronAnalyserDetector) -> GenericSequence: return get_test_sequence(type(sim_detector)) -def test_base_analyser_detector_trigger( +@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) +async def test_analyser_detector_binding_energy_axis( sim_detector: GenericElectronAnalyserDetector, + region: AbstractBaseRegion, run_engine: RunEngine, ) -> None: - sim_detector._arm_logic.arm = AsyncMock() # type: ignore - sim_detector._arm_logic.wait_for_idle = AsyncMock() # type: ignore + run_engine(bps.mv(sim_detector, region)) - run_engine(bps.trigger(sim_detector, wait=True), wait=True) + excitation_energy = ( + await sim_detector._region_logic.energy_source.energy.get_value() + ) + driver = sim_detector._region_logic.driver - sim_detector._arm_logic.arm.assert_awaited_once() # type: ignore - sim_detector._arm_logic.wait_for_idle.assert_awaited_once() # type: ignore + # Check binding energy is correct + is_region_binding = region.is_binding_energy() + is_driver_binding = await driver.energy_mode.get_value() == EnergyMode.BINDING + # Catch that driver correctly reflects what region energy mode is. 
+ assert is_region_binding == is_driver_binding + energy_axis = await driver.energy_axis.get_value() + expected_binding_energy_axis = np.array( + [excitation_energy - e if is_driver_binding else e for e in energy_axis] + ) + await assert_value(sim_detector.binding_energy_axis, expected_binding_energy_axis) def test_analyser_detector_loads_sequence_correctly( @@ -37,105 +56,33 @@ def test_analyser_detector_loads_sequence_correctly( assert seq is not None -async def test_analyser_detector_stage( +def test_analyser_detector_has_driver_as_child_and_region_detector_does_not( + sim_detector: GenericElectronAnalyserDetector, + sequence: GenericSequence, +) -> None: + # Remove parent name from driver name so it can be checked it exists in + # _child_devices dict + driver_name = sim_detector._region_logic.driver.name.replace( + sim_detector.name + "-", "" + ) + + assert sim_detector._region_logic.driver.parent == sim_detector + assert sim_detector._child_devices.get(driver_name) is not None + + region_detectors = sim_detector.create_region_detector_list( + sequence.get_enabled_regions() + ) + for det in region_detectors: + assert det._child_devices.get(driver_name) is None + assert det._region_logic.driver.parent == sim_detector + + +def test_analyser_detector_set_called_region_logic_setup_with_region( sim_detector: GenericElectronAnalyserDetector, + sequence: GenericSequence, + run_engine: RunEngine, ) -> None: - sim_detector._arm_logic.disarm = AsyncMock() # type: ignore - - await sim_detector.stage() - - sim_detector._arm_logic.disarm.assert_awaited_once() # type: ignore - - -# async def test_analyser_detector_unstage( -# sim_detector: GenericElectronAnalyserDetector, -# ) -> None: -# sim_detector._controller.disarm = AsyncMock() - -# await sim_detector.unstage() - -# sim_detector._controller.disarm.assert_awaited_once() - - -# def test_analyser_detector_creates_region_detectors( -# sim_detector: GenericElectronAnalyserDetector, -# sequence: GenericSequence, -# ) -> 
None: -# region_detectors = sim_detector.create_region_detector_list( -# sequence.get_enabled_regions() -# ) -# assert len(region_detectors) == len(sequence.get_enabled_regions()) -# for det in region_detectors: -# assert det.region.enabled is True -# assert det.name == sim_detector.name + "_" + det.region.name - - -# def test_analyser_detector_has_driver_as_child_and_region_detector_does_not( -# sim_detector: GenericElectronAnalyserDetector, -# sequence: GenericSequence, -# ) -> None: -# # Remove parent name from driver name so it can be checked it exists in -# # _child_devices dict -# driver_name = sim_detector._controller.driver.name.replace( -# sim_detector.name + "-", "" -# ) - -# assert sim_detector._controller.driver.parent == sim_detector -# assert sim_detector._child_devices.get(driver_name) is not None - -# region_detectors = sim_detector.create_region_detector_list( -# sequence.get_enabled_regions() -# ) -# for det in region_detectors: -# assert det._child_devices.get(driver_name) is None -# assert det._controller.driver.parent == sim_detector - - -# def test_analyser_detector_trigger_called_controller_prepare( -# sim_detector: GenericElectronAnalyserDetector, -# run_engine: RunEngine, -# ) -> None: -# sim_detector._controller.prepare = AsyncMock() -# sim_detector._controller.arm = AsyncMock() -# sim_detector._controller.wait_for_idle = AsyncMock() - -# run_engine(bps.trigger(sim_detector, wait=True), wait=True) - -# sim_detector._controller.prepare.assert_awaited_once() -# sim_detector._controller.arm.assert_awaited_once() -# sim_detector._controller.wait_for_idle.assert_awaited_once() - - -# def test_analyser_detector_set_called_controller_setup_with_region( -# sim_detector: GenericElectronAnalyserDetector, -# sequence: GenericSequence, -# run_engine: RunEngine, -# ) -> None: -# region = sequence.get_enabled_regions()[0] -# sim_detector._controller.setup_with_region = AsyncMock() -# run_engine(bps.mv(sim_detector, region), wait=True) -# 
sim_detector._controller.setup_with_region.assert_awaited_once_with(region) - - -# async def test_analyser_region_detector_trigger_sets_driver_with_region( -# sim_detector: GenericElectronAnalyserDetector, -# sequence: GenericSequence, -# run_engine: RunEngine, -# ) -> None: -# region_detectors = sim_detector.create_region_detector_list( -# sequence.get_enabled_regions() -# ) -# trigger_info = TriggerInfo() - -# for reg_det in region_detectors: -# reg_det.set = AsyncMock() -# reg_det._controller.prepare = AsyncMock() -# reg_det._controller.arm = AsyncMock() -# reg_det._controller.wait_for_idle = AsyncMock() - -# run_engine(bps.trigger(reg_det, wait=True), wait=True) - -# reg_det.set.assert_awaited_once_with(reg_det.region) -# reg_det._controller.prepare.assert_awaited_once_with(trigger_info) -# reg_det._controller.arm.assert_awaited_once() -# reg_det._controller.wait_for_idle.assert_awaited_once() + region = sequence.get_enabled_regions()[0] + sim_detector._region_logic.setup_with_region = AsyncMock() + run_engine(bps.mv(sim_detector, region), wait=True) + sim_detector._region_logic.setup_with_region.assert_awaited_once_with(region) diff --git a/tests/devices/electron_analyser/conftest.py b/tests/devices/electron_analyser/conftest.py index 07720f5ffb2..5ee95f2bb3f 100644 --- a/tests/devices/electron_analyser/conftest.py +++ b/tests/devices/electron_analyser/conftest.py @@ -1,5 +1,6 @@ from typing import Any +import numpy as np import pytest from ophyd_async.core import InOut, init_devices, set_mock_value @@ -132,6 +133,8 @@ async def ew4000( shutter=dual_fast_shutter, source_selector=source_selector, ) + energy_axis = [1, 2, 3, 4, 5] + set_mock_value(ew4000.driver.energy_axis, np.array(energy_axis, dtype=float)) return ew4000 diff --git a/tests/devices/electron_analyser/specs/test_specs_detector.py b/tests/devices/electron_analyser/specs/test_specs_detector.py new file mode 100644 index 00000000000..8d79523205a --- /dev/null +++ 
b/tests/devices/electron_analyser/specs/test_specs_detector.py @@ -0,0 +1,39 @@ +from unittest.mock import ANY + +from ophyd_async.testing import assert_configuration, partial_reading + +from dodal.devices.beamlines.b07 import LensMode +from dodal.devices.beamlines.b07_shared import PsuMode +from dodal.devices.electron_analyser.specs import SpecsDetector, SpecsRegion + + +async def test_specs_detector_read_configuration(b07b_specs150: SpecsDetector) -> None: + prefix = b07b_specs150.driver.name + "-" + region = SpecsRegion[LensMode, PsuMode]( + lens_mode=LensMode.HIGH_ANGULAR_DISPERSION, psu_mode=PsuMode.V10 + ) + await b07b_specs150.set(region) + await assert_configuration( + b07b_specs150, + { + b07b_specs150.binding_energy_axis.name: partial_reading(ANY), + f"{prefix}region_name": partial_reading(region.name), + f"{prefix}energy_mode": partial_reading(region.energy_mode), + f"{prefix}acquisition_mode": partial_reading(region.acquisition_mode), + f"{prefix}snapshot_values": partial_reading(region.values), + f"{prefix}lens_mode": partial_reading(region.lens_mode), + f"{prefix}low_energy": partial_reading(region.low_energy), + f"{prefix}centre_energy": partial_reading(region.centre_energy), + f"{prefix}high_energy": partial_reading(region.high_energy), + f"{prefix}energy_step": partial_reading(region.energy_step), + f"{prefix}pass_energy": partial_reading(region.pass_energy), + f"{prefix}slices": partial_reading(region.slices), + f"{prefix}iterations": partial_reading(region.iterations), + f"{prefix}total_steps": partial_reading(ANY), + f"{prefix}acquire_time": partial_reading(region.acquire_time), + f"{prefix}total_time": partial_reading(ANY), + f"{prefix}energy_axis": partial_reading(ANY), + f"{prefix}angle_axis": partial_reading(ANY), + f"{prefix}psu_mode": partial_reading(ANY), + }, + ) diff --git a/tests/devices/electron_analyser/specs/test_specs_driver_io.py b/tests/devices/electron_analyser/specs/test_specs_driver_io.py index be485cb06d5..a8b964fc9aa 100644 
--- a/tests/devices/electron_analyser/specs/test_specs_driver_io.py +++ b/tests/devices/electron_analyser/specs/test_specs_driver_io.py @@ -14,8 +14,6 @@ from dodal.devices.beamlines.b07 import LensMode from dodal.devices.beamlines.b07_shared import PsuMode -from dodal.devices.electron_analyser.base import EnergyMode -from dodal.devices.electron_analyser.base.base_enums import EnergyMode from dodal.devices.electron_analyser.specs import ( AcquisitionMode, SpecsAnalyserDriverIO, @@ -105,11 +103,9 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( f"{prefix}total_steps": partial_reading(ANY), f"{prefix}total_time": partial_reading(ANY), f"{prefix}energy_axis": partial_reading(ANY), - f"{prefix}binding_energy_axis": partial_reading(ANY), f"{prefix}angle_axis": partial_reading(ANY), f"{prefix}snapshot_values": partial_reading(region.values), f"{prefix}psu_mode": partial_reading(region.psu_mode), - f"{prefix}cached_excitation_energy": partial_reading(0), }, ) @@ -136,29 +132,6 @@ async def test_analyser_sets_region_and_read_is_correct( ) -@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) -async def test_specs_analyser_binding_energy_axis( - sim_driver: SpecsAnalyserDriverIO[LensMode, PsuMode], - region: SpecsRegion[LensMode, PsuMode], - run_engine: RunEngine, -) -> None: - run_engine(bps.mv(sim_driver, region)) - - excitation_energy = 500 - await sim_driver.cached_excitation_energy.set(500) - - # Check binding energy is correct - is_region_binding = region.is_binding_energy() - is_driver_binding = await sim_driver.energy_mode.get_value() == EnergyMode.BINDING - # Catch that driver correctly reflects what region energy mode is. 
- assert is_region_binding == is_driver_binding - energy_axis = await sim_driver.energy_axis.get_value() - expected_binding_energy_axis = np.array( - [excitation_energy - e if is_driver_binding else e for e in energy_axis] - ) - await assert_value(sim_driver.binding_energy_axis, expected_binding_energy_axis) - - async def test_specs_analyser_energy_axis( sim_driver: SpecsAnalyserDriverIO[LensMode, PsuMode], run_engine: RunEngine, diff --git a/tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py b/tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py new file mode 100644 index 00000000000..2b9aa3ce44f --- /dev/null +++ b/tests/devices/electron_analyser/vgscienta/test_vgscienta_detector.py @@ -0,0 +1,44 @@ +from unittest.mock import ANY + +from ophyd_async.testing import assert_configuration, partial_reading + +from dodal.devices.beamlines.i09 import LensMode, PassEnergy +from dodal.devices.electron_analyser.vgscienta import VGScientaDetector, VGScientaRegion + + +async def test_vgscienta_detector_read_configuration(ew4000: VGScientaDetector) -> None: + prefix = ew4000.driver.name + "-" + region = VGScientaRegion[LensMode, PassEnergy]( + lens_mode=LensMode.ANGULAR45, pass_energy=PassEnergy.E10 + ) + await ew4000.set(region) + await assert_configuration( + ew4000, + { + ew4000.binding_energy_axis.name: partial_reading(ANY), + f"{prefix}region_name": partial_reading(region.name), + f"{prefix}energy_mode": partial_reading(region.energy_mode), + f"{prefix}acquisition_mode": partial_reading(region.acquisition_mode), + f"{prefix}lens_mode": partial_reading(region.lens_mode), + f"{prefix}low_energy": partial_reading(region.low_energy), + f"{prefix}centre_energy": partial_reading(region.centre_energy), + f"{prefix}high_energy": partial_reading(region.high_energy), + f"{prefix}energy_step": partial_reading(region.energy_step), + f"{prefix}pass_energy": partial_reading(region.pass_energy), + f"{prefix}slices": partial_reading(region.slices), + 
f"{prefix}iterations": partial_reading(region.iterations), + f"{prefix}total_steps": partial_reading(ANY), + f"{prefix}acquire_time": partial_reading(region.acquire_time), + f"{prefix}total_time": partial_reading(ANY), + f"{prefix}energy_axis": partial_reading(ANY), + f"{prefix}angle_axis": partial_reading(ANY), + f"{prefix}detector_mode": partial_reading(region.detector_mode), + f"{prefix}region_min_x": partial_reading(region.min_x), + f"{prefix}region_size_x": partial_reading(region.size_x), + f"{prefix}sensor_max_size_x": partial_reading(ANY), + f"{prefix}region_min_y": partial_reading(region.min_y), + f"{prefix}region_size_y": partial_reading(region.size_y), + f"{prefix}sensor_max_size_y": partial_reading(ANY), + f"{prefix}psu_mode": partial_reading(ANY), + }, + ) diff --git a/tests/devices/electron_analyser/vgscienta/test_vgscienta_driver_io.py b/tests/devices/electron_analyser/vgscienta/test_vgscienta_driver_io.py index befb5dfaa07..fda8f88eb8e 100644 --- a/tests/devices/electron_analyser/vgscienta/test_vgscienta_driver_io.py +++ b/tests/devices/electron_analyser/vgscienta/test_vgscienta_driver_io.py @@ -9,12 +9,10 @@ from ophyd_async.testing import ( assert_configuration, assert_reading, - assert_value, partial_reading, ) from dodal.devices.beamlines.i09 import LensMode, PassEnergy, PsuMode -from dodal.devices.electron_analyser.base import EnergyMode from dodal.devices.electron_analyser.vgscienta import ( VGScientaAnalyserDriverIO, VGScientaDetector, @@ -95,7 +93,6 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( f"{prefix}acquire_time": partial_reading(region.acquire_time), f"{prefix}total_time": partial_reading(ANY), f"{prefix}energy_axis": partial_reading(ANY), - f"{prefix}binding_energy_axis": partial_reading(ANY), f"{prefix}angle_axis": partial_reading(ANY), f"{prefix}detector_mode": partial_reading(region.detector_mode), f"{prefix}region_min_x": partial_reading(region.min_x), @@ -105,7 +102,6 @@ async def 
test_analyser_sets_region_and_read_configuration_is_correct( f"{prefix}region_size_y": partial_reading(region.size_y), f"{prefix}sensor_max_size_y": partial_reading(ANY), f"{prefix}psu_mode": partial_reading(ANY), - f"{prefix}cached_excitation_energy": partial_reading(0), }, ) @@ -134,30 +130,6 @@ async def test_analyser_sets_region_and_read_is_correct( ) -@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) -async def test_analayser_binding_energy_is_correct( - sim_driver: VGScientaAnalyserDriverIO[LensMode, PsuMode, PassEnergy], - region: VGScientaRegion[LensMode, PassEnergy], - run_engine: RunEngine, -) -> None: - run_engine(bps.mv(sim_driver, region), wait=True) - excitation_energy = 500 - await sim_driver.cached_excitation_energy.set(excitation_energy) - # Check binding energy is correct - energy_axis = [1, 2, 3, 4, 5] - set_mock_value(sim_driver.energy_axis, np.array(energy_axis, dtype=float)) - - # Check binding energy is correct - is_region_binding = region.is_binding_energy() - is_driver_binding = await sim_driver.energy_mode.get_value() == EnergyMode.BINDING - # Catch that driver correctly reflects what region energy mode is. 
- assert is_region_binding == is_driver_binding - expected_binding_energy_axis = np.array( - [excitation_energy - e if is_driver_binding else e for e in energy_axis] - ) - await assert_value(sim_driver.binding_energy_axis, expected_binding_energy_axis) - - def test_driver_throws_error_with_wrong_pass_energy( sim_driver: VGScientaAnalyserDriverIO[LensMode, PsuMode, PassEnergy], run_engine: RunEngine, From b591802e7eb9341361556fb1fa306bf6335c3104 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 14:44:10 +0000 Subject: [PATCH 12/30] Update p99 to use AndorDetector rather than Andor2Detector --- src/dodal/beamlines/p99.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/dodal/beamlines/p99.py b/src/dodal/beamlines/p99.py index dc2d0d686b0..4fdc134c870 100644 --- a/src/dodal/beamlines/p99.py +++ b/src/dodal/beamlines/p99.py @@ -1,6 +1,6 @@ from pathlib import Path -from ophyd_async.epics.adandor import Andor2Detector +from ophyd_async.epics.adandor import AndorDetector from ophyd_async.fastcs.panda import HDFPanda from dodal.common.beamlines.beamline_utils import ( @@ -59,13 +59,13 @@ def lab_stage() -> XYZStage: @devices.factory() -def andor2_det() -> Andor2Detector: +def andor2_det() -> AndorDetector: """Andor model:DU897_BV.""" - return Andor2Detector( + return AndorDetector( prefix=f"{PREFIX.beamline_prefix}-EA-DET-03:", path_provider=get_path_provider(), - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, ) From 6e75e676e68acf0be4f29b5650a1ed9b54f5a921 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 14:52:20 +0000 Subject: [PATCH 13/30] Update b21 --- src/dodal/beamlines/b21.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/dodal/beamlines/b21.py b/src/dodal/beamlines/b21.py index c84e0e1d199..bf1d84257d2 100644 --- a/src/dodal/beamlines/b21.py +++ b/src/dodal/beamlines/b21.py @@ -39,9 +39,9 @@ def 
saxs(path_provider: PathProvider) -> EigerDetector: return EigerDetector( prefix=PREFIX.beamline_prefix, path_provider=path_provider, - drv_suffix="-EA-EIGER-01:", - hdf_suffix="-EA-EIGER-01:OD:", - odin_nodes=1, + # driver_suffix="-EA-EIGER-01:", + # hdf_suffix="-EA-EIGER-01:OD:", + # odin_nodes=1, ) @@ -50,9 +50,9 @@ def waxs(path_provider: PathProvider) -> EigerDetector: return EigerDetector( prefix=PREFIX.beamline_prefix, path_provider=path_provider, - drv_suffix="-EA-EIGER-02:", - hdf_suffix="-EA-EIGER-02:OD:", - odin_nodes=1, + # drv_suffix="-EA-EIGER-02:", + # hdf_suffix="-EA-EIGER-02:OD:", + # odin_nodes=1, ) @@ -141,8 +141,8 @@ def wbscam(path_provider: PathProvider) -> AravisDetector: ) return NXSasOAV( prefix=f"{PREFIX.beamline_prefix}-RS-ABSB-02:CAM:", - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, path_provider=path_provider, metadata_holder=metadata_holder, ) From 75fb7326270b86da7238ac2f2064aaef39656bae Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 15:05:11 +0000 Subject: [PATCH 14/30] Fix beamlines --- src/dodal/beamlines/i03.py | 2 -- src/dodal/beamlines/i11.py | 2 +- src/dodal/beamlines/i18.py | 1 - src/dodal/beamlines/i22.py | 26 ++++++++------------------ 4 files changed, 9 insertions(+), 22 deletions(-) diff --git a/src/dodal/beamlines/i03.py b/src/dodal/beamlines/i03.py index 79631b15b2e..56963b6589c 100644 --- a/src/dodal/beamlines/i03.py +++ b/src/dodal/beamlines/i03.py @@ -185,8 +185,6 @@ def fastcs_eiger(path_provider: PathProvider) -> FastEiger: return FastEiger( prefix=PREFIX.beamline_prefix, path_provider=path_provider, - drv_suffix="-EA-EIGER-02:", - hdf_suffix="-EA-EIGER-01:OD:", ) diff --git a/src/dodal/beamlines/i11.py b/src/dodal/beamlines/i11.py index 9328a32929f..15274a74623 100644 --- a/src/dodal/beamlines/i11.py +++ b/src/dodal/beamlines/i11.py @@ -50,7 +50,7 @@ def mythen3(path_provider: PathProvider) -> Mythen3: 
prefix=f"{PREFIX.beamline_prefix}-EA-DET-07:", path_provider=path_provider, drv_suffix=DET_SUFFIX, - fileio_suffix="HDF:", + writer_suffix="HDF:", ) diff --git a/src/dodal/beamlines/i18.py b/src/dodal/beamlines/i18.py index 4e9af8ddfdc..b60791eb140 100644 --- a/src/dodal/beamlines/i18.py +++ b/src/dodal/beamlines/i18.py @@ -100,7 +100,6 @@ def i0() -> TetrammDetector: return TetrammDetector( f"{PREFIX.beamline_prefix}-DI-XBPM-02:", path_provider=get_path_provider(), - type="Cividec Diamond XBPM", ) diff --git a/src/dodal/beamlines/i22.py b/src/dodal/beamlines/i22.py index 745c85161b2..933394367c1 100644 --- a/src/dodal/beamlines/i22.py +++ b/src/dodal/beamlines/i22.py @@ -76,8 +76,8 @@ def saxs(path_provider: PathProvider) -> PilatusDetector: return NXSasPilatus( prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-01:", path_provider=path_provider, - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, metadata_holder=metadata_holder, plugins={ "stats": NDStatsIO(prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-01:STAT:") @@ -104,8 +104,8 @@ def waxs(path_provider: PathProvider) -> PilatusDetector: return NXSasPilatus( prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-03:", path_provider=path_provider, - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, metadata_holder=metadata_holder, plugins={ "stats": NDStatsIO(prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-03:STAT:") @@ -118,12 +118,7 @@ def i0(path_provider: PathProvider) -> TetrammDetector: return TetrammDetector( prefix=f"{PREFIX.beamline_prefix}-EA-XBPM-02:", path_provider=path_provider, - type="Cividec Diamond XBPM", - plugins={ - "stats": NDPluginBaseIO( - prefix=f"{PREFIX.beamline_prefix}-EA-XBPM-02:SumAll:" - ) - }, + plugins=[NDPluginBaseIO(prefix=f"{PREFIX.beamline_prefix}-EA-XBPM-02:SumAll:")], ) @@ -132,12 +127,7 @@ def it(path_provider: PathProvider) -> TetrammDetector: return TetrammDetector( 
prefix=f"{PREFIX.beamline_prefix}-EA-TTRM-02:", path_provider=path_provider, - type="PIN Diode", - plugins={ - "stats": NDPluginBaseIO( - prefix=f"{PREFIX.beamline_prefix}-EA-TTRM-02:SumAll:" - ) - }, + plugins=[NDPluginBaseIO(prefix=f"{PREFIX.beamline_prefix}-EA-TTRM-02:SumAll:")], ) @@ -278,8 +268,8 @@ def oav(path_provider: PathProvider) -> AravisDetector: ) return NXSasOAV( prefix=f"{PREFIX.beamline_prefix}-DI-OAV-01:", - drv_suffix=DET_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=DET_SUFFIX, + writer_suffix=HDF5_SUFFIX, path_provider=path_provider, metadata_holder=metadata_holder, ) From 55c424c987824018896a026b0f74e7dfccbb42b1 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 15:22:37 +0000 Subject: [PATCH 15/30] Update i19-2 --- src/dodal/beamlines/i19_2.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/dodal/beamlines/i19_2.py b/src/dodal/beamlines/i19_2.py index 352a6cbb89b..cb3619ec197 100644 --- a/src/dodal/beamlines/i19_2.py +++ b/src/dodal/beamlines/i19_2.py @@ -89,8 +89,6 @@ def eiger(path_provider: PathProvider) -> EigerDetector: return EigerDetector( prefix=PREFIX.beamline_prefix, path_provider=path_provider, - drv_suffix="-EA-EIGER-01:", - hdf_suffix="-EA-EIGER-01:OD:", ) From 060c02974c789889956cff6a506c84e9bf951f03 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 24 Mar 2026 16:37:06 +0000 Subject: [PATCH 16/30] Fix mythen --- src/dodal/devices/beamlines/i11/mythen.py | 22 +--------- tests/devices/beamlines/i11/test_mythen.py | 47 +++++++++------------- 2 files changed, 20 insertions(+), 49 deletions(-) diff --git a/src/dodal/devices/beamlines/i11/mythen.py b/src/dodal/devices/beamlines/i11/mythen.py index 647ab3e5129..f8fddc0012c 100644 --- a/src/dodal/devices/beamlines/i11/mythen.py +++ b/src/dodal/devices/beamlines/i11/mythen.py @@ -9,10 +9,10 @@ ) from ophyd_async.epics.adcore import ( ADArmLogic, + ADBaseIO, ADImageMode, ADWriterType, AreaDetector, - NDArrayBaseIO, ) from ophyd_async.epics.core import 
PvSuffix @@ -45,14 +45,8 @@ class Mythen3DetectorState(StrictEnum): ABORTED = "Aborted" -class Mythen3Driver(NDArrayBaseIO): - acquire_time: A[SignalRW[float], PvSuffix.rbv("AcquireTime")] - acquire_period: A[SignalRW[float], PvSuffix.rbv("AcquirePeriod")] - num_images: A[SignalRW[int], PvSuffix.rbv("NumImages")] - image_mode: A[SignalRW[ADImageMode], PvSuffix.rbv("ImageMode")] - +class Mythen3Driver(ADBaseIO): # Non-specific PV's but with mythen3 specific values - detector_state: A[SignalRW[Mythen3DetectorState], PvSuffix("DetectorState_RBV")] trigger_mode: A[SignalRW[Mythen3TriggerMode], PvSuffix.rbv("TriggerMode")] # mythen3 specific PV's @@ -170,18 +164,6 @@ def __init__( ): self.driver = Mythen3Driver(prefix + drv_suffix) - # self.writer = writer_cls.with_io( - # prefix, - # path_provider, - # dataset_source=self.driver, - # fileio_suffix=fileio_suffix, - # ) - - # super().__init__( - # controller=self.controller, - # writer=self.writer, - # name=name, - # ) # plugins=plugins # config_sigs=config_sigs super().__init__( prefix=prefix, driver=self.driver, diff --git a/tests/devices/beamlines/i11/test_mythen.py b/tests/devices/beamlines/i11/test_mythen.py index 791ebacf471..945c8252198 100644 --- a/tests/devices/beamlines/i11/test_mythen.py +++ b/tests/devices/beamlines/i11/test_mythen.py @@ -1,7 +1,11 @@ from pathlib import Path import pytest -from ophyd_async.core import DetectorTrigger, TriggerInfo, init_devices +from ophyd_async.core import ( + SignalDict, + error_if_none, + init_devices, +) from ophyd_async.epics.adcore import ADImageMode from dodal.common.beamlines.beamline_utils import get_path_provider, set_path_provider @@ -37,50 +41,35 @@ async def i11_mythen() -> Mythen3: def test_mythen_deadtime(i11_mythen: Mythen3) -> None: # deadtime is constant for Mythen3, so we can just check it - assert i11_mythen.controller.get_deadtime(10.0) == _DEADTIMES[_BIT_DEPTH] + trigger_logic = error_if_none( + i11_mythen._trigger_logic, "TriggerLogic cannot be None." 
+ ) + assert trigger_logic.get_deadtime(SignalDict()) == _DEADTIMES[_BIT_DEPTH] async def test_mythen_prepare_when_det_trig_internal(i11_mythen: Mythen3) -> None: - trigger_info = TriggerInfo( - number_of_events=1, - trigger=DetectorTrigger.INTERNAL, - deadtime=1, - livetime=10.0, - exposure_timeout=30.0, - exposures_per_event=1, + trigger_logic = error_if_none( + i11_mythen._trigger_logic, "TriggerLogic cannot be None." ) - - await i11_mythen.controller.prepare(trigger_info) + await trigger_logic.prepare_internal(num=1, livetime=10.0, deadtime=1) assert ( await i11_mythen.driver.trigger_mode.get_value() == Mythen3TriggerMode.INTERNAL ) async def test_mythen_prepare_when_det_trig_external(i11_mythen: Mythen3) -> None: - trigger_info = TriggerInfo( - number_of_events=1, - trigger=DetectorTrigger.EXTERNAL_LEVEL, - deadtime=1, - livetime=10.0, - exposure_timeout=30.0, - exposures_per_event=1, + trigger_logic = error_if_none( + i11_mythen._trigger_logic, "TriggerLogic cannot be None." ) - - await i11_mythen.controller.prepare(trigger_info) + await trigger_logic.prepare_level(num=1) assert ( await i11_mythen.driver.trigger_mode.get_value() == Mythen3TriggerMode.EXTERNAL ) async def test_mythen_prepare_when_continous_exposure(i11_mythen: Mythen3) -> None: - trigger_info = TriggerInfo( - number_of_events=0, - trigger=DetectorTrigger.EXTERNAL_LEVEL, - deadtime=1, - livetime=10.0, - exposure_timeout=30.0, - exposures_per_event=1, + trigger_logic = error_if_none( + i11_mythen._trigger_logic, "TriggerLogic cannot be None." 
) - - await i11_mythen.controller.prepare(trigger_info) + await trigger_logic.prepare_edge(num=0, livetime=10) assert await i11_mythen.driver.image_mode.get_value() == ADImageMode.CONTINUOUS From bad5706734a062445df0e1664271ceb146d2478f Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Wed, 25 Mar 2026 09:31:42 +0000 Subject: [PATCH 17/30] Fix i22 saxs test --- tests/devices/beamlines/i22/test_metadataholder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/devices/beamlines/i22/test_metadataholder.py b/tests/devices/beamlines/i22/test_metadataholder.py index 98f4bcb65d1..2dff3bcbfbf 100644 --- a/tests/devices/beamlines/i22/test_metadataholder.py +++ b/tests/devices/beamlines/i22/test_metadataholder.py @@ -11,8 +11,8 @@ def saxs(static_path_provider: PathProvider) -> PilatusDetector: with init_devices(mock=True): saxs = NXSasPilatus( prefix="-EA-PILAT-01:", - drv_suffix=CAM_SUFFIX, - fileio_suffix=HDF5_SUFFIX, + driver_suffix=CAM_SUFFIX, + writer_suffix=HDF5_SUFFIX, metadata_holder=NXSasMetadataHolder( x_pixel_size=(1.72e-1, "mm"), y_pixel_size=(1.72e-1, "mm"), From fcef657720027561471f6aee905189b87269d2f4 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Wed, 25 Mar 2026 09:33:36 +0000 Subject: [PATCH 18/30] Remove i24 import CommissioningJungfrau --- src/dodal/beamlines/i24.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dodal/beamlines/i24.py b/src/dodal/beamlines/i24.py index 8ff2955cdf9..5d9b542e33e 100644 --- a/src/dodal/beamlines/i24.py +++ b/src/dodal/beamlines/i24.py @@ -1,7 +1,6 @@ from functools import cache from pathlib import Path -from dodal.devices.beamlines.i24.commissioning_jungfrau import CommissioningJungfrau from ophyd_async.core import AutoMaxIncrementingPathProvider, PathProvider from ophyd_async.fastcs.jungfrau import JungfrauDetector From 5c3cac2af7ec086bc41bddecfcb1aa9a1d825e84 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Wed, 25 Mar 2026 10:53:06 +0000 Subject: [PATCH 19/30] Attempt to fix dodal plans 
for eiger --- ...nfigure_arm_trigger_and_disarm_detector.py | 50 +++++++++---------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py b/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py index d022dcae669..fe31d6b67c0 100644 --- a/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py +++ b/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py @@ -34,15 +34,15 @@ def configure_arm_trigger_and_disarm_detector( yield from change_roi_mode(eiger, detector_params, wait=True) LOGGER.info(f"Changing ROI Mode: {time.time() - start}s") start = time.time() - yield from bps.abs_set(eiger.odin.num_frames_chunks, 1, wait=True) + yield from bps.abs_set(eiger.od.fp.process_frames_per_block, 1, wait=True) LOGGER.info(f"Setting # of Frame Chunks: {time.time() - start}s") start = time.time() yield from bps.abs_set( - eiger.drv.detector.photon_energy, detector_params.expected_energy_ev, wait=True + eiger.detector.photon_energy, detector_params.expected_energy_ev, wait=True ) LOGGER.info(f"Setting Photon Energy: {time.time() - start}s") start = time.time() - yield from bps.abs_set(eiger.drv.detector.ntrigger, 1, wait=True) + yield from bps.abs_set(eiger.detector.ntrigger, 1, wait=True) LOGGER.info(f"Setting Number of Triggers: {time.time() - start}s") start = time.time() yield from set_mx_settings_pvs(eiger, detector_params, wait=True) @@ -54,7 +54,7 @@ def configure_arm_trigger_and_disarm_detector( yield from bps.kickoff(eiger, wait=True) LOGGER.info(f"Kickoff Eiger: {time.time() - start}s") start = time.time() - yield from bps.trigger(eiger.drv.detector.trigger, wait=True) + yield from bps.trigger(eiger.detector.trigger, wait=True) LOGGER.info(f"Triggering Eiger: {time.time() - start}s") start = time.time() yield from bps.complete(eiger, wait=True) @@ -71,10 +71,10 @@ def set_cam_pvs( group="cam_pvs", ): yield from bps.abs_set( - eiger.drv.detector.count_time, 
detector_params.exposure_time_s, group=group + eiger.detector.count_time, detector_params.exposure_time_s, group=group ) yield from bps.abs_set( - eiger.drv.detector.frame_time, detector_params.exposure_time_s, group=group + eiger.detector.frame_time, detector_params.exposure_time_s, group=group ) if wait: @@ -94,30 +94,30 @@ def change_roi_mode( ) yield from bps.abs_set( - eiger.drv.detector.roi_mode, + eiger.detector.roi_mode, "4M" if detector_params.use_roi_mode else "disabled", group=group, ) yield from bps.abs_set( - eiger.odin.image_height, + eiger.od.fp.data_dims_0, detector_dimensions.height, group=group, ) yield from bps.abs_set( - eiger.odin.image_width, - detector_dimensions.width, - group=group, - ) - yield from bps.abs_set( - eiger.odin.num_row_chunks, - detector_dimensions.height, - group=group, - ) - yield from bps.abs_set( - eiger.odin.num_col_chunks, + eiger.od.fp.data_dims_1, detector_dimensions.width, group=group, ) + # yield from bps.abs_set( + # eiger.odin.num_row_chunks, + # detector_dimensions.height, + # group=group, + # ) + # yield from bps.abs_set( + # eiger.odin.num_col_chunks, + # detector_dimensions.width, + # group=group, + # ) if wait: yield from bps.wait(group) @@ -133,22 +133,22 @@ def set_mx_settings_pvs( detector_params.detector_distance ) - yield from bps.abs_set(eiger.drv.detector.beam_center_x, beam_x_pixels, group=group) - yield from bps.abs_set(eiger.drv.detector.beam_center_y, beam_y_pixels, group=group) + yield from bps.abs_set(eiger.detector.beam_center_x, beam_x_pixels, group=group) + yield from bps.abs_set(eiger.detector.beam_center_y, beam_y_pixels, group=group) yield from bps.abs_set( - eiger.drv.detector.detector_distance, + eiger.detector.detector_distance, detector_params.detector_distance, group=group, ) yield from bps.abs_set( - eiger.drv.detector.omega_start, detector_params.omega_start, group=group + eiger.detector.omega_start, detector_params.omega_start, group=group ) yield from bps.abs_set( - 
eiger.drv.detector.omega_increment, detector_params.omega_increment, group=group + eiger.detector.omega_increment, detector_params.omega_increment, group=group ) yield from bps.abs_set( - eiger.drv.detector.photon_energy, + eiger.detector.photon_energy, detector_params.expected_energy_ev, group=group, ) From c16100d4b59705d48db61f8574c6203cdb231f17 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Wed, 25 Mar 2026 11:53:14 +0000 Subject: [PATCH 20/30] Update electron analyser detector doc string --- .../electron_analyser/base/base_detector.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/dodal/devices/electron_analyser/base/base_detector.py b/src/dodal/devices/electron_analyser/base/base_detector.py index 3721ed3b33a..957b254c09b 100644 --- a/src/dodal/devices/electron_analyser/base/base_detector.py +++ b/src/dodal/devices/electron_analyser/base/base_detector.py @@ -28,25 +28,27 @@ class ElectronAnalyserDetector( StandardDetector, Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion], ): - """Detector for data acquisition of electron analyser.""" + """Detector for data acquisition of electron analyser. Can be configured with + region data via set method. 
+ """ def __init__( self, arm_logic: DetectorArmLogic, trigger_logic: DetectorTriggerLogic, region_logic: RegionLogic, - # ToDo - Add data logic name: str = "", ): - self.add_detector_logics(arm_logic, trigger_logic) - self._region_logic = region_logic self.binding_energy_axis = derived_signal_r( self._calculate_binding_energy_axis, "eV", - energy_axis=self._region_logic.driver.energy_axis, - excitation_energy=self._region_logic.energy_source.energy, - energy_mode=self._region_logic.driver.energy_mode, + energy_axis=region_logic.driver.energy_axis, + excitation_energy=region_logic.energy_source.energy, + energy_mode=region_logic.driver.energy_mode, ) + self._region_logic = region_logic + # ToDo - Add data logic + self.add_detector_logics(arm_logic, trigger_logic) self.add_config_signals(self.binding_energy_axis) super().__init__(name) From a1ee4025424cb2f551740b0cb5be4b8f3979bf4a Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Mon, 30 Mar 2026 13:10:40 +0000 Subject: [PATCH 21/30] Correct eiger plan --- ...nfigure_arm_trigger_and_disarm_detector.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py b/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py index fe31d6b67c0..8691010812d 100644 --- a/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py +++ b/src/dodal/plans/configure_arm_trigger_and_disarm_detector.py @@ -34,7 +34,7 @@ def configure_arm_trigger_and_disarm_detector( yield from change_roi_mode(eiger, detector_params, wait=True) LOGGER.info(f"Changing ROI Mode: {time.time() - start}s") start = time.time() - yield from bps.abs_set(eiger.od.fp.process_frames_per_block, 1, wait=True) + yield from bps.abs_set(eiger.od.fp.data_chunks_0, 1, wait=True) LOGGER.info(f"Setting # of Frame Chunks: {time.time() - start}s") start = time.time() yield from bps.abs_set( @@ -108,16 +108,16 @@ def change_roi_mode( detector_dimensions.width, group=group, ) - # 
yield from bps.abs_set( - # eiger.odin.num_row_chunks, - # detector_dimensions.height, - # group=group, - # ) - # yield from bps.abs_set( - # eiger.odin.num_col_chunks, - # detector_dimensions.width, - # group=group, - # ) + yield from bps.abs_set( + eiger.od.fp.data_chunks_1, + detector_dimensions.height, + group=group, + ) + yield from bps.abs_set( + eiger.od.fp.data_chunks_2, + detector_dimensions.width, + group=group, + ) if wait: yield from bps.wait(group) From 82399415563e23b79a1d06dcc8c083f85598d2c6 Mon Sep 17 00:00:00 2001 From: Oli Wenman Date: Tue, 31 Mar 2026 10:57:43 +0000 Subject: [PATCH 22/30] partial test conversion --- ...nfigure_arm_trigger_and_disarm_detector.py | 46 +++++++++++-------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/tests/plans/test_configure_arm_trigger_and_disarm_detector.py b/tests/plans/test_configure_arm_trigger_and_disarm_detector.py index 700c093c211..d5a50a108a1 100644 --- a/tests/plans/test_configure_arm_trigger_and_disarm_detector.py +++ b/tests/plans/test_configure_arm_trigger_and_disarm_detector.py @@ -20,9 +20,9 @@ async def fake_eiger(): fake_eiger = FastEiger("", MagicMock()) await fake_eiger.connect(mock=True) - fake_eiger.drv.detector.arm.trigger = AsyncMock() - fake_eiger.drv.detector.disarm.trigger = AsyncMock() - fake_eiger._writer.observe_indices_written = fake_observe_indices_written + fake_eiger.detector.arm.trigger = AsyncMock() + fake_eiger.detector.disarm.trigger = AsyncMock() + fake_eiger.od.fp.observe_indices_written = fake_observe_indices_written return fake_eiger @@ -31,7 +31,7 @@ async def fake_observe_indices_written(timeout: float) -> AsyncGenerator[int, No async def test_configure_arm_trigger_and_disarm_detector( - fake_eiger, eiger_params, run_engine: RunEngine + fake_eiger: FastEiger, eiger_params, run_engine: RunEngine ): trigger_info = TriggerInfo( # Manual trigger, so setting number of triggers to 1. 
@@ -40,26 +40,32 @@ async def test_configure_arm_trigger_and_disarm_detector( deadtime=0.0001, ) filename: str = "filename.h5" - fake_eiger._writer._path_provider.return_value.filename = filename + fake_eiger._data_logics[0].path_provider.return_value.filename = filename # type: ignore def set_meta_active(*args, **kwargs) -> None: - set_mock_value(fake_eiger.odin.meta_file_name, filename) - set_mock_value(fake_eiger.odin.id, filename) - set_mock_value(fake_eiger.odin.meta_active, "Active") + set_mock_value(fake_eiger.od.fp.file_path, filename) + set_mock_value(fake_eiger.od.mw.acquisition_id, filename) + set_mock_value(fake_eiger.od.mw.writing, True) + set_mock_value(fake_eiger.detector.bit_depth_image, 16) + # set_mock_value(fake_eiger.odin.meta_file_name, filename) + # set_mock_value(fake_eiger.odin.id, filename) + # set_mock_value(fake_eiger.odin.meta_active, "Active") def set_capture_rbv_meta_writing_and_detector_state(*args, **kwargs) -> None: # Mimics capturing and immediete completion status on Eiger. 
- fake_eiger._writer._path_provider.return_value.filename = "filename.h5" - set_mock_value(fake_eiger.odin.capture_rbv, "Capturing") - set_mock_value(fake_eiger.odin.meta_writing, "Writing") - set_mock_value(fake_eiger.odin.meta_file_name, "filename.h5") - set_mock_value(fake_eiger.odin.id, "filename.h5") - set_mock_value(fake_eiger.odin.fan_ready, 1) - set_mock_value(fake_eiger.drv.detector.state, "idle") - callback_on_mock_put(fake_eiger.odin.num_to_capture, set_meta_active) + # fake_eiger._writer._path_provider.return_value.filename = "filename.h5" + # set_mock_value(fake_eiger.odin.capture_rbv, "Capturing") + # set_mock_value(fake_eiger.odin.meta_writing, "Writing") + # set_mock_value(fake_eiger.odin.meta_file_name, "filename.h5") + # set_mock_value(fake_eiger.odin.id, "filename.h5") + # set_mock_value(fake_eiger.odin.fan_ready, 1) + set_mock_value(fake_eiger.od.fp.start_writing, True) + set_mock_value(fake_eiger.detector.state, "idle") + + callback_on_mock_put(fake_eiger.od.fp.start_writing, set_meta_active) callback_on_mock_put( - fake_eiger.odin.capture, set_capture_rbv_meta_writing_and_detector_state + fake_eiger.od.mw.writing, set_capture_rbv_meta_writing_and_detector_state ) run_engine( @@ -67,10 +73,10 @@ def set_capture_rbv_meta_writing_and_detector_state(*args, **kwargs) -> None: fake_eiger, eiger_params, trigger_info ) ) - fake_eiger.drv.detector.arm.trigger.assert_called_once() + fake_eiger.detector.arm.trigger.assert_called_once() # Disarm occurs at the start and end of the plan. 
- assert len(fake_eiger.drv.detector.disarm.trigger.call_args_list) == 2 + assert len(fake_eiger.detector.disarm.trigger.call_args_list) == 2 assert ( - await fake_eiger.drv.detector.photon_energy.get_value() + await fake_eiger.detector.photon_energy.get_value() == eiger_params.expected_energy_ev ) From 7a8814ac04caf6dec481c427a6d1719e9ca06cbb Mon Sep 17 00:00:00 2001 From: Shihab Suliman Date: Wed, 22 Apr 2026 15:17:02 +0000 Subject: [PATCH 23/30] tests: amend fastcs eiger test to use new signals --- ...nfigure_arm_trigger_and_disarm_detector.py | 57 +++++++++---------- 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/tests/plans/test_configure_arm_trigger_and_disarm_detector.py b/tests/plans/test_configure_arm_trigger_and_disarm_detector.py index 700c093c211..6d6ffbb8bcf 100644 --- a/tests/plans/test_configure_arm_trigger_and_disarm_detector.py +++ b/tests/plans/test_configure_arm_trigger_and_disarm_detector.py @@ -1,5 +1,4 @@ -from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import MagicMock import pytest from bluesky.run_engine import RunEngine @@ -7,31 +6,27 @@ DetectorTrigger, TriggerInfo, callback_on_mock_put, + get_mock, set_mock_value, ) from ophyd_async.fastcs.eiger import EigerDetector as FastEiger +from dodal.devices.detector import DetectorParams from dodal.plans.configure_arm_trigger_and_disarm_detector import ( configure_arm_trigger_and_disarm_detector, ) @pytest.fixture -async def fake_eiger(): +async def fake_eiger() -> FastEiger: fake_eiger = FastEiger("", MagicMock()) await fake_eiger.connect(mock=True) - fake_eiger.drv.detector.arm.trigger = AsyncMock() - fake_eiger.drv.detector.disarm.trigger = AsyncMock() - fake_eiger._writer.observe_indices_written = fake_observe_indices_written + set_mock_value(fake_eiger.detector.bit_depth_image, 32) return fake_eiger -async def fake_observe_indices_written(timeout: float) -> AsyncGenerator[int, None]: - yield 1 - - async def 
test_configure_arm_trigger_and_disarm_detector( - fake_eiger, eiger_params, run_engine: RunEngine + fake_eiger: FastEiger, eiger_params: DetectorParams, run_engine: RunEngine ): trigger_info = TriggerInfo( # Manual trigger, so setting number of triggers to 1. @@ -39,38 +34,40 @@ async def test_configure_arm_trigger_and_disarm_detector( trigger=DetectorTrigger.INTERNAL, deadtime=0.0001, ) - filename: str = "filename.h5" - fake_eiger._writer._path_provider.return_value.filename = filename - def set_meta_active(*args, **kwargs) -> None: - set_mock_value(fake_eiger.odin.meta_file_name, filename) - set_mock_value(fake_eiger.odin.id, filename) - set_mock_value(fake_eiger.odin.meta_active, "Active") + filename: str = "filename.h5" - def set_capture_rbv_meta_writing_and_detector_state(*args, **kwargs) -> None: + def set_detector_into_writing_state(*args, **kwargs) -> None: # Mimics capturing and immediete completion status on Eiger. - fake_eiger._writer._path_provider.return_value.filename = "filename.h5" - set_mock_value(fake_eiger.odin.capture_rbv, "Capturing") - set_mock_value(fake_eiger.odin.meta_writing, "Writing") - set_mock_value(fake_eiger.odin.meta_file_name, "filename.h5") - set_mock_value(fake_eiger.odin.id, "filename.h5") - set_mock_value(fake_eiger.odin.fan_ready, 1) - set_mock_value(fake_eiger.drv.detector.state, "idle") + set_mock_value(fake_eiger.od.writing, True) + set_mock_value(fake_eiger.od.file_prefix, filename) + set_mock_value(fake_eiger.od.acquisition_id, filename) + set_mock_value(fake_eiger.detector.state, "idle") - callback_on_mock_put(fake_eiger.odin.num_to_capture, set_meta_active) callback_on_mock_put( - fake_eiger.odin.capture, set_capture_rbv_meta_writing_and_detector_state + fake_eiger.od.fp.start_writing, set_detector_into_writing_state ) + # Mimics receiving a frame + def set_frames_written(*args, **kwargs) -> None: + set_mock_value(fake_eiger.od.fp.frames_written, 1) + + callback_on_mock_put(fake_eiger.detector.trigger, 
set_frames_written) + run_engine( configure_arm_trigger_and_disarm_detector( fake_eiger, eiger_params, trigger_info ) ) - fake_eiger.drv.detector.arm.trigger.assert_called_once() + + arm_mock = get_mock(fake_eiger.arm_when_ready) + disarm_mock = get_mock(fake_eiger.detector.disarm) + + # Eiger was armed + assert len(arm_mock.mock_calls) == 1 # Disarm occurs at the start and end of the plan. - assert len(fake_eiger.drv.detector.disarm.trigger.call_args_list) == 2 + assert len(disarm_mock.mock_calls) == 2 assert ( - await fake_eiger.drv.detector.photon_energy.get_value() + await fake_eiger.detector.photon_energy.get_value() == eiger_params.expected_energy_ev ) From 3417ce15ab982e2e175855ae09100c80c8e9a378 Mon Sep 17 00:00:00 2001 From: Shihab Suliman Date: Tue, 28 Apr 2026 17:18:52 +0100 Subject: [PATCH 24/30] tests: amend pattern generator in conftest --- tests/plans/conftest.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/tests/plans/conftest.py b/tests/plans/conftest.py index 8cc97adb658..fc143bbe184 100644 --- a/tests/plans/conftest.py +++ b/tests/plans/conftest.py @@ -4,7 +4,11 @@ import pytest from daq_config_server import ConfigClient -from ophyd_async.core import PathProvider, StandardDetector, init_devices +from ophyd_async.core import ( + PathProvider, + StandardDetector, + init_devices, +) from ophyd_async.sim import PatternGenerator, SimBlobDetector, SimMotor from dodal.devices.beamlines.i03.dcm import DCM @@ -38,24 +42,19 @@ def det(tmp_path: Path, path_provider) -> StandardDetector: class DevNullPatternGenerator(PatternGenerator): def __init__(self, sleep=asyncio.sleep): super().__init__(sleep) - self.n_images = 0 + self._written = 0 def open_file(self, path: PurePath, width: int, height: int): - pass + self._update_images_written(0) - async def write_images_to_file( + def setup_acquisition_parameters( self, exposure: float, period: float, number_of_frames: int ): - self.n_images += number_of_frames - - 
def generate_point(self, channel: int = 1, high_energy: bool = False) -> float: - return 0.0 - - async def wait_for_next_index(self, timeout: float): - pass + self._number_of_frames = number_of_frames - def get_last_index(self) -> int: - return self.n_images + async def write_images_to_file(self): + self._written += 1 + self._update_images_written(self._written) def close_file(self): pass From ce542edb506b31c6f8995952ff1bdf6ddf5e2011 Mon Sep 17 00:00:00 2001 From: Shihab Suliman Date: Tue, 28 Apr 2026 17:27:13 +0100 Subject: [PATCH 25/30] chore: bump ophyd-async version --- pyproject.toml | 2 +- uv.lock | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index db38c7e5891..fd0086c6495 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ description = "Ophyd devices and other utils that could be used across DLS beaml dependencies = [ "click", "ophyd", - "ophyd-async[ca,pva]>=v0.17a1", + "ophyd-async[ca,pva]>=v0.17a4", "bluesky>=1.14.5", "pyepics", "pillow", diff --git a/uv.lock b/uv.lock index 8257da2fe15..47c094689eb 100644 --- a/uv.lock +++ b/uv.lock @@ -768,7 +768,7 @@ requires-dist = [ { name = "numpy" }, { name = "opencv-python-headless" }, { name = "ophyd" }, - { name = "ophyd-async", extras = ["ca", "pva"], specifier = ">=0.17a1" }, + { name = "ophyd-async", extras = ["ca", "pva"], specifier = ">=0.17a4" }, { name = "pillow" }, { name = "pydantic", specifier = ">=2.0" }, { name = "pyepics" }, @@ -1040,6 +1040,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/e8/2e1462c8fdbe0f210feb5ac7ad2d9029af8be3bf45bd9fa39765f821642f/greenlet-3.3.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:5fd23b9bc6d37b563211c6abbb1b3cab27db385a4449af5c32e932f93017080c", size = 274974, upload-time = "2026-01-23T15:31:02.891Z" }, { url = 
"https://files.pythonhosted.org/packages/7e/a8/530a401419a6b302af59f67aaf0b9ba1015855ea7e56c036b5928793c5bd/greenlet-3.3.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f51496a0bfbaa9d74d36a52d2580d1ef5ed4fdfcff0a73730abfbbbe1403dd", size = 577175, upload-time = "2026-01-23T16:00:56.213Z" }, { url = "https://files.pythonhosted.org/packages/8e/89/7e812bb9c05e1aaef9b597ac1d0962b9021d2c6269354966451e885c4e6b/greenlet-3.3.1-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb0feb07fe6e6a74615ee62a880007d976cf739b6669cce95daa7373d4fc69c5", size = 590401, upload-time = "2026-01-23T16:05:26.365Z" }, + { url = "https://files.pythonhosted.org/packages/70/ae/e2d5f0e59b94a2269b68a629173263fa40b63da32f5c231307c349315871/greenlet-3.3.1-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:67ea3fc73c8cd92f42467a72b75e8f05ed51a0e9b1d15398c913416f2dafd49f", size = 601161, upload-time = "2026-01-23T16:15:53.456Z" }, { url = "https://files.pythonhosted.org/packages/5c/ae/8d472e1f5ac5efe55c563f3eabb38c98a44b832602e12910750a7c025802/greenlet-3.3.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39eda9ba259cc9801da05351eaa8576e9aa83eb9411e8f0c299e05d712a210f2", size = 590272, upload-time = "2026-01-23T15:32:49.411Z" }, { url = "https://files.pythonhosted.org/packages/a8/51/0fde34bebfcadc833550717eade64e35ec8738e6b097d5d248274a01258b/greenlet-3.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e2e7e882f83149f0a71ac822ebf156d902e7a5d22c9045e3e0d1daf59cee2cc9", size = 1550729, upload-time = "2026-01-23T16:04:20.867Z" }, { url = "https://files.pythonhosted.org/packages/16/c9/2fb47bee83b25b119d5a35d580807bb8b92480a54b68fef009a02945629f/greenlet-3.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80aa4d79eb5564f2e0a6144fcc744b5a37c56c4a92d60920720e99210d88db0f", size = 1615552, upload-time = "2026-01-23T15:33:45.743Z" }, @@ -1048,6 +1049,7 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/f9/c8/9d76a66421d1ae24340dfae7e79c313957f6e3195c144d2c73333b5bfe34/greenlet-3.3.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7e806ca53acf6d15a888405880766ec84721aa4181261cd11a457dfe9a7a4975", size = 276443, upload-time = "2026-01-23T15:30:10.066Z" }, { url = "https://files.pythonhosted.org/packages/81/99/401ff34bb3c032d1f10477d199724f5e5f6fbfb59816ad1455c79c1eb8e7/greenlet-3.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d842c94b9155f1c9b3058036c24ffb8ff78b428414a19792b2380be9cecf4f36", size = 597359, upload-time = "2026-01-23T16:00:57.394Z" }, { url = "https://files.pythonhosted.org/packages/2b/bc/4dcc0871ed557792d304f50be0f7487a14e017952ec689effe2180a6ff35/greenlet-3.3.1-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20fedaadd422fa02695f82093f9a98bad3dab5fcda793c658b945fcde2ab27ba", size = 607805, upload-time = "2026-01-23T16:05:28.068Z" }, + { url = "https://files.pythonhosted.org/packages/3b/cd/7a7ca57588dac3389e97f7c9521cb6641fd8b6602faf1eaa4188384757df/greenlet-3.3.1-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c620051669fd04ac6b60ebc70478210119c56e2d5d5df848baec4312e260e4ca", size = 622363, upload-time = "2026-01-23T16:15:54.754Z" }, { url = "https://files.pythonhosted.org/packages/cf/05/821587cf19e2ce1f2b24945d890b164401e5085f9d09cbd969b0c193cd20/greenlet-3.3.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14194f5f4305800ff329cbf02c5fcc88f01886cadd29941b807668a45f0d2336", size = 609947, upload-time = "2026-01-23T15:32:51.004Z" }, { url = "https://files.pythonhosted.org/packages/a4/52/ee8c46ed9f8babaa93a19e577f26e3d28a519feac6350ed6f25f1afee7e9/greenlet-3.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7b2fe4150a0cf59f847a67db8c155ac36aed89080a6a639e9f16df5d6c6096f1", size = 1567487, upload-time = "2026-01-23T16:04:22.125Z" }, { url = 
"https://files.pythonhosted.org/packages/8f/7c/456a74f07029597626f3a6db71b273a3632aecb9afafeeca452cfa633197/greenlet-3.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49f4ad195d45f4a66a0eb9c1ba4832bb380570d361912fa3554746830d332149", size = 1636087, upload-time = "2026-01-23T15:33:47.486Z" }, @@ -1056,6 +1058,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/ab/d26750f2b7242c2b90ea2ad71de70cfcd73a948a49513188a0fc0d6fc15a/greenlet-3.3.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:7ab327905cabb0622adca5971e488064e35115430cec2c35a50fd36e72a315b3", size = 275205, upload-time = "2026-01-23T15:30:24.556Z" }, { url = "https://files.pythonhosted.org/packages/10/d3/be7d19e8fad7c5a78eeefb2d896a08cd4643e1e90c605c4be3b46264998f/greenlet-3.3.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:65be2f026ca6a176f88fb935ee23c18333ccea97048076aef4db1ef5bc0713ac", size = 599284, upload-time = "2026-01-23T16:00:58.584Z" }, { url = "https://files.pythonhosted.org/packages/ae/21/fe703aaa056fdb0f17e5afd4b5c80195bbdab701208918938bd15b00d39b/greenlet-3.3.1-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7a3ae05b3d225b4155bda56b072ceb09d05e974bc74be6c3fc15463cf69f33fd", size = 610274, upload-time = "2026-01-23T16:05:29.312Z" }, + { url = "https://files.pythonhosted.org/packages/06/00/95df0b6a935103c0452dad2203f5be8377e551b8466a29650c4c5a5af6cc/greenlet-3.3.1-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:12184c61e5d64268a160226fb4818af4df02cfead8379d7f8b99a56c3a54ff3e", size = 624375, upload-time = "2026-01-23T16:15:55.915Z" }, { url = "https://files.pythonhosted.org/packages/cb/86/5c6ab23bb3c28c21ed6bebad006515cfe08b04613eb105ca0041fecca852/greenlet-3.3.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6423481193bbbe871313de5fd06a082f2649e7ce6e08015d2a76c1e9186ca5b3", size = 612904, upload-time = "2026-01-23T15:32:52.317Z" }, { url = 
"https://files.pythonhosted.org/packages/c2/f3/7949994264e22639e40718c2daf6f6df5169bf48fb038c008a489ec53a50/greenlet-3.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:33a956fe78bbbda82bfc95e128d61129b32d66bcf0a20a1f0c08aa4839ffa951", size = 1567316, upload-time = "2026-01-23T16:04:23.316Z" }, { url = "https://files.pythonhosted.org/packages/8d/6e/d73c94d13b6465e9f7cd6231c68abde838bb22408596c05d9059830b7872/greenlet-3.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b065d3284be43728dd280f6f9a13990b56470b81be20375a207cdc814a983f2", size = 1636549, upload-time = "2026-01-23T15:33:48.643Z" }, @@ -1064,6 +1067,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ae/fb/011c7c717213182caf78084a9bea51c8590b0afda98001f69d9f853a495b/greenlet-3.3.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:bd59acd8529b372775cd0fcbc5f420ae20681c5b045ce25bd453ed8455ab99b5", size = 275737, upload-time = "2026-01-23T15:32:16.889Z" }, { url = "https://files.pythonhosted.org/packages/41/2e/a3a417d620363fdbb08a48b1dd582956a46a61bf8fd27ee8164f9dfe87c2/greenlet-3.3.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b31c05dd84ef6871dd47120386aed35323c944d86c3d91a17c4b8d23df62f15b", size = 646422, upload-time = "2026-01-23T16:01:00.354Z" }, { url = "https://files.pythonhosted.org/packages/b4/09/c6c4a0db47defafd2d6bab8ddfe47ad19963b4e30f5bed84d75328059f8c/greenlet-3.3.1-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:02925a0bfffc41e542c70aa14c7eda3593e4d7e274bfcccca1827e6c0875902e", size = 658219, upload-time = "2026-01-23T16:05:30.956Z" }, + { url = "https://files.pythonhosted.org/packages/e2/89/b95f2ddcc5f3c2bc09c8ee8d77be312df7f9e7175703ab780f2014a0e781/greenlet-3.3.1-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3e0f3878ca3a3ff63ab4ea478585942b53df66ddde327b59ecb191b19dbbd62d", size = 671455, upload-time = "2026-01-23T16:15:57.232Z" }, { url = 
"https://files.pythonhosted.org/packages/80/38/9d42d60dffb04b45f03dbab9430898352dba277758640751dc5cc316c521/greenlet-3.3.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34a729e2e4e4ffe9ae2408d5ecaf12f944853f40ad724929b7585bca808a9d6f", size = 660237, upload-time = "2026-01-23T15:32:53.967Z" }, { url = "https://files.pythonhosted.org/packages/96/61/373c30b7197f9e756e4c81ae90a8d55dc3598c17673f91f4d31c3c689c3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aec9ab04e82918e623415947921dea15851b152b822661cce3f8e4393c3df683", size = 1615261, upload-time = "2026-01-23T16:04:25.066Z" }, { url = "https://files.pythonhosted.org/packages/fd/d3/ca534310343f5945316f9451e953dcd89b36fe7a19de652a1dc5a0eeef3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:71c767cf281a80d02b6c1bdc41c9468e1f5a494fb11bc8688c360524e273d7b1", size = 1683719, upload-time = "2026-01-23T15:33:50.61Z" }, @@ -1072,6 +1076,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/28/24/cbbec49bacdcc9ec652a81d3efef7b59f326697e7edf6ed775a5e08e54c2/greenlet-3.3.1-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:3e63252943c921b90abb035ebe9de832c436401d9c45f262d80e2d06cc659242", size = 282706, upload-time = "2026-01-23T15:33:05.525Z" }, { url = "https://files.pythonhosted.org/packages/86/2e/4f2b9323c144c4fe8842a4e0d92121465485c3c2c5b9e9b30a52e80f523f/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76e39058e68eb125de10c92524573924e827927df5d3891fbc97bd55764a8774", size = 651209, upload-time = "2026-01-23T16:01:01.517Z" }, { url = "https://files.pythonhosted.org/packages/d9/87/50ca60e515f5bb55a2fbc5f0c9b5b156de7d2fc51a0a69abc9d23914a237/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9f9d5e7a9310b7a2f416dd13d2e3fd8b42d803968ea580b7c0f322ccb389b97", size = 654300, upload-time = "2026-01-23T16:05:32.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/25/c51a63f3f463171e09cb586eb64db0861eb06667ab01a7968371a24c4f3b/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b9721549a95db96689458a1e0ae32412ca18776ed004463df3a9299c1b257ab", size = 662574, upload-time = "2026-01-23T16:15:58.364Z" }, { url = "https://files.pythonhosted.org/packages/1d/94/74310866dfa2b73dd08659a3d18762f83985ad3281901ba0ee9a815194fb/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92497c78adf3ac703b57f1e3813c2d874f27f71a178f9ea5887855da413cd6d2", size = 653842, upload-time = "2026-01-23T15:32:55.671Z" }, { url = "https://files.pythonhosted.org/packages/97/43/8bf0ffa3d498eeee4c58c212a3905dd6146c01c8dc0b0a046481ca29b18c/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ed6b402bc74d6557a705e197d47f9063733091ed6357b3de33619d8a8d93ac53", size = 1614917, upload-time = "2026-01-23T16:04:26.276Z" }, { url = "https://files.pythonhosted.org/packages/89/90/a3be7a5f378fc6e84abe4dcfb2ba32b07786861172e502388b4c90000d1b/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:59913f1e5ada20fde795ba906916aea25d442abcc0593fba7e26c92b7ad76249", size = 1676092, upload-time = "2026-01-23T15:33:52.176Z" }, @@ -2089,7 +2094,7 @@ wheels = [ [[package]] name = "ophyd-async" -version = "0.17a1" +version = "0.17a4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "bluesky" }, @@ -2103,9 +2108,9 @@ dependencies = [ { name = "stamina" }, { name = "velocity-profile" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/da/daa89b14b50d332619cd2144c64b58d49152fb16997b42fb5a1a68567865/ophyd_async-0.17a1.tar.gz", hash = "sha256:2c5677c825e6a096043be3b9a7342838377fae1dc404b4d7bbc1aa82e628fff6", size = 545385, upload-time = "2026-03-05T17:41:25.798Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/99/e1/2f0de5ded648fe9ca2e6f7415b9229ecb53226a56e775028410fddf0e7c7/ophyd_async-0.17a4.tar.gz", hash = "sha256:0ac0a890499700f8d6024a272ee616ddf278c42c2a28c3bf2d8ee80b5313a94d", size = 554974, upload-time = "2026-04-28T16:34:04.143Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/d8/a845b2f2f622ab88b521688e58925b9e19cfccb235e2c2b9ad19d83b2214/ophyd_async-0.17a1-py3-none-any.whl", hash = "sha256:4579866f0a957f2c072382aa14c9b4d8dea5dc46e3072516bc17fc7af56a1692", size = 205987, upload-time = "2026-03-05T17:41:24.499Z" }, + { url = "https://files.pythonhosted.org/packages/71/59/a4ca5bd2ef8baa92733260634ba9fa6947fd88c6e7b909511964ac6406e1/ophyd_async-0.17a4-py3-none-any.whl", hash = "sha256:8dcaad6a851883c6906dda998c159a366b50b38310e8de87f385a5794c482792", size = 207790, upload-time = "2026-04-28T16:34:02.379Z" }, ] [package.optional-dependencies] From 6934ce22fc5821518e632b1efc609885e4ebf279 Mon Sep 17 00:00:00 2001 From: Shihab Suliman Date: Tue, 28 Apr 2026 18:14:51 +0100 Subject: [PATCH 26/30] test: amend test assertion and mock tetram --- src/dodal/devices/tetramm.py | 2 +- tests/devices/insertion_device/test_energy.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dodal/devices/tetramm.py b/src/dodal/devices/tetramm.py index 264516b41be..fcda6b3d149 100644 --- a/src/dodal/devices/tetramm.py +++ b/src/dodal/devices/tetramm.py @@ -187,7 +187,7 @@ async def prepare(self, value: TriggerInfo): and self._arm_logic is not None ): # if freerun turn off first LOGGER.info("Disarming TetrAMM from free run") - await self._arm_logic.disarm() + await self._arm_logic.disarm(on_unstage=False) await super().prepare(value) self._validate_deadtime(value) diff --git a/tests/devices/insertion_device/test_energy.py b/tests/devices/insertion_device/test_energy.py index 3cdddd96e8a..1afd2421eef 100644 --- a/tests/devices/insertion_device/test_energy.py +++ b/tests/devices/insertion_device/test_energy.py 
@@ -178,7 +178,7 @@ async def test_beam_energy_kickoff_set_correct_delay( mock_pgm.energy.kickoff = AsyncMock() await mock_beam_energy.prepare(fly_info) await mock_beam_energy.kickoff() - mock_sleep.assert_awaited_once_with(pgm_acc_time - id_acc_time) + mock_sleep.assert_called_with(pgm_acc_time - id_acc_time) mock_id_gap.kickoff.assert_awaited_once() mock_pgm.energy.kickoff.assert_awaited_once() From 59782f44ed9d970213053863d43069cfd795a131 Mon Sep 17 00:00:00 2001 From: Shihab Suliman Date: Tue, 28 Apr 2026 18:25:39 +0100 Subject: [PATCH 27/30] tests: relax test shutter type signature --- .../devices/electron_analyser/base/test_detector_logic.py | 8 ++++---- tests/devices/test_tetramm.py | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/devices/electron_analyser/base/test_detector_logic.py b/tests/devices/electron_analyser/base/test_detector_logic.py index dc8623d9eb0..6f38dc50626 100644 --- a/tests/devices/electron_analyser/base/test_detector_logic.py +++ b/tests/devices/electron_analyser/base/test_detector_logic.py @@ -27,7 +27,7 @@ ) from dodal.devices.electron_analyser.specs import SpecsAnalyserDriverIO from dodal.devices.electron_analyser.vgscienta import VGScientaAnalyserDriverIO -from dodal.devices.fast_shutter import FastShutter +from dodal.devices.fast_shutter import GenericFastShutter from dodal.devices.selectable_source import SourceSelector from tests.devices.electron_analyser.helper_util import ( TEST_SEQUENCE_REGION_NAMES, @@ -57,7 +57,7 @@ def driver(request: pytest.FixtureRequest) -> AbstractAnalyserDriverIO: @pytest.fixture(params=["shutter1", "dual_fast_shutter"]) -def shutter(request: pytest.FixtureRequest) -> FastShutter: +def shutter(request: pytest.FixtureRequest) -> GenericFastShutter: return request.getfixturevalue(request.param) @@ -70,9 +70,9 @@ def shutter(request: pytest.FixtureRequest) -> FastShutter: ) async def test_shutter_arm_logic_opens_shutters( driver: AbstractAnalyserDriverIO, - shutter: 
FastShutter, + shutter: GenericFastShutter, close_shutter_idle: bool, - expected_shutter_calls: Callable[[FastShutter], list[Any]], + expected_shutter_calls: Callable[[GenericFastShutter], list[Any]], ): with init_devices(mock=True): close_shutter_idle_signal = soft_signal_rw(bool, close_shutter_idle) diff --git a/tests/devices/test_tetramm.py b/tests/devices/test_tetramm.py index d9737454e84..b8ba09a05de 100644 --- a/tests/devices/test_tetramm.py +++ b/tests/devices/test_tetramm.py @@ -163,7 +163,7 @@ async def test_disarm_disarms_driver( ) ) assert (await tetramm.driver.acquire.get_value()) == 1 - await tetramm._arm_logic.disarm() # type: ignore + await tetramm._arm_logic.disarm(on_unstage=False) # type: ignore assert (await tetramm.driver.acquire.get_value()) == 0 @@ -280,7 +280,7 @@ async def test_tetramm_controller( assert await tetramm.driver.acquire.get_value() is True - await tetramm._arm_logic.disarm() # type: ignore + await tetramm._arm_logic.disarm(on_unstage=False) # type: ignore assert await tetramm.driver.acquire.get_value() is False @@ -330,5 +330,5 @@ async def test_tetramm_disarm_calls_stop_busy_recording( stop_busy_record_mock: MagicMock, tetramm: TetrammDetector, ): - await tetramm._arm_logic.disarm() # type: ignore + await tetramm._arm_logic.disarm(on_unstage=False) # type: ignore stop_busy_record_mock.assert_called_once() From 5e440624f506b63d848c33291f599155b983cdac Mon Sep 17 00:00:00 2001 From: Shihab Suliman Date: Tue, 28 Apr 2026 19:12:15 +0100 Subject: [PATCH 28/30] tests: add tiff and tetram tests to fix codecov --- tests/beamlines/test_b16.py | 35 +++++++++++++++++++++++++++++++++-- tests/devices/test_tetramm.py | 8 ++++++++ 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/tests/beamlines/test_b16.py b/tests/beamlines/test_b16.py index 72903b7ef33..417c93c8c12 100644 --- a/tests/beamlines/test_b16.py +++ b/tests/beamlines/test_b16.py @@ -1,9 +1,14 @@ -from unittest.mock import MagicMock, patch +from unittest.mock import 
AsyncMock, MagicMock, patch +import pytest +from ophyd_async.core import SignalDict from ophyd_async.epics.adcore import ADWriterType from dodal.common.beamlines.device_helpers import CAM_SUFFIX, TIFF_SUFFIX -from dodal.devices.beamlines.b16.detector import software_triggered_tiff_area_detector +from dodal.devices.beamlines.b16.detector import ( + TiffTriggerLogic, + software_triggered_tiff_area_detector, +) def test_software_triggered_tiff_area_detector_calls_with_io_correctly(): @@ -66,3 +71,29 @@ def test_software_triggered_tiff_area_detector_calls_with_io_correctly(): # The function should return the AreaDetector instance assert result is mock_area_detector_instance + + +def test_tiff_trigger_logic_get_deadtime(): + driver = MagicMock() + logic = TiffTriggerLogic(driver, deadtime=0.123) + result = logic.get_deadtime(SignalDict()) + assert result == 0.123 + + +@pytest.mark.asyncio +async def test_tiff_trigger_logic_prepare_internal_calls_prepare_exposures(): + driver = MagicMock() + logic = TiffTriggerLogic(driver, deadtime=0.5) + + with patch( + "dodal.devices.beamlines.b16.detector.prepare_exposures", + new_callable=AsyncMock, + ) as mock_prepare: + await logic.prepare_internal(num=5, livetime=1.0, deadtime=0.2) + + mock_prepare.assert_awaited_once_with( + driver, + 5, + 1.0, + 0.2, + ) diff --git a/tests/devices/test_tetramm.py b/tests/devices/test_tetramm.py index b8ba09a05de..d162570a0f7 100644 --- a/tests/devices/test_tetramm.py +++ b/tests/devices/test_tetramm.py @@ -189,6 +189,14 @@ async def test_prepare_with_too_low_a_deadtime_raises_error( ) +async def test_validate_deadtime_raises_error_if_no_trigger_logic( + tetramm: TetrammDetector, +): + tetramm._trigger_logic = None + with pytest.raises(RuntimeError): + tetramm._validate_deadtime(MagicMock()) + + async def test_prepare_arms_tetramm(tetramm: TetrammDetector): trigger_info = TriggerInfo( number_of_events=5, From 32e05ecf9f64eed8cf3b9293b14b4b446c9bb24a Mon Sep 17 00:00:00 2001 From: Oli Wenman 
Date: Wed, 29 Apr 2026 13:11:12 +0000 Subject: [PATCH 29/30] Remove commented out code --- src/dodal/beamlines/b21.py | 6 ----- src/dodal/devices/beamlines/i11/mythen.py | 24 ------------------- .../electron_analyser/base/__init__.py | 14 ----------- src/dodal/devices/tetramm.py | 4 ++-- 4 files changed, 2 insertions(+), 46 deletions(-) diff --git a/src/dodal/beamlines/b21.py b/src/dodal/beamlines/b21.py index 18715f8a07e..4c7bb48bf26 100644 --- a/src/dodal/beamlines/b21.py +++ b/src/dodal/beamlines/b21.py @@ -40,9 +40,6 @@ def saxs(path_provider: PathProvider) -> EigerDetector: return EigerDetector( prefix=PREFIX.beamline_prefix, path_provider=path_provider, - # driver_suffix="-EA-EIGER-01:", - # hdf_suffix="-EA-EIGER-01:OD:", - # odin_nodes=1, ) @@ -51,9 +48,6 @@ def waxs(path_provider: PathProvider) -> EigerDetector: return EigerDetector( prefix=PREFIX.beamline_prefix, path_provider=path_provider, - # drv_suffix="-EA-EIGER-02:", - # hdf_suffix="-EA-EIGER-02:OD:", - # odin_nodes=1, ) diff --git a/src/dodal/devices/beamlines/i11/mythen.py b/src/dodal/devices/beamlines/i11/mythen.py index f8fddc0012c..1bf882a38f4 100644 --- a/src/dodal/devices/beamlines/i11/mythen.py +++ b/src/dodal/devices/beamlines/i11/mythen.py @@ -100,30 +100,6 @@ def __init__(self, driver: Mythen3Driver): def get_deadtime(self, config_values) -> float: return _DEADTIMES[_BIT_DEPTH] - # async def prepare(self, trigger_info: TriggerInfo) -> None: - # if (exposure := trigger_info.livetime) is not None: - # await self._driver.acquire_time.set(exposure) - - # if trigger_info.trigger is DetectorTrigger.INTERNAL: - # await self._driver.trigger_mode.set(Mythen3TriggerMode.INTERNAL) - # elif trigger_info.trigger in { - # DetectorTrigger.EXTERNAL_LEVEL, - # DetectorTrigger.EXTERNAL_EDGE, - # DetectorTrigger.EXTERNAL_LEVEL, - # }: - # await self._driver.trigger_mode.set(Mythen3TriggerMode.EXTERNAL) - # else: - # raise ValueError(f"Mythen3 does not support {trigger_info.trigger}") - - # if 
trigger_info.number_of_exposures == 0: - # image_mode = ADImageMode.CONTINUOUS - # else: - # image_mode = ADImageMode.MULTIPLE - # await asyncio.gather( - # self._driver.num_images.set(trigger_info.number_of_exposures), - # self._driver.image_mode.set(image_mode), - # ) - async def prepare_internal(self, num: int, livetime: float, deadtime: float): if livetime: await self._driver.acquire_time.set(livetime) diff --git a/src/dodal/devices/electron_analyser/base/__init__.py b/src/dodal/devices/electron_analyser/base/__init__.py index e32e1c9e512..45310a1e8fd 100644 --- a/src/dodal/devices/electron_analyser/base/__init__.py +++ b/src/dodal/devices/electron_analyser/base/__init__.py @@ -1,14 +1,6 @@ -# from .base_controller import ( -# ElectronAnalyserController, -# GenericElectronAnalyserController, -# ) from .base_detector import ( - # BaseElectronAnalyserDetector, ElectronAnalyserDetector, - # ElectronAnalyserRegionDetector, GenericElectronAnalyserDetector, - # GenericElectronAnalyserDetector, - # GenericElectronAnalyserRegionDetector, ) from .base_driver_io import ( AbstractAnalyserDriverIO, @@ -30,14 +22,8 @@ from .energy_sources import AbstractEnergySource, DualEnergySource, EnergySource __all__ = [ - # "ElectronAnalyserController", - # "GenericElectronAnalyserController", - # "BaseElectronAnalyserDetector", "ElectronAnalyserDetector", - # "ElectronAnalyserRegionDetector", "GenericElectronAnalyserDetector", - # "GenericElectronAnalyserDetector", - # "GenericElectronAnalyserRegionDetector", "AbstractAnalyserDriverIO", "GenericAnalyserDriverIO", "TAbstractAnalyserDriverIO", diff --git a/src/dodal/devices/tetramm.py b/src/dodal/devices/tetramm.py index fcda6b3d149..e1b2dc33479 100644 --- a/src/dodal/devices/tetramm.py +++ b/src/dodal/devices/tetramm.py @@ -133,8 +133,8 @@ def __init__( self, prefix: str, path_provider: PathProvider, - drv_suffix="DRV:", - fileio_suffix="HDF5:", + drv_suffix: str = "DRV:", + fileio_suffix: str = "HDF5:", plugins: 
Sequence[NDPluginBaseIO] = [], name: str = "", ): From 26e4fe1cc2161f011391a01ee235d63407727617 Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Mon, 11 May 2026 16:02:38 +0100 Subject: [PATCH 30/30] Fix eiger PV prefixes --- src/dodal/beamlines/b21.py | 10 ++++++---- src/dodal/beamlines/i03.py | 6 ++++-- src/dodal/beamlines/i11.py | 3 ++- src/dodal/beamlines/i19_2.py | 2 +- 4 files changed, 13 insertions(+), 8 deletions(-) diff --git a/src/dodal/beamlines/b21.py b/src/dodal/beamlines/b21.py index 4c7bb48bf26..b5e2a6340ee 100644 --- a/src/dodal/beamlines/b21.py +++ b/src/dodal/beamlines/b21.py @@ -35,18 +35,20 @@ def path_provider() -> PathProvider: return StaticPathProvider(UUIDFilenameProvider(), Path("/tmp")) -@devices.factory() +# Needs the fastCS Eiger/odin, see https://jira.diamond.ac.uk/browse/B21-330 +@devices.factory(skip=True) def saxs(path_provider: PathProvider) -> EigerDetector: return EigerDetector( - prefix=PREFIX.beamline_prefix, + prefix=f"{PREFIX.beamline_prefix}-EA-EIGER-01:", path_provider=path_provider, ) -@devices.factory() +# Needs the fastCS Eiger/odin, see https://jira.diamond.ac.uk/browse/B21-330 +@devices.factory(skip=True) def waxs(path_provider: PathProvider) -> EigerDetector: return EigerDetector( - prefix=PREFIX.beamline_prefix, + prefix=f"{PREFIX.beamline_prefix}-EA-EIGER-02:", path_provider=path_provider, ) diff --git a/src/dodal/beamlines/i03.py b/src/dodal/beamlines/i03.py index e68b92c7299..4de85cac777 100644 --- a/src/dodal/beamlines/i03.py +++ b/src/dodal/beamlines/i03.py @@ -186,10 +186,12 @@ def eiger(eiger: EigerDetector) -> EigerDetector: return eiger -@devices.factory() +# ophyd-async no longer works with a mixed ADOdin and fastCS Eiger. 
Need to update the +# beamline to use a fastCS Odin and Eiger +@devices.factory(skip=True) def fastcs_eiger(path_provider: PathProvider) -> FastEiger: return FastEiger( - prefix=PREFIX.beamline_prefix, + prefix=f"{PREFIX.beamline_prefix}-EA-EIGER-02:", path_provider=path_provider, ) diff --git a/src/dodal/beamlines/i11.py b/src/dodal/beamlines/i11.py index 15274a74623..aa791d52af9 100644 --- a/src/dodal/beamlines/i11.py +++ b/src/dodal/beamlines/i11.py @@ -43,7 +43,8 @@ def path_provider() -> PathProvider: return StaticPathProvider(UUIDFilenameProvider(), Path("/tmp")) -@devices.factory() +# Mythen detector state does not match ophyd-async see https://jira.diamond.ac.uk/browse/I11-916 +@devices.factory(skip=True) def mythen3(path_provider: PathProvider) -> Mythen3: """Mythen3 Detector from PSI.""" return Mythen3( diff --git a/src/dodal/beamlines/i19_2.py b/src/dodal/beamlines/i19_2.py index 35016d9629e..d341c764e8a 100644 --- a/src/dodal/beamlines/i19_2.py +++ b/src/dodal/beamlines/i19_2.py @@ -88,7 +88,7 @@ def diffractometer() -> FourCircleDiffractometer: @devices.factory() def eiger(path_provider: PathProvider) -> EigerDetector: return EigerDetector( - prefix=PREFIX.beamline_prefix, + prefix=f"{PREFIX.beamline_prefix}-EA-EIGER-01:", path_provider=path_provider, )