workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00004396.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0
* maximum_value float64 [deg] () 0
* minimum_value float64 [deg] () 0
* time datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
Test: nexusfiles-scipp|nmx|nmx_read_detector_everything|detector_panel_1
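The check that raises is reproduced below in isolation. This is a minimal sketch, assuming only that _time_filter behaves as shown in the traceback above; the DataArray merely mimics the shape of the offending transformation log (Sizes[time:0]) and omits coordinates, since the check inspects only ndim and sizes. The log that reaches _time_filter after the slicing at workflow.py:340 has zero entries along 'time', so neither early-return branch applies and the ValueError is raised.

# Minimal reproduction sketch (standalone script; _time_filter copied verbatim
# from the traceback above, not imported from ess.reduce).
import numpy as np
import scipp as sc


def _time_filter(transform: sc.DataArray) -> sc.Variable:
    # Accepts a scalar transform or a single-entry time log, rejects anything else.
    if transform.ndim == 0 or transform.sizes == {'time': 1}:
        return transform.data.squeeze()
    raise ValueError(
        f"Transform is time-dependent: {transform}, but no filter is provided."
    )


# A transformation log with zero entries along 'time', matching the repr printed
# by pytest above (Sizes[time:0]). Coordinates are omitted: the check never reads them.
empty_log = sc.DataArray(
    data=sc.array(dims=['time'], values=np.empty(0), unit='deg')
)
print(empty_log.ndim, empty_log.sizes)  # 1 {'time': 0} -> neither branch matches

try:
    _time_filter(empty_log)
except ValueError as err:
    print('raised:', err)  # same error as in the failures reported here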
The remaining 18 failures of nexusfiles-scipp|nmx|nmx_read_detector_everything|detector_panel_1 produce the identical traceback: the same ValueError ("Transform is time-dependent: ..., but no filter is provided.") raised by _time_filter at ess/reduce/nexus/workflow.py:294, reached through to_transformation at workflow.py:340, with the same zero-length transformation log (Sizes[time:0]). Only coda_nexus_file_path differs:

/ess/data/coda/2025/999999/raw/nmx_999999_00004249.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003962.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003890.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003750.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003603.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003463.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003323.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003183.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00003043.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00002903.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00002763.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00002623.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00002483.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00002336.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00002196.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00002056.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00001916.hdf
/ess/data/coda/2025/999999/raw/nmx_999999_00001292.hdf
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00001215.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00001075.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00000935.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00000858.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00000851.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00000375.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00000228.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00000088.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/977695/raw/nmx_977695_00086384.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00085904.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00085806.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00085673.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00085526.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00085393.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00085253.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00085113.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/nmx_999999_00084970.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/999999_00084826.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
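
One possible direction for handling these files, shown purely as a hypothetical workaround and not as the ess.reduce fix: the repr above shows that the empty log still carries the scalar NXlog summary coordinates average_value, maximum_value and minimum_value, so a caller could fall back to average_value when the 'time' axis is empty. Whether that is acceptable for the NMX detector rotation is an assumption that would need confirming against the instrument; the helper name resolve_transform_value is made up for this sketch.

import scipp as sc


def resolve_transform_value(transform: sc.DataArray) -> sc.Variable:
    # Same accepted cases as _time_filter in the traceback.
    if transform.ndim == 0 or transform.sizes == {'time': 1}:
        return transform.data.squeeze()
    # Hypothetical fallback: an empty log that still carries the NXlog summary
    # value. Using it assumes the average is representative for this panel.
    if (
        'time' in transform.dims
        and transform.sizes['time'] == 0
        and 'average_value' in transform.coords
    ):
        return transform.coords['average_value']
    raise ValueError(
        f"Transform is time-dependent: {transform}, but no filter is provided."
    )
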
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/999999_00084690.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/999999_00084550.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/999999_00084401.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/999999_00084261.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/999999_00084128.hdf')
detector_name = 'detector_panel_1'
check_everything_events =
@pytest.mark.parametrize("detector_name", [f"detector_panel_{i}" for i in range(3)])
def test_nmx_read_detector_everything(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
check_everything_events: Callable,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
> result = workflow.compute(DetectorData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/nmx/nmx_load_nexus_test.py:82:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-nmx/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
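
To confirm the root cause at the file level, the sketch below opens one of the files from the failures above with h5py and prints the lengths of the 'time'/'value' datasets of every NXlog group found under a 'transformations' path. The chosen path and the group-matching heuristic are assumptions (the report does not show the file layout), so adjust them to the actual CODA structure if they do not match.

import h5py

# One of the files from the failures above; adjust as needed.
path = "/ess/data/coda/2025/999999/raw/999999_00084128.hdf"


def nx_class(obj) -> str:
    # NX_class attributes may be stored as bytes or str depending on the writer.
    raw = obj.attrs.get("NX_class", b"")
    return raw.decode() if isinstance(raw, bytes) else str(raw)


def report(name, obj):
    # Heuristic: NXlog groups below a 'transformations' group hold the
    # time-dependent transformation values (e.g. the detector rotation).
    if (
        isinstance(obj, h5py.Group)
        and nx_class(obj) == "NXlog"
        and "transformations" in name
    ):
        time = obj.get("time")
        value = obj.get("value")
        print(
            name,
            "time:", None if time is None else time.shape,
            "value:", None if value is None else value.shape,
        )


with h5py.File(path, "r") as f:
    f.visititems(report)
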