Test: nexusfiles-scipp|tbl|can_compute_tof|multiblade_detector
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00010134.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:423: in assemble_detector_data
neutron_data = nexus.group_event_data(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
def group_event_data(
*, event_data: sc.DataArray, detector_number: sc.Variable
) -> sc.DataArray:
"""Group event data by detector number.
The detector_number variable also defines the output shape and dimension names.
Parameters
----------
event_data:
Data array with events to group, as returned from :py:func:`load_event_data`.
detector_number:
Variable with detector numbers matching the `event_id` field of the event data.
Returns
-------
:
Data array with events grouped by detector number.
"""
event_id = detector_number.flatten(to='event_id').copy()
constituents = event_data.bins.constituents
begin = constituents['begin']
end = constituents['end']
data = constituents['data'].copy(deep=False)
if 'event_time_zero' in event_data.coords:
data.coords['event_time_zero'] = sc.bins_like(
event_data, fill_value=event_data.coords['event_time_zero']
).bins.constituents['data']
# After loading raw NXevent_data it is guaranteed that the event table
# is contiguous and that there is no masking. We can therefore use the
# more efficient approach of binning from scratch instead of erasing the
# 'event_time_zero' binning defined by NXevent_data. This sanity check should
# therefore always pass unless some unusual modifications were performed.
if (
event_data.masks
> or begin[0] != sc.index(0)
^^^^^^^^
or end[-1] != sc.index(data.sizes[data.dim])
or (begin[1:] != end[:-1]).any()
):
E IndexError: The requested index 0 is out of range. Dimension size is 0 and the allowed range is [0:-1].
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/_nexus_loader.py:544: IndexError
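Note on the failure above: the IndexError comes from the sanity check in group_event_data, before any grouping happens. The "Dimension size is 0" message indicates that the multiblade_detector event data in this file contains no pulses, so the 'begin' index array of the binned events is empty and begin[0] is already out of range. A minimal sketch of the same indexing behaviour in scipp; the empty 'begin' array is an assumption inferred from the error message, not read from the file:

import scipp as sc

# Hypothetical stand-in for constituents['begin'] when the NXevent_data group
# recorded no pulses: an index array of length zero along 'event_time_zero'.
begin = sc.array(dims=['event_time_zero'], values=[], dtype='int64')

# Same indexing as the sanity check performs:
begin[0]  # IndexError: The requested index 0 is out of range. Dimension size is 0 ...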
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00009714.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:423: in assemble_detector_data
neutron_data = nexus.group_event_data(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
def group_event_data(
*, event_data: sc.DataArray, detector_number: sc.Variable
) -> sc.DataArray:
"""Group event data by detector number.
The detector_number variable also defines the output shape and dimension names.
Parameters
----------
event_data:
Data array with events to group, as returned from :py:func:`load_event_data`.
detector_number:
Variable with detector numbers matching the `event_id` field of the event data.
Returns
-------
:
Data array with events grouped by detector number.
"""
event_id = detector_number.flatten(to='event_id').copy()
constituents = event_data.bins.constituents
begin = constituents['begin']
end = constituents['end']
data = constituents['data'].copy(deep=False)
if 'event_time_zero' in event_data.coords:
data.coords['event_time_zero'] = sc.bins_like(
event_data, fill_value=event_data.coords['event_time_zero']
).bins.constituents['data']
# After loading raw NXevent_data it is guaranteed that the event table
# is contiguous and that there is no masking. We can therefore use the
# more efficient approach of binning from scratch instead of erasing the
# 'event_time_zero' binning defined by NXevent_data. This sanity check should
# therefore always pass unless some unusual modifications were performed.
if (
event_data.masks
> or begin[0] != sc.index(0)
^^^^^^^^
or end[-1] != sc.index(data.sizes[data.dim])
or (begin[1:] != end[:-1]).any()
):
E IndexError: The requested index 0 is out of range. Dimension size is 0 and the allowed range is [0:-1].
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/_nexus_loader.py:544: IndexError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00008167.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
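Note on the ValueError above: to_transformation slices the transformation's NXlog to the run interval and then expects a static value. Here the sliced log has Sizes[time:0], i.e. the rotation log for this transformation has no samples inside the run, so it is neither a scalar (ndim == 0) nor a single-valued log (sizes == {'time': 1}) and _time_filter rejects it. A minimal sketch of that condition, assuming only the empty 'time' axis shown in the error message; the variable name is illustrative, not from the file:

import scipp as sc

# A motion log with zero samples after slicing to the run interval.
transform = sc.DataArray(sc.array(dims=['time'], values=[], unit='deg'))

print(transform.ndim)                  # 1 -> not a static transformation
print(transform.sizes == {'time': 1})  # False -> not a single-valued log either
# _time_filter therefore raises:
#   ValueError: Transform is time-dependent: ..., but no filter is provided.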
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00007887.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00007747.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00007614.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00007474.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00007334.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00007194.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00007061.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00006928.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00006781.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00006648.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00006508.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00006368.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00006221.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00006081.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError
The same traceback, ending in the same ValueError from _time_filter (raised at ess/reduce/nexus/workflow.py:292, reached via to_transformation at workflow.py:338), is produced for every remaining TofDetector[SampleRun] parametrization of test_can_compute_tof with detector_name = 'multiblade_detector'; only the input file differs:

coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/tbl_999999_00005941.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00005790.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00005658.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00005525.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00005385.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00005245.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00005105.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00004958.hdf')

The next case, which computes DetectorTofData[SampleRun], is shown in full:
workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00004678.hdf')
detector_name = 'multiblade_detector'
@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()
> result = workflow.compute(DetectorTofData[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:42:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:340: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0
* maximum_value float64 [deg] () 0
* minimum_value float64 [deg] () 0
* time datetime64 [ns] (time) []
Data:
float64 [deg] (time) []
def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:294: ValueError
The remaining multiblade_detector parametrizations, which also compute DetectorTofData[SampleRun], fail identically (to_transformation at ess/reduce/nexus/workflow.py:340, ValueError from _time_filter at workflow.py:294); only the input file differs:

coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00004538.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00004391.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00004251.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00004104.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00003964.hdf')
coda_nexus_file_path = PosixPath('/ess/data/coda/2025/999999/raw/tbl_999999_00003892.hdf')
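Every multiblade_detector case in this section fails on the same condition: the transformation log read from the file has no recorded samples (Sizes[time:0]), which _time_filter does not treat as time-independent. Below is a hypothetical local relaxation of that check, for illustration only; it is not the actual ess.reduce implementation or an endorsed fix, and the fallback to the 'average_value' coordinate (visible in the repr above) is an assumption about what a reasonable static value would be:

import scipp as sc

def _time_filter_relaxed(transform: sc.DataArray) -> sc.Variable:
    # Same accepted cases as the original check in the traceback.
    if transform.ndim == 0 or transform.sizes == {'time': 1}:
        return transform.data.squeeze()
    # Hypothetical extra case: a log with zero samples is also treated as
    # static, using the writer-provided 'average_value' coordinate.
    if transform.sizes == {'time': 0} and 'average_value' in transform.coords:
        return transform.coords['average_value']
    raise ValueError(
        f"Transform is time-dependent: {transform}, but no filter is provided."
    )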