DMSC Integration Testing

Last updated: February 03, 2026 08:25:49

Test: nexusfiles-scipp|tbl|tbl_read_detector_everything|he3_detector_bank0

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00016338.hdf')
detector_name = 'he3_detector_bank0'
check_everything_events =

    @pytest.mark.parametrize(
        "detector_name",
        [
            "he3_detector_bank0",
            "he3_detector_bank1",
            "multiblade_detector",
            "ngem_detector",
            "timepix3_detector",
        ],
    )
    def test_tbl_read_detector_everything(
        workflow: sciline.Pipeline,
        coda_nexus_file_path: Path,
        detector_name: str,
        check_everything_events: Callable,
    ) -> None:
        workflow[Filename[SampleRun]] = coda_nexus_file_path
        workflow[NeXusDetectorName] = detector_name

        # Read the event data
>       result = workflow.compute(RawDetector[SampleRun])
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_load_nexus_test.py:130:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
    return self.get(tp, **kwargs).compute(reporter=reporter)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
    return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
    return self._dask_get(dsk, list(map(_to_dask_key, keys)))
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
    results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
    raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
    raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
    result = task(data)
             ^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
    return self.func(*new_argspec)
           ^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
    return func(*args)
           ^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:423: in assemble_detector_data
    neutron_data = nexus.group_event_data(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    def group_event_data(
        *, event_data: sc.DataArray, detector_number: sc.Variable
    ) -> sc.DataArray:
        """Group event data by detector number.

        The detector_number variable also defines the output shape and dimension names.

        Parameters
        ----------
        event_data:
            Data array with events to group, as returned from :py:func:`load_event_data`.
        detector_number:
            Variable with detector numbers matching the `event_id` field of the event data.

        Returns
        -------
        :
            Data array with events grouped by detector number.
        """
        event_id = detector_number.flatten(to='event_id').copy()
        constituents = event_data.bins.constituents
        begin = constituents['begin']
        end = constituents['end']
        data = constituents['data'].copy(deep=False)
        if 'event_time_zero' in event_data.coords:
            data.coords['event_time_zero'] = sc.bins_like(
                event_data, fill_value=event_data.coords['event_time_zero']
            ).bins.constituents['data']
        # After loading raw NXevent_data it is guaranteed that the event table
        # is contiguous and that there is no masking. We can therefore use the
        # more efficient approach of binning from scratch instead of erasing the
        # 'event_time_zero' binning defined by NXevent_data. This sanity check should
        # therefore always pass unless some unusual modifications were performed.
        if (
            event_data.masks
>           or begin[0] != sc.index(0)
               ^^^^^^^^
            or end[-1] != sc.index(data.sizes[data.dim])
            or (begin[1:] != end[:-1]).any()
        ):
E       IndexError: The requested index 0 is out of range. Dimension size is 0 and the allowed range is [0:-1].

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/_nexus_loader.py:544: IndexError
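
The error points at `begin[0]` on a zero-length index variable, which suggests the NXevent_data group for this detector bank was read with zero pulses. A minimal sketch of that failure mode, assuming only an empty bin-index variable (the `begin` name stands in for `constituents['begin']`):

    # Sketch: indexing an empty bin-index variable reproduces the IndexError above.
    import scipp as sc

    begin = sc.empty(dims=['event_time_zero'], shape=[0], dtype='int64')
    begin[0]  # raises IndexError: index 0 out of range, dimension size is 0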

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00016198.hdf')
detector_name = 'he3_detector_bank0'
check_everything_events =

    @pytest.mark.parametrize(
        "detector_name",
        [
            "he3_detector_bank0",
            "he3_detector_bank1",
            "multiblade_detector",
            "ngem_detector",
            "timepix3_detector",
        ],
    )
    def test_tbl_read_detector_everything(
        workflow: sciline.Pipeline,
        coda_nexus_file_path: Path,
        detector_name: str,
        check_everything_events: Callable,
    ) -> None:
        workflow[Filename[SampleRun]] = coda_nexus_file_path
        workflow[NeXusDetectorName] = detector_name

        # Read the event data
>       result = workflow.compute(RawDetector[SampleRun])
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_load_nexus_test.py:130:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
    return self.get(tp, **kwargs).compute(reporter=reporter)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
    return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
    return self._dask_get(dsk, list(map(_to_dask_key, keys)))
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
    results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
    raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
    raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
    result = task(data)
             ^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
    return self.func(*new_argspec)
           ^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
    return func(*args)
           ^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:423: in assemble_detector_data
    neutron_data = nexus.group_event_data(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    def group_event_data(
        *, event_data: sc.DataArray, detector_number: sc.Variable
    ) -> sc.DataArray:
        """Group event data by detector number.

        The detector_number variable also defines the output shape and dimension names.

        Parameters
        ----------
        event_data:
            Data array with events to group, as returned from :py:func:`load_event_data`.
        detector_number:
            Variable with detector numbers matching the `event_id` field of the event data.

        Returns
        -------
        :
            Data array with events grouped by detector number.
        """
        event_id = detector_number.flatten(to='event_id').copy()
        constituents = event_data.bins.constituents
        begin = constituents['begin']
        end = constituents['end']
        data = constituents['data'].copy(deep=False)
        if 'event_time_zero' in event_data.coords:
            data.coords['event_time_zero'] = sc.bins_like(
                event_data, fill_value=event_data.coords['event_time_zero']
            ).bins.constituents['data']
        # After loading raw NXevent_data it is guaranteed that the event table
        # is contiguous and that there is no masking. We can therefore use the
        # more efficient approach of binning from scratch instead of erasing the
        # 'event_time_zero' binning defined by NXevent_data. This sanity check should
        # therefore always pass unless some unusual modifications were performed.
        if (
            event_data.masks
>           or begin[0] != sc.index(0)
               ^^^^^^^^
            or end[-1] != sc.index(data.sizes[data.dim])
            or (begin[1:] != end[:-1]).any()
        ):
E       IndexError: The requested index 0 is out of range. Dimension size is 0 and the allowed range is [0:-1].

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/_nexus_loader.py:544: IndexError
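
This second failure is the same `IndexError` for a different CODA file. One possible guard, sketched here rather than taken from `ess.reduce`, is to treat an empty pulse dimension as trivially contiguous before indexing `begin[0]`:

    # Hypothetical helper (not the ess.reduce implementation): check contiguity
    # of the bin indices without indexing into an empty 'begin' variable.
    import scipp as sc

    def is_contiguous(begin: sc.Variable, end: sc.Variable, data: sc.DataArray) -> bool:
        if begin.sizes[begin.dim] == 0:
            # No pulses: contiguous only if the event table is empty as well.
            return data.sizes[data.dim] == 0
        return bool(
            (begin[0] == sc.index(0)).value
            and (end[-1] == sc.index(data.sizes[data.dim])).value
            and not (begin[1:] != end[:-1]).any().value
        )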

item =

    def pytest_runtest_call(item) -> None:
        coda_file = getattr(item, "_coda_file", None)
        if coda_file is not None:
>           item.record_property("coda_file", coda_file)
            ^^^^^^^^^^^^^^^^^^^^
E           AttributeError: 'Function' object has no attribute 'record_property'

tests/conftest.py:17: AttributeError
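
The hook fails because `record_property` is a pytest fixture, not a method on the collected `Function` item. A sketch of an equivalent hook that appends to the item's `user_properties` list, which is the attribute the `record_property` fixture writes to (the `_coda_file` attribute is assumed to be attached to the item elsewhere in the suite):

    # Sketch of the conftest.py hook using Item.user_properties instead of the
    # non-existent item.record_property method.
    def pytest_runtest_call(item) -> None:
        coda_file = getattr(item, "_coda_file", None)
        if coda_file is not None:
            item.user_properties.append(("coda_file", coda_file))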

System Out:

--------------------------------- Captured Log ---------------------------------

request = >
_session_coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00014866.hdf')

    @pytest.fixture
    def coda_nexus_file_path(request, _session_coda_nexus_file_path: Path) -> Path:
>       request.node.record_property(
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
            "coda_file",
            str(_session_coda_nexus_file_path),
        )
E       AttributeError: 'Function' object has no attribute 'record_property'

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dmsc_nightly/testing/nexusfiles/setup_fixtures.py:46: AttributeError
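
The fixture hits the same limitation via `request.node`. Requesting the built-in `record_property` fixture directly sidesteps it; a sketch, assuming the fixture otherwise just returns the session-scoped path:

    # Sketch of the setup fixture using the built-in record_property fixture.
    import pytest
    from pathlib import Path

    @pytest.fixture
    def coda_nexus_file_path(record_property, _session_coda_nexus_file_path: Path) -> Path:
        record_property("coda_file", str(_session_coda_nexus_file_path))
        return _session_coda_nexus_file_path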

System Out:

--------------------------------- Captured Log ---------------------------------
