DMSC Integration Testing

Last updated: March 23, 2026 07:04:33

Test: nexusfiles-scipp|tbl|can_compute_tof|ngem_detector

Summary: all eight runs below fail identically at graph-build time with
`sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'`.
The test sets `Filename[SampleRun]`, `NeXusDetectorName`, and
`TimeOfFlightLookupTableFilename` on the workflow, but never provides
`LookupTableRelativeErrorThreshold`, which the provider
`ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector`
requires to produce `ErrorLimitedTofLookupTable[NXdetector]` on the way to the
requested target `TofDetector[SampleRun]`. Likely fix: set that parameter in the
test (or restore its default in the ess.reduce workflow).

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00023345.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00023205.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00023065.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00022918.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00022777.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00022637.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00022464.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00022317.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00022177.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00022030.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00021890.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00021757.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00021477.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00021337.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00021197.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00021064.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
keys = ess.reduce.time_of_flight.types.TofDetector[ess.imaging.types.SampleRun]

def get(
self,
keys: type | Iterable[type] | "UnionType" | str, # noqa: UP037 (needed by Sphinx)
*,
scheduler: Scheduler | None = None,
handler: ErrorHandler | None = None,
max_depth: int = 4,
) -> TaskGraph:
"""
Return a TaskGraph for the given keys.

Parameters
----------
keys:
Type to compute the result for.
Can be a single type or an iterable of types.
scheduler:
Optional scheduler to use for computing the result. If not given, a
:py:class:`NaiveScheduler` is used if `dask` is not installed,
otherwise dask's threaded scheduler is used.
handler:
Handler for unsatisfied requirements. If not provided,
:py:class:`HandleAsBuildTimeException` is used, which raises an exception.
During development and debugging it can be helpful to use a handler that
raises an exception only when the graph is computed. This can be achieved
by passing :py:class:`HandleAsComputeTimeException` as the handler.
max_depth:
Maximum depth to show in the dependency tree when reporting errors.
"""
if multi := _is_multiple_keys(keys):
targets = tuple(keys) # type: ignore[arg-type]
else:
targets = (keys,)
try:
graph = to_task_graph(self, targets=targets, handler=handler) # type: ignore[arg-type]
except UnsatisfiedRequirement as e:
missing = e.args[1]
nx_graph = self.underlying_graph
if missing in nx_graph:
paths = _find_paths_to_targets(nx_graph, missing, targets)
info = _format_paths_msg(nx_graph, paths)
else:
nodes = ", ".join(map(key_name, nx_graph.nodes))
info = f'{e} Requested node not in graph. Did you mean one of: {nodes}?'
# Not raising `from e` because that includes noisy traceback of internals,
# which are not relevant to the user.
> raise type(e)(f'{info}\n\n') from None
E sciline.handler.UnsatisfiedRequirement: Missing input node 'LookupTableRelativeErrorThreshold'. Affects requested targets (via providers given in parentheses):
E 1. LookupTableRelativeErrorThreshold → (ess.reduce.time_of_flight.eto_to_tof.mask_large_uncertainty_in_lut_detector) → ErrorLimitedTofLookupTable[NXdetector] → (ess.reduce.time_of_flight.eto_to_tof.detector_time_of_flight_data) → TofDetector[SampleRun]

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:281: UnsatisfiedRequirement

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00018234.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00018234.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00017765.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00017612.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00017472.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00017332.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00017185.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError

View job log here


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_tbl_999999_00017045.hdf')
detector_name = 'ngem_detector'

@pytest.mark.parametrize(
"detector_name",
[
"he3_detector_bank0",
"he3_detector_bank1",
"multiblade_detector",
"ngem_detector",
"timepix3_detector",
],
)
def test_can_compute_tof(
workflow: sciline.Pipeline,
coda_nexus_file_path: Path,
detector_name: str,
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
workflow[NeXusDetectorName] = detector_name
workflow[TimeOfFlightLookupTableFilename] = tbl_tof_lookup_table_no_choppers()

> result = workflow.compute(TofDetector[SampleRun])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/tbl/tbl_reduction_test.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:338: in to_transformation
t.value = _time_filter(t.value['time', interval.value])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

transform =
Dimensions: Sizes[time:0, ]
Coordinates:
* average_value float64 [deg] () 0... datetime64 [ns] (time) []
Data:
float64 [deg] (time) []



def _time_filter(transform: sc.DataArray) -> sc.Variable:
if transform.ndim == 0 or transform.sizes == {'time': 1}:
return transform.data.squeeze()
> raise ValueError(
f"Transform is time-dependent: {transform}, but no filter is provided."
)
E ValueError: Transform is time-dependent:
E Dimensions: Sizes[time:0, ]
E Coordinates:
E * average_value float64 [deg] () 0
E * maximum_value float64 [deg] () 0
E * minimum_value float64 [deg] () 0
E * time datetime64 [ns] (time) []
E Data:
E float64 [deg] (time) []
E
E , but no filter is provided.

.tox/nexusfiles-scipp-tbl/lib/python3.12/site-packages/ess/reduce/nexus/workflow.py:292: ValueError