DMSC Integration Testing

Last updated: February 03, 2026 08:25:49

Test: nexusfiles-scipp|dream|can_compute_tof|monitor_cave

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00017187.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:479: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:406: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:119: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:755: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:10460, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:10460, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError
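
Note: every parametrized run in this report fails the same way. scipp's rebin raises
BinEdgeError because the monitor histogram's 'frame_time' coordinate is apparently not a
bin-edge coordinate (for 714 bins, rebin requires 715 edge values; a coordinate with 714
values is treated as midpoints/labels). A minimal sketch with synthetic values (not the
DREAM monitor data) that reproduces the same error class:

    # Hypothetical reproduction of the BinEdgeError above; the values are made up,
    # only the coordinate-length vs. data-length relationship matters.
    import scipp as sc

    counts = sc.ones(dims=['frame_time'], shape=[714], unit='counts')
    # 714 coordinate values for 714 bins -> midpoint-style coordinate, NOT bin edges.
    midpoints = sc.linspace('frame_time', 0.0, 7.1e7, num=714, unit='us')
    da = sc.DataArray(data=counts, coords={'frame_time': midpoints})

    new_edges = sc.linspace('frame_time', 0.0, 7.13e7, num=716, unit='us')
    da.rebin(frame_time=new_edges)  # raises BinEdgeError, as in the traceback above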

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00017047.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:755: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:6937, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:6937, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError
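
Note: a quick check one could run on the data array handed to rebin (the name `da` and
the access pattern are assumptions, not taken from the workflow code) to confirm whether
the 'frame_time' coordinate is bin-edge aligned:

    # Hypothetical diagnostic: a coordinate is a bin-edge coordinate iff it has
    # exactly one more value along the dimension than the data does.
    import scipp as sc

    def is_bin_edge_coord(da: sc.DataArray, dim: str) -> bool:
        return da.coords[dim].sizes[dim] == da.sizes[dim] + 1

    # For the failures in this report this presumably returns False for 'frame_time';
    # rebin() requires it to be True for the dimension being rebinned.

If available in the installed scipp version, da.coords.is_edges('frame_time') should give
the same answer.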

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00016900.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:755: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:6333, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:6333, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00016760.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:755: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:7486, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:7486, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00016627.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:755: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:6875, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:6875, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00016480.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:755: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:10131, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:10131, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00016340.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:755: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:8955, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:8955, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00016200.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:7848, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:7848, frame_time:714, ]
Coordinates:
* frame_time int32 ... float64 [au] (time, frame_time) [30, 30, ..., 30, 30] [30, 30, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00016060.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:17936, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [0, 0, ..., 30, 30] [0, 0, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:17936, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [0, 0, ..., 30, 30] [0, 0, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00015920.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:15438, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [0, 0, ..., 30, 30] [0, 0, ..., 30, 30]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:15438, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [0, 0, ..., 30, 30] [0, 0, ..., 30, 30]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError
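
Every failure in this run reduces to the same BinEdgeError from scipp's rebin: the monitor data group carries a dense frame_time histogram whose coordinate evidently does not have the N+1 values that rebin requires for bin edges (the variable dumps show 714 frame_time bins, so 715 edge values would be expected). A minimal, self-contained sketch that reproduces the same error -- the sizes and values below are illustrative only, not taken from the DREAM files or the ess.reduce code:

    import scipp as sc

    # 714 bins but only 714 coordinate values -> treated as midpoints, not bin edges.
    counts = sc.zeros(dims=['frame_time'], shape=[714], unit='counts')
    midpoints = sc.linspace('frame_time', 0.0, 7.1e7, num=714, unit='us')
    da = sc.DataArray(data=counts, coords={'frame_time': midpoints})

    new_edges = sc.linspace('frame_time', 0.0, 7.13285e7, num=716, unit='us')
    da.rebin(frame_time=new_edges)  # raises BinEdgeError, as in the traces above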

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00015780.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:13806, frame_time:714, ]
Coordinates:
* frame_time int32 ...
Data:
float64 [au] (time, frame_time) [0, 0, ..., 0, 0] [0, 0, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:13806, frame_time:714, ]
Coordinates:
* frame_time int32 ...
Data:
float64 [au] (time, frame_time) [0, 0, ..., 0, 0] [0, 0, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError
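
If the frame_time coordinate really holds bin centres, one possible repair is to rebuild it as N+1 edges before rebinning. The sketch below is written under that assumption and is not the fix adopted in ess.reduce; midpoints_to_edges is a hypothetical helper that assumes uniform bin spacing:

    import scipp as sc

    def midpoints_to_edges(mid: sc.Variable, dim: str) -> sc.Variable:
        # Edges halfway between neighbouring midpoints, extrapolated at both ends.
        inner = 0.5 * (mid[dim, 1:] + mid[dim, :-1])
        first = mid[dim, 0:1] - 0.5 * (mid[dim, 1] - mid[dim, 0])
        last = mid[dim, -1:] + 0.5 * (mid[dim, -1] - mid[dim, -2])
        return sc.concat([first, inner, last], dim)

    counts = sc.zeros(dims=['frame_time'], shape=[714], unit='counts')
    centres = sc.linspace('frame_time', 0.0, 7.1e7, num=714, unit='us')
    da = sc.DataArray(data=counts, coords={'frame_time': centres})

    # 715 edges for 714 bins: rebin no longer raises BinEdgeError.
    da.coords['frame_time'] = midpoints_to_edges(centres, 'frame_time')
    new_edges = sc.linspace('frame_time', 0.0, 7.13285e7, num=716, unit='us')
    rebinned = da.rebin(frame_time=new_edges)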

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00015640.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5084, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5084, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError
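
A cheap way to tell whether a scipp data array is already histogrammed along a dimension is to compare the coordinate length with the dimension size. The helper below is a hypothetical sketch, not an ess.reduce or scipp API, that would turn the opaque BinEdgeError into an explicit message at the point where the monitor histogram is first touched:

    import scipp as sc

    def assert_histogrammed(da: sc.DataArray, dim: str) -> None:
        # Bin-edge coordinates have exactly one more value than there are bins.
        coord = da.coords[dim]
        if coord.sizes[dim] != da.sizes[dim] + 1:
            raise ValueError(
                f"coordinate '{dim}' has {coord.sizes[dim]} values for "
                f"{da.sizes[dim]} bins; expected {da.sizes[dim] + 1} bin edges, "
                "so the data does not look histogrammed"
            )

    # e.g. assert_histogrammed(monitor_histogram, 'frame_time') before rebinning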

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00015507.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:7113, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:7113, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00014868.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00014868.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00014868.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00014868.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00014868.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00014868.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError

workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00014868.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...a:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:5601, frame_time:714, ]
Coordinates:
* frame_time int32 ...
Data:
float64 [au] (time, frame_time) [30, 30, ..., 0, 0] [30, 30, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError
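
Note on the failure above: scipp.rebin requires the coordinate along the rebinned dimension to hold bin edges, i.e. one more value than the data has bins; when the coordinate has exactly as many values as the dimension (bin centers or labels), the C++ layer raises the BinEdgeError seen here. The truncated repr does not show the length of the frame_time coordinate directly, but this mismatch is the most plausible cause: the monitor item has 714 frame_time bins, so a valid edge coordinate would need 715 values. A minimal sketch of the two cases, using synthetic data rather than the actual DREAM monitor histogram:

import scipp as sc

# Dense 1-D histogram with 4 bins along frame_time.
counts = sc.array(dims=['frame_time'], values=[1.0, 2.0, 3.0, 4.0], unit='counts')

# A coordinate with 4 values (one per bin) is NOT a bin-edge coordinate.
centers = sc.linspace('frame_time', 0.5, 3.5, num=4, unit='us')
da_centers = sc.DataArray(data=counts, coords={'frame_time': centers})

new_edges = sc.linspace('frame_time', 0.0, 4.0, num=3, unit='us')
try:
    da_centers.rebin(frame_time=new_edges)
except sc.BinEdgeError as err:
    print('rebin failed:', err)  # "The input does not have coordinates with bin-edges."

# With 5 edge values (n_bins + 1) the same call succeeds.
edges = sc.linspace('frame_time', 0.0, 4.0, num=5, unit='us')
da_edges = sc.DataArray(data=counts, coords={'frame_time': edges})
print(da_edges.rebin(frame_time=new_edges).sizes)  # {'frame_time': 2}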


workflow =
coda_nexus_file_path = PosixPath('/ess/data/coda/999999/raw/coda_dream_999999_00013756.hdf')
monitor_type = ess.reduce.nexus.types.CaveMonitor

@pytest.mark.parametrize(
"monitor_type", [BunkerMonitor, CaveMonitor], ids=["bunker", "cave"]
)
def test_can_compute_tof__monitor_(
workflow: sciline.Pipeline, coda_nexus_file_path: Path, monitor_type: type
) -> None:
workflow[Filename[SampleRun]] = coda_nexus_file_path
> result = workflow.compute(TofMonitor[SampleRun, monitor_type])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

tests/nexusfiles-scipp/dream/dream_reduction_test.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/pipeline.py:191: in compute
return self.get(tp, **kwargs).compute(reporter=reporter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/task_graph.py:122: in compute
return self._scheduler.get(self._graph, [targets], reporter=reporter)[0]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/sciline/scheduler.py:119: in get
return self._dask_get(dsk, list(map(_to_dask_key, keys)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/threaded.py:91: in get
results = get_async(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:549: in get_async
raise_exception(exc, tb)
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:353: in reraise
raise exc
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/local.py:258: in execute_task
result = task(data)
^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/_task_spec.py:759: in __call__
return self.func(*new_argspec)
^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/dask/utils.py:80: in apply
return func(*args)
^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:473: in monitor_time_of_flight_data
_compute_tof_data(
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:403: in _compute_tof_data
data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/ess/reduce/time_of_flight/eto_to_tof.py:116: in _time_of_flight_data_histogram
rebinned = da.rebin({key: new_bins})
^^^^^^^^^^^^^^^^^^^^^^^^^
.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/data_group.py:744: in impl
return func(data, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

x =
Dimensions: Sizes[time:4944, frame_time:714, ]
Coordinates:
* frame_time int32 ...
Data:
float64 [au] (time, frame_time) [0, 0, ..., 0, 0] [0, 0, ..., 0, 0]


arg_dict = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
kwargs = {}
edges = {'frame_time': (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]}
out =
Dimensions: Sizes[time:4944, frame_time:714, ]
Coordinates:
* frame_time int32 ...
Data:
float64 [au] (time, frame_time) [0, 0, ..., 0, 0] [0, 0, ..., 0, 0]


dim = 'frame_time'
edge = (frame_time: 716) float64 [µs] [0, 0, ..., 7.12285e+07, 7.13285e+07]

@data_group_overload
def rebin(
x: Variable | DataArray | Dataset | DataGroup[Any],
arg_dict: IntoStrDict[SupportsIndex | Variable] | None = None,
/,
**kwargs: SupportsIndex | Variable,
) -> Variable | DataArray | Dataset | DataGroup[Any]:
"""Rebin a data array or dataset.

The coordinate of the input for the dimension to be rebinned must contain bin edges,
i.e., the data must be histogrammed.

If the input has masks that contain the dimension being rebinned then those
masks are applied to the data before rebinning. That is, masked values are treated
as zero.

Parameters
----------
x:
Data to rebin.
arg_dict:
Dictionary mapping dimension labels to binning parameters.
**kwargs:
Mapping of dimension label to corresponding binning parameters.

Returns
-------
:
Data rebinned according to the new bin edges.

See Also
--------
scipp.bin:
For changing the binning of binned (as opposed to dense, histogrammed) data.
scipp.hist:
For histogramming data.

Examples
--------

Rebin a data array along one of its dimensions, specifying (1) number of bins, (2)
bin width, or (3) actual binning:

>>> from numpy.random import default_rng
>>> rng = default_rng(seed=1234)
>>> x = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> y = sc.array(dims=['row'], unit='m', values=rng.random(100))
>>> data = sc.ones(dims=['row'], unit='K', shape=[100])
>>> table = sc.DataArray(data=data, coords={'x': x, 'y': y})
>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=2).sizes
{'x': 2, 'y': 100}

>>> da.rebin(x=sc.scalar(0.2, unit='m')).sizes
{'x': 5, 'y': 100}

>>> da.rebin(x=sc.linspace('x', 0.2, 0.8, num=10, unit='m')).sizes
{'x': 9, 'y': 100}

Rebin a data array along two of its dimensions:

>>> da = table.hist(x=100, y=100)
>>> da.rebin(x=4, y=6).sizes
{'x': 4, 'y': 6}
"""
if isinstance(x, DataGroup):
# Only to make mypy happy because we have `DataGroup` in annotation of `x`
# so that Sphinx shows it.
raise TypeError("Internal error: input should not be a DataGroup")
edges = _make_edges(x, arg_dict, kwargs)
out = x
for dim, edge in edges.items():
> out = _cpp.rebin(out, dim, edge)
^^^^^^^^^^^^^^^^^^^^^^^^^^
E scipp._scipp.core.BinEdgeError: The input does not have coordinates with bin-edges.

.tox/nexusfiles-scipp-dream/lib/python3.12/site-packages/scipp/core/binning.py:971: BinEdgeError
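
If it helps triage, one way to confirm this on the actual monitor data before it reaches _time_of_flight_data_histogram is to compare the length of the frame_time coordinate with the frame_time dimension of the histogram: a bin-edge coordinate must be exactly one element longer. The helper below is only an illustrative sketch, not part of ess.reduce or scipp (newer scipp releases also provide da.coords.is_edges, which, if available, makes it redundant):

import scipp as sc

def has_bin_edges(da: sc.DataArray, dim: str) -> bool:
    """Return True if `da` carries a bin-edge coordinate along `dim`."""
    if dim not in da.coords:
        return False
    coord = da.coords[dim]
    # Bin edges have one more value along `dim` than the data itself.
    return dim in coord.dims and coord.sizes[dim] == da.sizes[dim] + 1

# Synthetic stand-in for the monitor histogram: 4 bins, 4 coordinate values.
monitor = sc.DataArray(
    data=sc.ones(dims=['frame_time'], shape=[4], unit='counts'),
    coords={'frame_time': sc.arange('frame_time', 4.0, unit='us')},
)
print(has_bin_edges(monitor, 'frame_time'))  # False -> rebin would raise BinEdgeError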