Skip to content

Subtract sample run by empty can #144

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 6 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions src/ess/dream/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
"""Data for tests and documentation with DREAM."""

_version = "1"
_version = "2"

__all__ = ["get_path"]

Expand All @@ -21,15 +21,15 @@ def _make_pooch():
"DREAM_nexus_sorted-2023-12-07.nxs": "md5:22824e14f6eb950d24a720b2a0e2cb66",
"DREAM_simple_pwd_workflow/data_dream_diamond_vana_container_sample_union.csv.zip": "md5:33302d0506b36aab74003b8aed4664cc", # noqa: E501
"DREAM_simple_pwd_workflow/data_dream_diamond_vana_container_sample_union_run2.csv.zip": "md5:c7758682f978d162dcb91e47c79abb83", # noqa: E501
"DREAM_simple_pwd_workflow/data_dream_vana_container_sample_union.csv.zip": "md5:1e22917b2bb68b5cacfb506b72700a4d", # noqa: E501
"DREAM_simple_pwd_workflow/data_dream_vana_container_sample_union.csv.zip": "md5:de31cb8b1887d10c42091d040aaa94ef", # noqa: E501
"DREAM_simple_pwd_workflow/data_dream_vanadium.csv.zip": "md5:e5addfc06768140c76533946433fa2ec", # noqa: E501
"DREAM_simple_pwd_workflow/data_dream_vanadium_inc_coh.csv.zip": "md5:39d1a44e248b12966b26f7c2f6c602a2", # noqa: E501
"DREAM_simple_pwd_workflow/Cave_TOF_Monitor_diam_in_can.dat": "md5:ef24f4a4186c628574046e6629e31611", # noqa: E501
"DREAM_simple_pwd_workflow/Cave_TOF_Monitor_van_can.dat": "md5:e63456c347fb36a362a0b5ae2556b3cf", # noqa: E501
"DREAM_simple_pwd_workflow/Cave_TOF_Monitor_van_can.dat": "md5:2cdef7ad9912652149b7e687381d2e99", # noqa: E501
"DREAM_simple_pwd_workflow/Cave_TOF_Monitor_vana_inc_coh.dat": "md5:701d66792f20eb283a4ce76bae0c8f8f", # noqa: E501
"DREAM-high-flux-tof-lookup-table.h5": "md5:404145a970ed1188e524cba10194610e", # noqa: E501
"DREAM-high-flux-tof-lookup-table.h5": "md5:1b95a359fa7b0d8b4277806ece9bf279", # noqa: E501
# Smaller files for unit tests
"DREAM_simple_pwd_workflow/TEST_data_dream_diamond_vana_container_sample_union.csv.zip": "md5:018a87e0934c1dd0f07a708e9d497891", # noqa: E501
"DREAM_simple_pwd_workflow/TEST_data_dream_diamond_vana_container_sample_union.csv.zip": "md5:83638c70a445f45816aee67f43d7f749", # noqa: E501
"DREAM_simple_pwd_workflow/TEST_data_dream_vana_container_sample_union.csv.zip": "md5:6b4b6c3a7358cdb1dc5a36b56291ab1b", # noqa: E501
"DREAM_simple_pwd_workflow/TEST_data_dream_vanadium.csv.zip": "md5:178f9bea9f35dbdef693e38ff893c258", # noqa: E501
"TEST_data_dream0_new_hkl_Si_pwd.csv.zip": "md5:df6c41f4b7b21e129915808f625828f6", # noqa: E501
Expand Down Expand Up @@ -189,8 +189,8 @@ def simulated_empty_can(*, small: bool = False) -> str:

SciCat:

- PID: ``20.500.12269/1a280698-aa5a-4cfb-bc4f-68fcd40462cc``
- URL: https://staging.scicat.ess.eu/datasets/20.500.12269%2F1a280698-aa5a-4cfb-bc4f-68fcd40462cc
- PID: ``20.500.12269/bdaf009d-b09a-4622-bc78-46240873b3a0``
- URL: https://staging.scicat.ess.eu/datasets/20.500.12269%2Fbdaf009d-b09a-4622-bc78-46240873b3a0

**Container**:

Expand Down
52 changes: 9 additions & 43 deletions src/ess/dream/io/geant4.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,24 +4,23 @@
import numpy as np
import sciline
import scipp as sc
import scippneutron as scn
import scippnexus as snx
from scippneutron.metadata import ESS_SOURCE

from ess.powder.types import (
BackgroundRun,
Beamline,
CalibratedBeamline,
CalibratedDetector,
CalibratedMonitor,
CalibrationData,
CalibrationFilename,
CaveMonitor,
CaveMonitorPosition,
DetectorData,
DetectorLtotal,
Filename,
MonitorData,
MonitorFilename,
MonitorLtotal,
MonitorType,
NeXusComponent,
NeXusDetectorName,
Expand All @@ -31,8 +30,7 @@
Source,
VanadiumRun,
)
from ess.reduce.nexus.types import CalibratedBeamline
from ess.reduce.nexus.workflow import GenericNeXusWorkflow
from ess.reduce.time_of_flight.workflow import GenericTofWorkflow

MANTLE_DETECTOR_ID = sc.index(7)
HIGH_RES_DETECTOR_ID = sc.index(8)
Expand Down Expand Up @@ -276,30 +274,22 @@ def assemble_detector_data(
out.bins.coords['event_time_offset'] = out.bins.coords['tof'] % period.to(
unit=detector.bins.coords['tof'].bins.unit
)
graph = scn.conversion.graph.beamline.beamline(scatter=True)
return DetectorData[RunType](
out.bins.drop_coords('tof').transform_coords(
"Ltotal", graph=graph, keep_intermediate=True
)
)
return DetectorData[RunType](out.bins.drop_coords('tof'))


def assemble_monitor_data(
monitor: CalibratedMonitor[RunType, MonitorType],
) -> MonitorData[RunType, MonitorType]:
"""
Dummy assembly of monitor data, monitor already contains neutron data.
We simply add a Ltotal coordinate necessary to calculate the time-of-flight.
Dummy assembly of monitor data, monitor already contains neutron data with all
necessary coordinates.

Parameters
----------
monitor:
The calibrated monitor data.
"""
graph = scn.conversion.graph.beamline.beamline(scatter=False)
return MonitorData[RunType, MonitorType](
monitor.transform_coords("Ltotal", graph=graph)
)
return MonitorData[RunType, MonitorType](monitor)


def dummy_source_position() -> Position[snx.NXsource, RunType]:
Expand All @@ -314,28 +304,6 @@ def dummy_sample_position() -> Position[snx.NXsample, RunType]:
)


def extract_detector_ltotal(detector: DetectorData[RunType]) -> DetectorLtotal[RunType]:
    """Return the Ltotal coordinate stored on the detector data.

    TODO: Temporary implementation; positions should eventually be read
    separately from the event data so they need not be re-loaded every time
    new events arrive while streaming live data.
    """
    ltotal = detector.coords["Ltotal"]
    return DetectorLtotal[RunType](ltotal)


def extract_monitor_ltotal(
    monitor: MonitorData[RunType, MonitorType],
) -> MonitorLtotal[RunType, MonitorType]:
    """Return the Ltotal coordinate stored on the monitor data.

    TODO: Temporary implementation; positions should eventually be read
    separately from the event data so they need not be re-loaded every time
    new events arrive while streaming live data.
    """
    ltotal = monitor.coords["Ltotal"]
    return MonitorLtotal[RunType, MonitorType](ltotal)


def dream_beamline() -> Beamline:
return Beamline(
name="DREAM",
Expand All @@ -352,8 +320,8 @@ def LoadGeant4Workflow() -> sciline.Pipeline:
"""
Workflow for loading NeXus data.
"""
wf = GenericNeXusWorkflow(
run_types=[SampleRun, VanadiumRun], monitor_types=[CaveMonitor]
wf = GenericTofWorkflow(
run_types=[SampleRun, VanadiumRun, BackgroundRun], monitor_types=[CaveMonitor]
)
wf.insert(extract_geant4_detector)
wf.insert(load_geant4_csv)
Expand All @@ -364,8 +332,6 @@ def LoadGeant4Workflow() -> sciline.Pipeline:
wf.insert(assemble_monitor_data)
wf.insert(dummy_source_position)
wf.insert(dummy_sample_position)
wf.insert(extract_detector_ltotal)
wf.insert(extract_monitor_ltotal)
wf.insert(dream_beamline)
wf.insert(ess_source)
return wf
6 changes: 5 additions & 1 deletion src/ess/dream/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
)
from ess.powder.types import (
AccumulatedProtonCharge,
BackgroundRun,
CaveMonitorPosition, # Should this be a DREAM-only parameter?
PixelMaskFilename,
Position,
Expand Down Expand Up @@ -69,10 +70,13 @@ def default_parameters() -> dict:
return {
Position[snx.NXsample, SampleRun]: sample_position,
Position[snx.NXsample, VanadiumRun]: sample_position,
Position[snx.NXsample, BackgroundRun]: sample_position,
Position[snx.NXsource, SampleRun]: source_position,
Position[snx.NXsource, VanadiumRun]: source_position,
Position[snx.NXsource, BackgroundRun]: source_position,
AccumulatedProtonCharge[SampleRun]: charge,
AccumulatedProtonCharge[VanadiumRun]: charge,
AccumulatedProtonCharge[BackgroundRun]: charge,
TofMask: None,
WavelengthMask: None,
TwoThetaMask: None,
Expand All @@ -85,7 +89,7 @@ def default_parameters() -> dict:
def _collect_reducer_software() -> ReducerSoftwares:
return ReducerSoftwares(
[
Software.from_package_metadata('essdiffraction'),
# Software.from_package_metadata('essdiffraction'),
Software.from_package_metadata('scippneutron'),
Software.from_package_metadata('scipp'),
]
Expand Down
89 changes: 2 additions & 87 deletions src/ess/powder/conversion.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,42 +4,26 @@
Coordinate transformations for powder diffraction.
"""

import sciline as sl
import scipp as sc
import scippneutron as scn

from ess.reduce import time_of_flight

from .calibration import OutputCalibrationData
from .correction import merge_calibration
from .logging import get_logger
from .types import (
CalibrationData,
DataWithScatteringCoordinates,
DetectorData,
DetectorLtotal,
DistanceResolution,
DspacingData,
ElasticCoordTransformGraph,
FilteredData,
IofDspacing,
IofTof,
LookupTableRelativeErrorThreshold,
LtotalRange,
MaskedData,
MonitorData,
MonitorLtotal,
MonitorTofData,
MonitorType,
PulsePeriod,
PulseStride,
PulseStrideOffset,
RunType,
SimulationResults,
TimeOfFlightLookupTable,
TimeOfFlightLookupTableFilename,
TimeResolution,
TofData,
TofMonitorData,
WavelengthMonitor,
)

Expand Down Expand Up @@ -247,81 +231,14 @@ def convert_reduced_to_tof(
)


def build_tof_lookup_table(
    simulation: SimulationResults,
    ltotal_range: LtotalRange,
    pulse_period: PulsePeriod,
    pulse_stride: PulseStride,
    pulse_stride_offset: PulseStrideOffset,
    distance_resolution: DistanceResolution,
    time_resolution: TimeResolution,
    error_threshold: LookupTableRelativeErrorThreshold,
) -> TimeOfFlightLookupTable:
    """Compute a time-of-flight lookup table from simulation results.

    A dedicated time-of-flight sub-workflow is assembled, parametrized with
    the given inputs, and evaluated for the lookup table.
    """
    pipeline = sl.Pipeline(
        time_of_flight.providers(), params=time_of_flight.default_parameters()
    )
    parameters = {
        time_of_flight.SimulationResults: simulation,
        time_of_flight.LtotalRange: ltotal_range,
        time_of_flight.PulsePeriod: pulse_period,
        time_of_flight.PulseStride: pulse_stride,
        time_of_flight.PulseStrideOffset: pulse_stride_offset,
        time_of_flight.DistanceResolution: distance_resolution,
        time_of_flight.TimeResolution: time_resolution,
        time_of_flight.LookupTableRelativeErrorThreshold: error_threshold,
    }
    for key, value in parameters.items():
        pipeline[key] = value
    return pipeline.compute(time_of_flight.TimeOfFlightLookupTable)


def load_tof_lookup_table(
    filename: TimeOfFlightLookupTableFilename,
) -> TimeOfFlightLookupTable:
    """Load a precomputed time-of-flight lookup table from an HDF5 file."""
    table = sc.io.load_hdf5(filename)
    return TimeOfFlightLookupTable(table)


def compute_detector_time_of_flight(
    detector_data: DetectorData[RunType],
    lookup: TimeOfFlightLookupTable,
    ltotal: DetectorLtotal[RunType],
    pulse_period: PulsePeriod,
    pulse_stride: PulseStride,
    pulse_stride_offset: PulseStrideOffset,
) -> TofData[RunType]:
    """Compute time-of-flight for detector events via a lookup table.

    A time-of-flight sub-workflow is parametrized with the raw detector data
    and the table, then evaluated for the converted data.
    """
    pipeline = sl.Pipeline(
        time_of_flight.providers(), params=time_of_flight.default_parameters()
    )
    parameters = {
        time_of_flight.RawData: detector_data,
        time_of_flight.TimeOfFlightLookupTable: lookup,
        time_of_flight.Ltotal: ltotal,
        time_of_flight.PulsePeriod: pulse_period,
        time_of_flight.PulseStride: pulse_stride,
        time_of_flight.PulseStrideOffset: pulse_stride_offset,
    }
    for key, value in parameters.items():
        pipeline[key] = value
    return TofData[RunType](pipeline.compute(time_of_flight.TofData))


def compute_monitor_time_of_flight(
    monitor: MonitorData[RunType, MonitorType],
    lookup: TimeOfFlightLookupTable,
    ltotal: MonitorLtotal[RunType, MonitorType],
    pulse_period: PulsePeriod,
    pulse_stride: PulseStride,
    pulse_stride_offset: PulseStrideOffset,
) -> TofMonitorData[RunType, MonitorType]:
    """Compute time-of-flight for monitor data and resample onto a grid.

    A time-of-flight sub-workflow (extended with the resampling provider) is
    parametrized with the monitor data and the table. Bins that end up with
    zero counts after resampling are masked.
    """
    pipeline = sl.Pipeline(
        time_of_flight.providers(), params=time_of_flight.default_parameters()
    )
    pipeline.insert(time_of_flight.resample_tof_data)
    parameters = {
        time_of_flight.RawData: monitor,
        time_of_flight.TimeOfFlightLookupTable: lookup,
        time_of_flight.Ltotal: ltotal,
        time_of_flight.PulsePeriod: pulse_period,
        time_of_flight.PulseStride: pulse_stride,
        time_of_flight.PulseStrideOffset: pulse_stride_offset,
    }
    for key, value in parameters.items():
        pipeline[key] = value
    resampled = pipeline.compute(time_of_flight.ResampledTofData)
    resampled.masks["zero_counts"] = resampled.data == sc.scalar(
        0.0, unit=resampled.data.unit
    )
    return TofMonitorData[RunType, MonitorType](resampled)


def convert_monitor_to_wavelength(
monitor: TofMonitorData[RunType, MonitorType],
monitor: MonitorTofData[RunType, MonitorType],
) -> WavelengthMonitor[RunType, MonitorType]:
graph = {
**scn.conversion.graph.beamline.beamline(scatter=False),
Expand All @@ -338,7 +255,5 @@ def convert_monitor_to_wavelength(
convert_to_dspacing,
convert_reduced_to_tof,
convert_monitor_to_wavelength,
compute_detector_time_of_flight,
compute_monitor_time_of_flight,
load_tof_lookup_table,
)
25 changes: 23 additions & 2 deletions src/ess/powder/correction.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@
from ._util import event_or_outer_coord
from .types import (
AccumulatedProtonCharge,
BackgroundRun,
BackgroundSubtractedData,
BackgroundSubtractedDataTwoTheta,
CaveMonitor,
DataWithScatteringCoordinates,
FocussedDataDspacing,
Expand Down Expand Up @@ -155,7 +158,7 @@ def _normalize_by_vanadium(


def normalize_by_vanadium_dspacing(
data: FocussedDataDspacing[SampleRun],
data: BackgroundSubtractedData[SampleRun],
vanadium: FocussedDataDspacing[VanadiumRun],
uncertainty_broadcast_mode: UncertaintyBroadcastMode,
) -> IofDspacing:
Expand Down Expand Up @@ -185,7 +188,7 @@ def normalize_by_vanadium_dspacing(


def normalize_by_vanadium_dspacing_and_two_theta(
data: FocussedDataDspacingTwoTheta[SampleRun],
data: BackgroundSubtractedDataTwoTheta[SampleRun],
vanadium: FocussedDataDspacingTwoTheta[VanadiumRun],
uncertainty_broadcast_mode: UncertaintyBroadcastMode,
) -> IofDspacingTwoTheta:
Expand Down Expand Up @@ -335,6 +338,22 @@ def _shallow_copy(da: sc.DataArray) -> sc.DataArray:
return out


def subtract_background(
    data: FocussedDataDspacing[SampleRun],
    background: FocussedDataDspacing[BackgroundRun],
) -> BackgroundSubtractedData[SampleRun]:
    """Subtract the empty-can background from the sample measurement.

    The subtraction is event-based: the background events are negated in
    weight and concatenated into the sample's bins.
    """
    negated = -background
    subtracted = data.bins.concatenate(negated)
    return BackgroundSubtractedData[SampleRun](subtracted)


def subtract_background_two_theta(
    data: FocussedDataDspacingTwoTheta[SampleRun],
    background: FocussedDataDspacingTwoTheta[BackgroundRun],
) -> BackgroundSubtractedDataTwoTheta[SampleRun]:
    """Subtract the empty-can background from two-theta-resolved sample data.

    The subtraction is event-based: the background events are negated in
    weight and concatenated into the sample's bins.
    """
    negated = -background
    subtracted = data.bins.concatenate(negated)
    return BackgroundSubtractedDataTwoTheta[SampleRun](subtracted)


class RunNormalization(enum.Enum):
"""Type of normalization applied to each run."""

Expand All @@ -357,6 +376,8 @@ def insert_run_normalization(


providers = (
subtract_background,
subtract_background_two_theta,
normalize_by_proton_charge,
normalize_by_vanadium_dspacing,
normalize_by_vanadium_dspacing_and_two_theta,
Expand Down
Loading
Loading