Merge pull request #998 from neutrons/expunge_correct_tof_offset
remove correct_tof_offset
jmborr authored Feb 19, 2025
2 parents d88ea02 + 45496ae commit e43aaec
Showing 10 changed files with 32 additions and 80 deletions.
1 change: 1 addition & 0 deletions docs/release_notes.rst
@@ -23,6 +23,7 @@ Release Notes

**Of interest to the User**:

- PR #998: remove the TOF offset that is done by the data acquisition system
- PR #994: Remove unused module `drtsans/tof/eqsans/reduce.py`
- PR #993: Skip slices with too high transmission error when using time sliced sample transmission run
- PR #325: Migrates repository from GitLab to GitHub
20 changes: 0 additions & 20 deletions src/drtsans/tof/eqsans/correct_frame.py
@@ -17,7 +17,6 @@
logger,
SetInstrumentParameter,
ModeratorTzero,
ChangeBinOffset,
)
from drtsans.samplelogs import SampleLogs
from drtsans.tof.eqsans.chopper import EQSANSDiskChopperSet
@@ -418,25 +417,6 @@ def correct_emission_time(input_workspace):
)


def correct_tof_offset(input_workspace):
r"""
This corrects the TOF offset when running in 60Hz, non-frameskip. A
constant 664.7us needs to be removed, otherwise nothing is changed.
Parameters
----------
input_workspace: ~mantid.api.IEventsWorkspace
Data workspace
"""
# If 60Hz (non-frameskip) then subtract 664.7us from all tofs
if not _is_frame_skipping(input_workspace):
ChangeBinOffset(
InputWorkspace=input_workspace,
OutputWorkspace=input_workspace,
Offset=-664.7,
)


def smash_monitor_spikes(input_workspace, output_workspace=None):
r"""
Detect and remove spikes in monitor data TOF between min and max TOF's.
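For context, a minimal, self-contained sketch of the behavior this commit removes, written against Mantid's simpleapi. The helper name remove_das_tof_offset is hypothetical (it is not part of drtsans); the 664.7 us constant, the ChangeBinOffset call, and the "is_frame_skipping" log name are taken from the function and test deleted in this commit.

from mantid.simpleapi import ChangeBinOffset, mtd


def remove_das_tof_offset(workspace_name):
    # Hypothetical helper, not part of drtsans after this PR: reproduce the deleted
    # correction by subtracting the constant 664.7 us offset introduced by the data
    # acquisition system, applied only in 60 Hz (non-frame-skipping) mode.
    workspace = mtd[workspace_name]
    if workspace.getRun().getProperty("is_frame_skipping").value == 0:
        ChangeBinOffset(InputWorkspace=workspace, OutputWorkspace=workspace_name, Offset=-664.7)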
5 changes: 0 additions & 5 deletions src/drtsans/tof/eqsans/load.py
@@ -22,7 +22,6 @@
transform_to_wavelength,
smash_monitor_spikes,
set_init_uncertainties,
correct_tof_offset,
correct_emission_time,
)
from drtsans.tof.eqsans.geometry import source_monitor_distance
@@ -180,8 +179,6 @@ def load_events(
# EQSANS specific part benefits from converting workspace to a string
output_workspace = str(output_workspace)

# Correct TOF offset
correct_tof_offset(output_workspace)
# Correct TOF of detector
correct_detector_frame(output_workspace, path_to_pixel=path_to_pixel)
# Correct TOF for emission time
@@ -504,8 +501,6 @@ def load_and_split(
)

for _w in ws_group:
# Correct TOF offset
correct_tof_offset(_w)
# Correct TOF of detector
correct_detector_frame(_w, path_to_pixel=path_to_pixel)
# Correct TOF for emission time
2 changes: 1 addition & 1 deletion tests/data/drtsans-data
Submodule drtsans-data updated from 949422 to fe6a84
@@ -377,12 +377,14 @@ def _run_reduction_and_compare(config, expected_result_basename):
test_iq1d_file = os.path.join(test_dir, config["outputFileName"] + "_Iq.dat")
gold_iq1d_file = os.path.join(datarepo_dir.eqsans, "test_corrections", expected_result_basename + "_Iq.dat")
# compare
np.testing.assert_allclose(np.loadtxt(test_iq1d_file), np.loadtxt(gold_iq1d_file))
np.testing.assert_allclose(np.loadtxt(test_iq1d_file), np.loadtxt(gold_iq1d_file), atol=0.003)

test_iq2d_file = os.path.join(test_dir, config["outputFileName"] + "_Iqxqy.dat")
gold_iq2d_file = os.path.join(datarepo_dir.eqsans, "test_corrections", expected_result_basename + "_Iqxqy.dat")
# compare
np.testing.assert_allclose(np.loadtxt(test_iq2d_file, skiprows=4), np.loadtxt(gold_iq2d_file, skiprows=4))
np.testing.assert_allclose(
np.loadtxt(test_iq2d_file, skiprows=4), np.loadtxt(gold_iq2d_file, skiprows=4), atol=0.003
)

DeleteWorkspace("_empty")
DeleteWorkspace("_mask")
6 changes: 3 additions & 3 deletions tests/integration/drtsans/tof/eqsans/test_integration_api.py
@@ -41,9 +41,9 @@
("EQSANS_86217", 508339, 1300, 14122, 9595, 59633, True, 2.61, 14.72, 540),
("EQSANS_92353", 262291, 4000, 14122, 11288, 61309, True, 2.59, 12.98, 431),
("EQSANS_85550", 270022, 5000, 14122, 11914, 61930, True, 2.59, 12.43, 599),
("EQSANS_101595", 289989, 1300, 14122, 7649, 24392, False, 2.11, 5.65, 141),
("EQSANS_88565", 19362, 4000, 14122, 45485, 62174, False, 10.02, 13.2, 694),
("EQSANS_88901", 340431, 8000, 14122, 66629, 83378, False, 11.99, 14.62, 54377),
("EQSANS_101595", 289989, 1300, 14122, 7657, 24384, False, 2.11, 5.65, 150),
("EQSANS_88565", 19362, 4000, 14122, 45486, 62172, False, 10.02, 13.2, 743),
("EQSANS_88901", 340431, 8000, 14122, 67202, 83868, False, 11.99, 14.62, 56013),
)

run_sets = [{k: v for k, v in zip(keys, value)} for value in values]
34 changes: 15 additions & 19 deletions tests/unit/drtsans/tof/eqsans/test_api.py
@@ -59,18 +59,16 @@ def test_load_all_files_simple_interval(datarepo_dir):
assert len(loaded.sample) == 1
history = loaded.sample[0].data.getHistory()

assert history.size() == 11
assert history.size() == 10
assert history.getAlgorithm(0).name() == "LoadEventNexus"
assert history.getAlgorithm(0).getProperty("Filename").value.endswith("sns/eqsans/EQSANS_105428.nxs.h5")
assert history.getAlgorithm(2).name() == "MoveInstrumentComponent"
assert history.getAlgorithm(3).name() == "ChangeBinOffset"
assert history.getAlgorithm(4).name() == "SetInstrumentParameter"
# assert history.getAlgorithm(4).name() == "ModeratorTZero"
assert history.getAlgorithm(6).name() == "MoveInstrumentComponent"
assert history.getAlgorithm(7).name() == "ConvertUnits"
assert history.getAlgorithm(8).name() == "Rebin"
assert history.getAlgorithm(9).name() == "SetUncertainties"
assert history.getAlgorithm(10).name() == "AddSampleLogMultiple"
assert history.getAlgorithm(3).name() == "SetInstrumentParameter"
assert history.getAlgorithm(5).name() == "MoveInstrumentComponent"
assert history.getAlgorithm(6).name() == "ConvertUnits"
assert history.getAlgorithm(7).name() == "Rebin"
assert history.getAlgorithm(8).name() == "SetUncertainties"
assert history.getAlgorithm(9).name() == "AddSampleLogMultiple"

assert loaded.background.data is None
assert loaded.background_transmission.data is None
@@ -86,7 +84,7 @@

# check interval
w = loaded.sample[0].data
assert int(w.extractY().sum()) == 706
assert int(w.extractY().sum()) == 773

# Change reduction input and rerun load_all_files
reduction_input["configuration"]["useDefaultMask"] = True
@@ -132,18 +130,16 @@ def test_load_all_files_simple(datarepo_dir):
assert len(loaded.sample) == 1
history = loaded.sample[0].data.getHistory()

assert history.size() == 11
assert history.size() == 10
assert history.getAlgorithm(0).name() == "LoadEventNexus"
assert history.getAlgorithm(0).getProperty("Filename").value.endswith("sns/eqsans/EQSANS_105428.nxs.h5")
assert history.getAlgorithm(2).name() == "MoveInstrumentComponent"
assert history.getAlgorithm(3).name() == "ChangeBinOffset"
assert history.getAlgorithm(4).name() == "SetInstrumentParameter"
# assert history.getAlgorithm(4).name() == "ModeratorTZero"
assert history.getAlgorithm(6).name() == "MoveInstrumentComponent"
assert history.getAlgorithm(7).name() == "ConvertUnits"
assert history.getAlgorithm(8).name() == "Rebin"
assert history.getAlgorithm(9).name() == "SetUncertainties"
assert history.getAlgorithm(10).name() == "AddSampleLogMultiple"
assert history.getAlgorithm(3).name() == "SetInstrumentParameter"
assert history.getAlgorithm(5).name() == "MoveInstrumentComponent"
assert history.getAlgorithm(6).name() == "ConvertUnits"
assert history.getAlgorithm(7).name() == "Rebin"
assert history.getAlgorithm(8).name() == "SetUncertainties"
assert history.getAlgorithm(9).name() == "AddSampleLogMultiple"

assert loaded.background.data is None
assert loaded.background_transmission.data is None
22 changes: 0 additions & 22 deletions tests/unit/drtsans/tof/eqsans/test_correct_frame.py
@@ -338,27 +338,5 @@ def test_correct_emission_time_30Hz(clean_workspace):
assert_allclose(w.getSpectrum(0).getTofs() * 10000 * h / (z * m), expected_wl, rtol=1e-4)


def test_correct_tof_offset(clean_workspace):
# Make a simple workspace with correct distances and add tofs to it
w = CreateSampleWorkspace("Event", NumBanks=1, BankPixelWidth=1, NumEvents=10)
clean_workspace(w)

starting_tofs = w.getSpectrum(0).getTofs()

# set the workspace to frame_skipping, the tofs should be unchanged
SampleLogs(w).insert("is_frame_skipping", 1)
# run correct_tof_offset on workspace
correct_frame.correct_tof_offset(w)
# compare starting and final tofs
assert_allclose(w.getSpectrum(0).getTofs(), starting_tofs)

# set the workspace to not frame_skipping, the tofs should be changed by 664.7
SampleLogs(w).insert("is_frame_skipping", 0)
# run correct_tof_offset on workspace
correct_frame.correct_tof_offset(w)
# compare starting and final tofs with expected difference
assert_allclose(w.getSpectrum(0).getTofs(), starting_tofs - 664.7)


if __name__ == "__main__":
pytest.main([__file__])
12 changes: 6 additions & 6 deletions tests/unit/drtsans/tof/eqsans/test_load.py
@@ -96,10 +96,10 @@ def test_merge_Data(datarepo_dir):
assert merged_sample_logs.proton_charge.size() == 12933 + 17343 + 4341

# Check integrated intensity increases as the total sum
assert mtd[str(ws0)].extractY().sum() == 284923
assert mtd[str(ws1)].extractY().sum() == 1368485
assert mtd[str(ws0)].extractY().sum() == 288830
assert mtd[str(ws1)].extractY().sum() == 1338500
assert mtd[str(ws2)].extractY().sum() == 65694
assert mtd[str(merged_workspaces)].extractY().sum() == 284923 + 1368485 + 65694
assert mtd[str(merged_workspaces)].extractY().sum() == 288830 + 1338500 + 65694

mtd.remove(str(ws0))
mtd.remove(str(ws1))
@@ -233,11 +233,11 @@ def test_load_and_split_and_histogram(datarepo_dir, clean_workspace):

# check values for Y and E don't change unexpectedly
assert filtered_ws.getItem(0).extractY().max() == 4
assert filtered_ws.getItem(1).extractY().max() == 25
assert filtered_ws.getItem(1).extractY().max() == 27
assert filtered_ws.getItem(2).extractY().max() == 3
assert filtered_ws.getItem(0).extractE().max() == pytest.approx(2, abs=1e-7)
assert filtered_ws.getItem(1).extractE().max() == pytest.approx(5, abs=1e-7)
assert filtered_ws.getItem(2).extractE().max() == pytest.approx(1.7320508, abs=1e-7)
assert filtered_ws.getItem(1).extractE().max() == pytest.approx(5.2, abs=0.1)
assert filtered_ws.getItem(2).extractE().max() == pytest.approx(1.7, abs=0.1)

# check metadata is set correctly
assert SampleLogs(filtered_ws.getItem(0)).slice.value == 1
4 changes: 2 additions & 2 deletions tests/unit/drtsans/tof/eqsans/test_normalization.py
@@ -207,7 +207,7 @@ def test_normalize_by_monitor(flux_to_monitor, data_ws, monitor_ws, temp_workspa
data_workspace_normalized = SumSpectra(data_workspace_normalized, OutputWorkspace=data_workspace_normalized.name())
# Second we integrate over all wavelength bins and check the value will not change as the code in the
# repository evolves
assert sum(data_workspace_normalized.dataY(0)) == approx(0.621, abs=1e-03)
assert sum(data_workspace_normalized.dataY(0)) == approx(0.552, abs=1e-03)


@pytest.mark.datarepo
@@ -315,7 +315,7 @@ def test_normalize_by_flux(beam_flux, flux_to_monitor, data_ws, monitor_ws, temp
# then we integrate this single spectrum over all wavelengths
total_normalized_intensity = sum(summed_normalized.readY(0))
# here we just check that the result will not change as the code in the repository evolves
assert total_normalized_intensity == approx(0.621, abs=1e-3)
assert total_normalized_intensity == approx(0.552, abs=1e-3)

#
# Third we normalize by run duration with method='time'
