From b42fc7be40fd26a62c1e5bf36c235270e9a6a1a3 Mon Sep 17 00:00:00 2001 From: korttravis Date: Thu, 5 Dec 2024 13:30:59 -0500 Subject: [PATCH] Reduction process: effective-instrument geometry. (#461) * Reduction process: effective-instrument geometry. At the end of the reduction process, the instrument associated with each output workspace is modified. A new _effective_ instrument is substituted for each workspace. This instrument has the same number of pixels as there are group-ids, and the location of each pixel is set to the mean location of the _unmasked_ original pixels participating in that pixel group. By implication, this substitution results in there being one pixel per spectrum in the output workspaces. This commit includes the following changes: * A new `EffectiveInstrumentRecipe` implemented as a subrecipe called by `ReductionRecipe` for each grouping; * Modifications to `LocalDataService.writeReductionData` to use updated Mantid algorithms, now allowing limited I/O of programmatically-generated instruments; * Modification of `ReductionIngredients` to include the _unmasked_ `PixelGroup`s; * Modification of `SousChef.prepReductionIngredients` to prepare the _unmasked_ `PixelGroup`s; * Modification of existing unit tests, and implementation of new unit tests to verify the new subrecipe's execution. Associated with this PR are three Mantid PRs, including changes to the `EditInstrumentGeometry`, `SaveNexusESS`, and `LoadNexusProcessed` algorithms. 
* [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: Kort Travis Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- environment.yml | 4 +- .../EffectiveInstrumentIngredients.py | 11 + .../dao/ingredients/ReductionIngredients.py | 5 + src/snapred/backend/dao/state/PixelGroup.py | 2 +- .../backend/data/DataFactoryService.py | 19 +- src/snapred/backend/data/LocalDataService.py | 55 ++++- src/snapred/backend/error/ContinueWarning.py | 3 +- .../backend/error/RecoverableException.py | 3 +- .../recipe/EffectiveInstrumentRecipe.py | 81 +++++++ ...ixelGroupingParametersCalculationRecipe.py | 2 +- src/snapred/backend/recipe/Recipe.py | 2 +- .../recipe/ReductionGroupProcessingRecipe.py | 25 --- src/snapred/backend/recipe/ReductionRecipe.py | 12 +- .../backend/service/ReductionService.py | 39 ++-- src/snapred/backend/service/SousChef.py | 25 ++- src/snapred/resources/application.yml | 6 + src/snapred/ui/workflow/DiffCalWorkflow.py | 2 +- .../cis_tests/effective_instrument_script.py | 119 +++++++++++ tests/resources/application.yml | 5 + .../calibration/ReductionIngredients.json | 202 ++++++++++++++++++ .../backend/data/test_DataFactoryService.py | 14 +- .../backend/data/test_LocalDataService.py | 191 +++++++++++++++-- .../recipe/test_EffectiveInstrumentRecipe.py | 190 ++++++++++++++++ .../recipe/test_PreprocessReductionRecipe.py | 15 +- .../backend/recipe/test_ReductionRecipe.py | 139 +++++++++++- .../backend/service/test_ReductionService.py | 69 +++++- tests/unit/backend/service/test_SousChef.py | 65 +++--- .../unit/ui/workflow/test_DiffCalWorkflow.py | 31 ++- tests/util/SculleryBoy.py | 7 +- 29 files changed, 1187 insertions(+), 156 deletions(-) create mode 100644 src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py create mode 100644 src/snapred/backend/recipe/EffectiveInstrumentRecipe.py create mode 100644 
tests/cis_tests/effective_instrument_script.py create mode 100644 tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py diff --git a/environment.yml b/environment.yml index 9778e71fc..6610d510d 100644 --- a/environment.yml +++ b/environment.yml @@ -2,12 +2,12 @@ name: SNAPRed channels: - conda-forge - default -- mantid-ornl/label/rc +- mantid/label/nightly dependencies: - python=3.10 - pip - pydantic>=2.7.3,<3 -- mantidworkbench=6.10.0.2rc1 +- mantidworkbench>=6.11.20241111 - qtpy - pre-commit - pytest diff --git a/src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py b/src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py new file mode 100644 index 000000000..04cdef59f --- /dev/null +++ b/src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py @@ -0,0 +1,11 @@ +from pydantic import BaseModel, ConfigDict + +from snapred.backend.dao.state.PixelGroup import PixelGroup + + +class EffectiveInstrumentIngredients(BaseModel): + unmaskedPixelGroup: PixelGroup + + model_config = ConfigDict( + extra="forbid", + ) diff --git a/src/snapred/backend/dao/ingredients/ReductionIngredients.py b/src/snapred/backend/dao/ingredients/ReductionIngredients.py index c25955881..ea9ce4e4c 100644 --- a/src/snapred/backend/dao/ingredients/ReductionIngredients.py +++ b/src/snapred/backend/dao/ingredients/ReductionIngredients.py @@ -8,6 +8,7 @@ # These are from the same `__init__` module, so for the moment, we require the full import specifications. # (That is, not just "from snapred.backend.dao.ingredients import ...".) 
from snapred.backend.dao.ingredients.ArtificialNormalizationIngredients import ArtificialNormalizationIngredients +from snapred.backend.dao.ingredients.EffectiveInstrumentIngredients import EffectiveInstrumentIngredients from snapred.backend.dao.ingredients.GenerateFocussedVanadiumIngredients import GenerateFocussedVanadiumIngredients from snapred.backend.dao.ingredients.PreprocessReductionIngredients import PreprocessReductionIngredients from snapred.backend.dao.ingredients.ReductionGroupProcessingIngredients import ReductionGroupProcessingIngredients @@ -22,6 +23,7 @@ class ReductionIngredients(BaseModel): timestamp: float pixelGroups: List[PixelGroup] + unmaskedPixelGroups: List[PixelGroup] # these should come from calibration / normalization records # But will not exist if we proceed without calibration / normalization @@ -63,6 +65,9 @@ def applyNormalization(self, groupingIndex: int) -> ApplyNormalizationIngredient pixelGroup=self.pixelGroups[groupingIndex], ) + def effectiveInstrument(self, groupingIndex: int) -> EffectiveInstrumentIngredients: + return EffectiveInstrumentIngredients(unmaskedPixelGroup=self.unmaskedPixelGroups[groupingIndex]) + model_config = ConfigDict( extra="forbid", ) diff --git a/src/snapred/backend/dao/state/PixelGroup.py b/src/snapred/backend/dao/state/PixelGroup.py index e0949f835..33b2764bd 100644 --- a/src/snapred/backend/dao/state/PixelGroup.py +++ b/src/snapred/backend/dao/state/PixelGroup.py @@ -10,7 +10,7 @@ class PixelGroup(BaseModel): - # allow initializtion from either dictionary or list + # allow initialization from either dictionary or list pixelGroupingParameters: Union[List[PixelGroupingParameters], Dict[int, PixelGroupingParameters]] = {} nBinsAcrossPeakWidth: int = Config["calibration.diffraction.nBinsAcrossPeakWidth"] focusGroup: FocusGroup diff --git a/src/snapred/backend/data/DataFactoryService.py b/src/snapred/backend/data/DataFactoryService.py index 47f92aa3b..dcf28a8f5 100644 --- 
a/src/snapred/backend/data/DataFactoryService.py +++ b/src/snapred/backend/data/DataFactoryService.py @@ -191,24 +191,21 @@ def getReductionState(self, runId: str, useLiteMode: bool) -> ReductionState: return reductionState @validate_call - def getReductionDataPath(self, runId: str, useLiteMode: bool, version: int) -> Path: - return self.lookupService._constructReductionDataPath(runId, useLiteMode, version) + def getReductionDataPath(self, runId: str, useLiteMode: bool, timestamp: float) -> Path: + return self.lookupService._constructReductionDataPath(runId, useLiteMode, timestamp) @validate_call - def getReductionRecord(self, runId: str, useLiteMode: bool, version: Optional[int] = None) -> ReductionRecord: - """ - If no version is passed, will use the latest version applicable to runId - """ - return self.lookupService.readReductionRecord(runId, useLiteMode, version) + def getReductionRecord(self, runId: str, useLiteMode: bool, timestamp: float) -> ReductionRecord: + return self.lookupService.readReductionRecord(runId, useLiteMode, timestamp) @validate_call - def getReductionData(self, runId: str, useLiteMode: bool, version: int) -> ReductionRecord: - return self.lookupService.readReductionData(runId, useLiteMode, version) + def getReductionData(self, runId: str, useLiteMode: bool, timestamp: float) -> ReductionRecord: + return self.lookupService.readReductionData(runId, useLiteMode, timestamp) @validate_call - def getCompatibleReductionMasks(self, runNumber: str, useLiteMode: bool) -> List[WorkspaceName]: + def getCompatibleReductionMasks(self, runId: str, useLiteMode: bool) -> List[WorkspaceName]: # Assemble a list of masks, both resident and otherwise, that are compatible with the current reduction - return self.lookupService.getCompatibleReductionMasks(runNumber, useLiteMode) + return self.lookupService.getCompatibleReductionMasks(runId, useLiteMode) ##### WORKSPACE METHODS ##### diff --git a/src/snapred/backend/data/LocalDataService.py 
b/src/snapred/backend/data/LocalDataService.py index 8d0840555..2de734071 100644 --- a/src/snapred/backend/data/LocalDataService.py +++ b/src/snapred/backend/data/LocalDataService.py @@ -243,7 +243,7 @@ def getIPTS(self, runNumber: str, instrumentName: str = Config["instrument.name" def stateExists(self, runId: str) -> bool: stateId, _ = self.generateStateId(runId) statePath = self.constructCalibrationStateRoot(stateId) - # Shouldnt need to check lite as we init both at the same time + # Shouldn't need to check lite as we init both at the same time return statePath.exists() def workspaceIsInstance(self, wsName: str, wsType: Any) -> bool: @@ -382,7 +382,7 @@ def _constructReductionRecordFilePath(self, runNumber: str, useLiteMode: bool, t @validate_call def _constructReductionDataFilePath(self, runNumber: str, useLiteMode: bool, timestamp: float) -> Path: fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] filePath = self._constructReductionDataPath(runNumber, useLiteMode, timestamp) / fileName return filePath @@ -649,6 +649,29 @@ def writeReductionData(self, record: ReductionRecord): -- `writeReductionRecord` must have been called prior to this method. """ + # Implementation notes: + # + # 1) For SNAPRed's current reduction-workflow output implementation: + # + # * In case an effective instrument has been substituted, + # `SaveNexusESS` _must_ be used, `SaveNexus` by itself won't work; + # + # * ONLY a simplified instrument geometry can be saved, + # for example, as produced by `EditInstrumentGeometry`: + # this geometry includes no monitors, only a single non-nested detector bank, and no parameter map. + # + # * `LoadNexus` should work with all of this _automatically_. + # + # Hopefully this will eventually be fixed, but right now this is a limitation of Mantid's + # instrument-I/O implementation (for non XML-based instruments). 
+ # + # 2) For SNAPRed internal use: + # if `reduction.output.useEffectiveInstrument` is set to false in "application.yml", + # output workspaces will be saved without converting their instruments to the reduced form. + # Both of these alternatives are retained to allow some flexibility in what specifically + # is saved with the reduction data. + # + runNumber, useLiteMode, timestamp = record.runNumber, record.useLiteMode, record.timestamp filePath = self._constructReductionDataFilePath(runNumber, useLiteMode, timestamp) @@ -659,14 +682,40 @@ def writeReductionData(self, record: ReductionRecord): # WARNING: `writeReductionRecord` must be called before `writeReductionData`. raise RuntimeError(f"reduction version directories {filePath.parent} do not exist") + useEffectiveInstrument = Config["reduction.output.useEffectiveInstrument"] + for ws in record.workspaceNames: # Append workspaces to hdf5 file, in order of the `workspaces` list - self.writeWorkspace(filePath.parent, Path(filePath.name), ws, append=True) if ws.tokens("workspaceType") == wngt.REDUCTION_PIXEL_MASK: + # The mask workspace always uses the non-reduced instrument. 
+ self.mantidSnapper.SaveNexus( + f"Append workspace '{ws}' to reduction output", + InputWorkspace=ws, + Filename=str(filePath), + Append=True, + ) + self.mantidSnapper.executeQueue() + # Write an additional copy of the combined pixel mask as a separate `SaveDiffCal`-format file maskFilename = ws + ".h5" self.writePixelMask(filePath.parent, Path(maskFilename), ws) + else: + if useEffectiveInstrument: + self.mantidSnapper.SaveNexusESS( + f"Append workspace '{ws}' to reduction output", + InputWorkspace=ws, + Filename=str(filePath), + Append=True, + ) + else: + self.mantidSnapper.SaveNexus( + f"Append workspace '{ws}' to reduction output", + InputWorkspace=ws, + Filename=str(filePath), + Append=True, + ) + self.mantidSnapper.executeQueue() # Append the "metadata" group, containing the `ReductionRecord` metadata with h5py.File(filePath, "a") as h5: diff --git a/src/snapred/backend/error/ContinueWarning.py b/src/snapred/backend/error/ContinueWarning.py index 69869ae95..285c5487a 100644 --- a/src/snapred/backend/error/ContinueWarning.py +++ b/src/snapred/backend/error/ContinueWarning.py @@ -30,8 +30,7 @@ def flags(self): return self.model.flags def __init__(self, message: str, flags: "Type" = 0): - ContinueWarning.Model.update_forward_refs() - ContinueWarning.Model.model_rebuild(force=True) + ContinueWarning.Model.model_rebuild(force=True) # replaces: `update_forward_refs` method self.model = ContinueWarning.Model(message=message, flags=flags) super().__init__(message) diff --git a/src/snapred/backend/error/RecoverableException.py b/src/snapred/backend/error/RecoverableException.py index ad96d254a..a9f90c3c1 100644 --- a/src/snapred/backend/error/RecoverableException.py +++ b/src/snapred/backend/error/RecoverableException.py @@ -37,8 +37,7 @@ def data(self): return self.model.data def __init__(self, message: str, flags: "Type" = 0, data: Optional[Any] = None): - RecoverableException.Model.update_forward_refs() - RecoverableException.Model.model_rebuild(force=True) + 
RecoverableException.Model.model_rebuild(force=True) # replaces: `update_forward_refs` method self.model = RecoverableException.Model(message=message, flags=flags, data=data) logger.error(f"{extractTrueStacktrace()}") super().__init__(message) diff --git a/src/snapred/backend/recipe/EffectiveInstrumentRecipe.py b/src/snapred/backend/recipe/EffectiveInstrumentRecipe.py new file mode 100644 index 000000000..cd79bec05 --- /dev/null +++ b/src/snapred/backend/recipe/EffectiveInstrumentRecipe.py @@ -0,0 +1,81 @@ +from typing import Any, Dict, List, Tuple + +import numpy as np + +from snapred.backend.dao.ingredients import EffectiveInstrumentIngredients as Ingredients +from snapred.backend.error.AlgorithmException import AlgorithmException +from snapred.backend.log.logger import snapredLogger +from snapred.backend.recipe.Recipe import Recipe +from snapred.meta.decorators.Singleton import Singleton +from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceName + +logger = snapredLogger.getLogger(__name__) + +Pallet = Tuple[Ingredients, Dict[str, str]] + + +@Singleton +class EffectiveInstrumentRecipe(Recipe[Ingredients]): + def unbagGroceries(self, groceries: Dict[str, Any]): + self.inputWS = groceries["inputWorkspace"] + self.outputWS = groceries.get("outputWorkspace", groceries["inputWorkspace"]) + + def chopIngredients(self, ingredients): + self.unmaskedPixelGroup = ingredients.unmaskedPixelGroup + + def queueAlgos(self): + """ + Queues up the processing algorithms for the recipe. + Requires: unbagged groceries. + """ + # `EditInstrumentGeometry` modifies in-place, so we need to clone if a distinct output workspace is required. 
+ if self.outputWS != self.inputWS: + self.mantidSnapper.CloneWorkspace( + "Clone workspace for reduced instrument", OutputWorkspace=self.outputWS, InputWorkspace=self.inputWS + ) + self.mantidSnapper.EditInstrumentGeometry( + f"Editing instrument geometry for grouping '{self.unmaskedPixelGroup.focusGroup.name}'", + Workspace=self.outputWS, + # TODO: Mantid defect: allow SI units here! + L2=self.unmaskedPixelGroup.L2, + Polar=np.rad2deg(self.unmaskedPixelGroup.twoTheta), + Azimuthal=np.rad2deg(self.unmaskedPixelGroup.azimuth), + # + InstrumentName=f"SNAP_{self.unmaskedPixelGroup.focusGroup.name}", + ) + + def validateInputs(self, ingredients: Ingredients, groceries: Dict[str, WorkspaceName]): + pass + + def execute(self): + """ + Final step in a recipe, executes the queued algorithms. + Requires: queued algorithms. + """ + try: + self.mantidSnapper.executeQueue() + except AlgorithmException as e: + errorString = str(e) + raise RuntimeError(errorString) from e + + def cook(self, ingredients, groceries: Dict[str, str]) -> Dict[str, Any]: + """ + Main interface method for the recipe. + Given the ingredients and groceries, it prepares, executes and returns the final workspace. + """ + self.prep(ingredients, groceries) + self.execute() + return self.outputWS + + def cater(self, shipment: List[Pallet]) -> List[Dict[str, Any]]: + """ + A secondary interface method for the recipe. + It is a batched version of cook. + Given a shipment of ingredients and groceries, it prepares, executes and returns the final workspaces. 
+ """ + output = [] + for ingredients, grocery in shipment: + self.prep(ingredients, grocery) + output.append(self.outputWS) + self.execute() + return output diff --git a/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py b/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py index 6569051f5..44632afb1 100644 --- a/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py +++ b/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py @@ -36,7 +36,7 @@ def executeRecipe( "Calling algorithm", Ingredients=ingredients.json(), GroupingWorkspace=groceries["groupingWorkspace"], - MaskWorkspace=groceries.get("MaskWorkspace", ""), + MaskWorkspace=groceries.get("maskWorkspace", ""), ) self.mantidSnapper.executeQueue() # NOTE contradictory issues with Callbacks between GUI and unit tests diff --git a/src/snapred/backend/recipe/Recipe.py b/src/snapred/backend/recipe/Recipe.py index 9723305e3..9dd0bf940 100644 --- a/src/snapred/backend/recipe/Recipe.py +++ b/src/snapred/backend/recipe/Recipe.py @@ -47,7 +47,7 @@ def unbagGroceries(self, groceries: Dict[str, WorkspaceName]): @abstractmethod def queueAlgos(self): """ - Queues up the procesing algorithms for the recipe. + Queues up the processing algorithms for the recipe. Requires: unbagged groceries and chopped ingredients. """ diff --git a/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py b/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py index c39016948..b26a19f5d 100644 --- a/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py +++ b/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py @@ -28,31 +28,6 @@ def queueAlgos(self): Queues up the processing algorithms for the recipe. Requires: unbagged groceries. 
""" - # TODO: This is all subject to change based on EWM 4798 - # if self.rawInput is not None: - # logger.info("Processing Reduction Group...") - # estimateGeometryAlgo = EstimateFocusedInstrumentGeometry() - # estimateGeometryAlgo.initialize() - # estimateGeometryAlgo.setProperty("GroupingWorkspace", self.groupingWS) - # estimateGeometryAlgo.setProperty("OutputWorkspace", self.geometryOutputWS) - # try: - # estimateGeometryAlgo.execute() - # data["focusParams"] = estimateGeometryAlgo.getPropertyValue("FocusParams") - # except RuntimeError as e: - # errorString = str(e) - # raise RuntimeError(errorString) from e - # else: - # raise NotImplementedError - - # self.mantidSnapper.EditInstrumentGeometry( - # "Editing Instrument Geometry...", - # Workspace=self.geometryOutputWS, - # L2=data["focusParams"].L2, - # Polar=data["focusParams"].Polar, - # Azimuthal=data["focusParams"].Azimuthal, - # ) - # self.rawInput = self.geometryOutputWS - self.mantidSnapper.ConvertUnits( "Converting to TOF...", InputWorkspace=self.rawInput, diff --git a/src/snapred/backend/recipe/ReductionRecipe.py b/src/snapred/backend/recipe/ReductionRecipe.py index cc6253e22..aaf905d8b 100644 --- a/src/snapred/backend/recipe/ReductionRecipe.py +++ b/src/snapred/backend/recipe/ReductionRecipe.py @@ -3,11 +3,13 @@ from snapred.backend.dao.ingredients import ReductionIngredients as Ingredients from snapred.backend.log.logger import snapredLogger from snapred.backend.recipe.ApplyNormalizationRecipe import ApplyNormalizationRecipe +from snapred.backend.recipe.EffectiveInstrumentRecipe import EffectiveInstrumentRecipe from snapred.backend.recipe.GenerateFocussedVanadiumRecipe import GenerateFocussedVanadiumRecipe from snapred.backend.recipe.GenericRecipe import ArtificialNormalizationRecipe from snapred.backend.recipe.PreprocessReductionRecipe import PreprocessReductionRecipe from snapred.backend.recipe.Recipe import Recipe, WorkspaceName from snapred.backend.recipe.ReductionGroupProcessingRecipe import 
ReductionGroupProcessingRecipe +from snapred.meta.Config import Config from snapred.meta.mantid.WorkspaceNameGenerator import ValueFormatter as wnvf from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceNameGenerator as wng @@ -61,7 +63,7 @@ def unbagGroceries(self, groceries: Dict[str, Any]): self.groceries = groceries.copy() self.sampleWs = groceries["inputWorkspace"] self.normalizationWs = groceries.get("normalizationWorkspace", "") - self.maskWs = groceries.get("maskWorkspace", "") + self.maskWs = groceries.get("combinedMask", "") self.groupingWorkspaces = groceries["groupingWorkspaces"] def _cloneWorkspace(self, inputWorkspace: str, outputWorkspace: str) -> str: @@ -274,6 +276,14 @@ def execute(self): ) self._cloneIntermediateWorkspace(sampleClone, f"sample_ApplyNormalization_{groupingIndex}") + # 5. Replace the instrument with the effective instrument for this grouping + if Config["reduction.output.useEffectiveInstrument"]: + self._applyRecipe( + EffectiveInstrumentRecipe, + self.ingredients.effectiveInstrument(groupingIndex), + inputWorkspace=sampleClone, + ) + # Cleanup outputs.append(sampleClone) diff --git a/src/snapred/backend/service/ReductionService.py b/src/snapred/backend/service/ReductionService.py index 7f8b08069..1ad5a1d0c 100644 --- a/src/snapred/backend/service/ReductionService.py +++ b/src/snapred/backend/service/ReductionService.py @@ -1,7 +1,7 @@ import json from collections.abc import Iterable from pathlib import Path -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional from snapred.backend.dao.ingredients import ( ArtificialNormalizationIngredients, @@ -183,10 +183,12 @@ def reduction(self, request: ReductionRequest): groupingResults = self.fetchReductionGroupings(request) request.focusGroups = groupingResults["focusGroups"] - ingredients = self.prepReductionIngredients(request) - ingredients.artificialNormalizationIngredients = request.artificialNormalizationIngredients + # Fetch groceries first: 
`prepReductionIngredients` will need the combined mask. groceries = self.fetchReductionGroceries(request) + + ingredients = self.prepReductionIngredients(request, groceries.get("combinedPixelMask")) + # attach the list of grouping workspaces to the grocery dictionary groceries["groupingWorkspaces"] = groupingResults["groupingWorkspaces"] @@ -306,18 +308,15 @@ def prepCombinedMask( return combinedMask @FromString - def prepReductionIngredients(self, request: ReductionRequest) -> ReductionIngredients: + def prepReductionIngredients( + self, request: ReductionRequest, combinedPixelMask: Optional[WorkspaceName] = None + ) -> ReductionIngredients: """ Prepare the needed ingredients for calculating reduction. Requires: - - runNumber - - lite mode flag - - timestamp - - at least one focus group specified - - a smoothing parameter - - a calibrant sample path - - a peak threshold + - reduction request + - an optional combined mask workspace :param request: a reduction request :type request: ReductionRequest @@ -334,7 +333,9 @@ def prepReductionIngredients(self, request: ReductionRequest) -> ReductionIngred versions=request.versions, ) # TODO: Skip calibrant sample if there is no calibrant - return self.sousChef.prepReductionIngredients(farmFresh) + ingredients = self.sousChef.prepReductionIngredients(farmFresh, combinedPixelMask) + ingredients.artificialNormalizationIngredients = request.artificialNormalizationIngredients + return ingredients @FromString def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: @@ -353,7 +354,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: - "inputworkspace" - "diffcalWorkspace" - "normalizationWorkspace" - - "maskWorkspace" + - "combinedPixelMask" :rtype: Dict[str, Any] """ @@ -372,7 +373,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: # Fetch pixel masks residentMasks = {} - combinedMask = None + combinedPixelMask = None if request.pixelMasks: 
for mask in request.pixelMasks: match mask.tokens("workspaceType"): @@ -388,7 +389,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: raise RuntimeError( f"reduction pixel mask '{mask}' has unexpected workspace-type '{mask.tokens('workspaceType')}'" # noqa: E501 ) - if calVersion: + if calVersion is not None: # WARNING: version may be _zero_! self.groceryClerk.name("diffcalMaskWorkspace").diffcal_mask(request.runNumber, calVersion).useLiteMode( request.useLiteMode ).add() @@ -398,19 +399,19 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: **residentMasks, ) # combine all of the pixel masks, for application and final output - combinedMask = self.prepCombinedMask( + combinedPixelMask = self.prepCombinedMask( request.runNumber, request.useLiteMode, request.timestamp, maskGroceries.values() ) # gather the input workspace and the diffcal table self.groceryClerk.name("inputWorkspace").neutron(request.runNumber).useLiteMode(request.useLiteMode).add() - if calVersion: + if calVersion is not None: self.groceryClerk.name("diffcalWorkspace").diffcal_table(request.runNumber, calVersion).useLiteMode( request.useLiteMode ).add() - if normVersion: + if normVersion is not None: # WARNING: version may be _zero_! 
self.groceryClerk.name("normalizationWorkspace").normalization(request.runNumber, normVersion).useLiteMode( request.useLiteMode ).add() @@ -421,7 +422,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: ) groceries = self.groceryService.fetchGroceryDict( groceryDict=self.groceryClerk.buildDict(), - **({"maskWorkspace": combinedMask} if combinedMask else {}), + **({"combinedPixelMask": combinedPixelMask} if combinedPixelMask else {}), ) self._markWorkspaceMetadata(request, groceries["inputWorkspace"]) diff --git a/src/snapred/backend/service/SousChef.py b/src/snapred/backend/service/SousChef.py index cc6182ae3..804b49aab 100644 --- a/src/snapred/backend/service/SousChef.py +++ b/src/snapred/backend/service/SousChef.py @@ -1,7 +1,7 @@ import os from copy import deepcopy from pathlib import Path -from typing import Dict, List, Tuple +from typing import Dict, List, Optional, Tuple import pydantic @@ -31,6 +31,7 @@ from snapred.backend.service.Service import Service from snapred.meta.Config import Config from snapred.meta.decorators.Singleton import Singleton +from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceName logger = snapredLogger.getLogger(__name__) @@ -94,13 +95,16 @@ def prepFocusGroup(self, ingredients: FarmFreshIngredients) -> FocusGroup: groupingMap = self.dataFactoryService.getGroupingMap(ingredients.runNumber) return groupingMap.getMap(ingredients.useLiteMode)[ingredients.focusGroup.name] - def prepPixelGroup(self, ingredients: FarmFreshIngredients) -> PixelGroup: + def prepPixelGroup( + self, ingredients: FarmFreshIngredients, pixelMask: Optional[WorkspaceName] = None + ) -> PixelGroup: groupingSchema = ingredients.focusGroup.name key = ( ingredients.runNumber, ingredients.useLiteMode, groupingSchema, ingredients.calibrantSamplePath, + pixelMask, ) if key not in self._pixelGroupCache: focusGroup = self.prepFocusGroup(ingredients) @@ -112,7 +116,7 @@ def prepPixelGroup(self, ingredients: FarmFreshIngredients) 
-> PixelGroup: self.groceryClerk.name("groupingWorkspace").fromRun(ingredients.runNumber).grouping( focusGroup.name ).useLiteMode(ingredients.useLiteMode).add() - groceries = self.groceryService.fetchGroceryDict(self.groceryClerk.buildDict()) + groceries = self.groceryService.fetchGroceryDict(self.groceryClerk.buildDict(), maskWorkspace=pixelMask) data = PixelGroupingParametersCalculationRecipe().executeRecipe(pixelIngredients, groceries) self._pixelGroupCache[key] = PixelGroup( @@ -123,12 +127,14 @@ def prepPixelGroup(self, ingredients: FarmFreshIngredients) -> PixelGroup: ) return deepcopy(self._pixelGroupCache[key]) - def prepManyPixelGroups(self, ingredients: FarmFreshIngredients) -> List[PixelGroup]: + def prepManyPixelGroups( + self, ingredients: FarmFreshIngredients, pixelMask: Optional[WorkspaceName] = None + ) -> List[PixelGroup]: pixelGroups = [] ingredients_ = ingredients.model_copy() for focusGroup in ingredients.focusGroups: ingredients_.focusGroup = focusGroup - pixelGroups.append(self.prepPixelGroup(ingredients_)) + pixelGroups.append(self.prepPixelGroup(ingredients_, pixelMask)) return pixelGroups def _getInstrumentDefinitionFilename(self, useLiteMode: bool) -> str: @@ -243,7 +249,7 @@ def _pullManyCalibrationDetectorPeaks( def _pullNormalizationRecordFFI( self, ingredients: FarmFreshIngredients, - ) -> Tuple[FarmFreshIngredients, float]: + ) -> Tuple[FarmFreshIngredients, float, Optional[str]]: normalizationRecord = self.dataFactoryService.getNormalizationRecord( ingredients.runNumber, ingredients.useLiteMode, ingredients.versions.normalization ) @@ -255,7 +261,9 @@ def _pullNormalizationRecordFFI( # TODO: Should smoothing parameter be an ingredient? 
return ingredients, smoothingParameter, calibrantSamplePath - def prepReductionIngredients(self, ingredients: FarmFreshIngredients) -> ReductionIngredients: + def prepReductionIngredients( + self, ingredients: FarmFreshIngredients, combinedPixelMask: Optional[WorkspaceName] = None + ) -> ReductionIngredients: ingredients_ = ingredients.model_copy() # some of the reduction ingredients MUST match those used in the calibration/normalization processes ingredients_ = self._pullCalibrationRecordFFI(ingredients_) @@ -266,7 +274,8 @@ def prepReductionIngredients(self, ingredients: FarmFreshIngredients) -> Reducti runNumber=ingredients_.runNumber, useLiteMode=ingredients_.useLiteMode, timestamp=ingredients_.timestamp, - pixelGroups=self.prepManyPixelGroups(ingredients_), + pixelGroups=self.prepManyPixelGroups(ingredients_, combinedPixelMask), + unmaskedPixelGroups=self.prepManyPixelGroups(ingredients_), smoothingParameter=smoothingParameter, calibrantSamplePath=ingredients_.calibrantSamplePath, peakIntensityThreshold=self._getThresholdFromCalibrantSample(ingredients_.calibrantSamplePath), diff --git a/src/snapred/resources/application.yml b/src/snapred/resources/application.yml index 047fd81a6..f21dc6865 100644 --- a/src/snapred/resources/application.yml +++ b/src/snapred/resources/application.yml @@ -102,6 +102,12 @@ calibration: fitting: minSignal2Noise: 0.0 +reduction: + output: + extension: .nxs + # convert the instrument for the output workspaces into the reduced form + useEffectiveInstrument: false + mantid: workspace: nameTemplate: diff --git a/src/snapred/ui/workflow/DiffCalWorkflow.py b/src/snapred/ui/workflow/DiffCalWorkflow.py index dbcacea81..b406c8c3b 100644 --- a/src/snapred/ui/workflow/DiffCalWorkflow.py +++ b/src/snapred/ui/workflow/DiffCalWorkflow.py @@ -408,7 +408,7 @@ def purgeBadPeaks(self, maxChiSq): self._tweakPeakView, "Too Few Peaks", "Purging would result in fewer than the required 2 peaks for calibration. 
" - "The current set of peaks will be retained.", + + "The current set of peaks will be retained.", QMessageBox.Ok, ) else: diff --git a/tests/cis_tests/effective_instrument_script.py b/tests/cis_tests/effective_instrument_script.py new file mode 100644 index 000000000..f858839d6 --- /dev/null +++ b/tests/cis_tests/effective_instrument_script.py @@ -0,0 +1,119 @@ +from datetime import datetime +from functools import partial +import math +import numpy as np +from pathlib import Path +import re +import sys + +from mantid.simpleapi import mtd + +import snapred +SNAPRed_module_root = Path(snapred.__file__).parent.parent + +from snapred.backend.dao.request.FarmFreshIngredients import FarmFreshIngredients +from snapred.backend.dao.request.ReductionRequest import ReductionRequest +from snapred.backend.data.DataFactoryService import DataFactoryService +from snapred.backend.service.SousChef import SousChef +from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceNameGenerator as wng +from snapred.meta.Config import Config + +# ----------------------------- +# Test helper utility routines: +sys.path.insert(0, str(Path(SNAPRed_module_root).parent / 'tests')) +from util.IPTS_override import IPTS_override +# from util.helpers import timestampFromString + +def timestampFromString(timestamp_str) -> float: + # Recover a float timestamp from a non-isoformat timestamp string + regx = re.compile(r"([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2})([0-9]{2})([0-9]{2})") + Y, M, D, H, m, s = tuple([int(s) for s in regx.match(timestamp_str).group(1, 2, 3, 4, 5, 6)]) + return datetime(Y, M, D, H, m, s).timestamp() + +########################################################### +# If necessary, override the IPTS search directories: ## +# remember to set your "IPTS.root" in "application.yml!## +########################################################### +with IPTS_override(): # defaults to `Config["IPTS.root"]` + + 
####################################################################################################################### + # Step 1: Generate a set of reduction data. Take a look under the output folder and see what its timestamp string is.# + ####################################################################################################################### + + runNumber = "46680" + useLiteMode = True + timestamp_str = "2024-11-15T133125" # Unfortunately, not in iso format + timestamp = timestampFromString(timestamp_str) + + ################################################################################### + # Step 2: Reload the reduction record, and all of the reduction output workspaces.# + ################################################################################### + dataService = DataFactoryService() + sousChef = SousChef() + + reductionRecord = dataService.getReductionData(runNumber, useLiteMode, timestamp) + + ######################################################################################################### + # Step 3: Load the required grouping workspaces, and compute their _unmasked_ pixel-grouping parameters.# + # (Note here that the `ReductionRecord` itself retains only the _masked_ PGP.) # + ######################################################################################################### + + # ... this duplicates the setup part of the reduction process ... 
+ groupingMap = dataService.getGroupingMap(runNumber).getMap(useLiteMode) + request = ReductionRequest( + runNumber=runNumber, + useLiteMode=useLiteMode, + timestamp=timestamp, + focusGroups = list(groupingMap.values()), + keepUnfocused=False, + convertUnitsTo="TOF" + ) + farmFresh = FarmFreshIngredients( + runNumber=request.runNumber, + useLiteMode=request.useLiteMode, + timestamp=request.timestamp, + focusGroups=request.focusGroups, + keepUnfocused=request.keepUnfocused, + convertUnitsTo=request.convertUnitsTo, + versions=request.versions, + ) + ingredients = sousChef.prepReductionIngredients(farmFresh) + # ... now the required PGP are available in `ingredients.unmaskedPixelGroups: List[PixelGroup]` ... + unmaskedPixelGroups = {pg.focusGroup.name: pg for pg in ingredients.unmaskedPixelGroups} + + ################################################################################################################## + # Step 4: For the output workspace corresponding to each grouping, verify that the effective instrument consists # + # of one pixel per group-id, with its location matching the _unmasked_ PGP for that grouping and group-id. # + ################################################################################################################## + + # For each grouping, verify that the output workspace's effective instrument has been set up as expected. + for grouping in unmaskedPixelGroups: + # We need to rebuild the workspace name, because the `WorkspaceName` of the loaded `ReductionRecord` will only retain its string component. 
+ reducedOutputWs = wng.reductionOutput().runNumber(runNumber).group(grouping).timestamp(timestamp).build() + assert reducedOutputWs in reductionRecord.workspaceNames + assert mtd.doesExist(reducedOutputWs) + + outputWs = mtd[reducedOutputWs] + + effectiveInstrument = outputWs.getInstrument() + + # verify the new instrument name + assert effectiveInstrument.getName() == f"SNAP_{grouping}" + + # there should be one pixel per output spectrum + assert effectiveInstrument.getNumberDetectors(True) == outputWs.getNumberHistograms() + + detectorInfo = outputWs.detectorInfo() + pixelGroup = unmaskedPixelGroups[grouping] + + isclose = partial(math.isclose, rel_tol=10.0 * np.finfo(float).eps, abs_tol=10.0 * np.finfo(float).eps) + for n, gid in enumerate(pixelGroup.groupIDs): + # Spectra are in the same index order as the group IDs: + assert isclose(pixelGroup.L2[n], detectorInfo.l2(n)) + assert isclose(pixelGroup.twoTheta[n], detectorInfo.twoTheta(n)) + assert isclose(pixelGroup.azimuth[n], detectorInfo.azimuthal(n)) + + print("*************************************************************") + print("*** Test of effective instrument substitution successful! 
***") + print("*************************************************************") + diff --git a/tests/resources/application.yml b/tests/resources/application.yml index 7dbf314fc..3229e6c2c 100644 --- a/tests/resources/application.yml +++ b/tests/resources/application.yml @@ -103,6 +103,11 @@ calibration: fitting: minSignal2Noise: 10 +reduction: + output: + extension: .nxs + # convert the instrument for the output workspaces into the reduced form + useEffectiveInstrument: false mantid: workspace: diff --git a/tests/resources/inputs/calibration/ReductionIngredients.json b/tests/resources/inputs/calibration/ReductionIngredients.json index ffabf2717..5439e6827 100644 --- a/tests/resources/inputs/calibration/ReductionIngredients.json +++ b/tests/resources/inputs/calibration/ReductionIngredients.json @@ -2,6 +2,7 @@ "runNumber": "57514", "useLiteMode": true, "timestamp": 1722893493.2375631, + "pixelGroups": [ { "pixelGroupingParameters": { @@ -200,6 +201,207 @@ "binningMode": -1 } ], + + "unmaskedPixelGroups": [ + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.4888167719149363, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.009342020503289764 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "All", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_All.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0009342020503289763, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + }, + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.8230747956560218, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 2.3643065241537378 + }, + "dRelativeResolution": 0.006510460909679324 + }, + "2": { + "groupID": 2, + "isMasked": false, + "L2": 
10.0, + "twoTheta": 1.1545587481738246, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.46173472918300634, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.01149611207805968 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "Bank", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_Bank.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0006510460909679324, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + }, + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 2.1108948177427838, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 1.8446004591300944 + }, + "dRelativeResolution": 0.005285822142225365 + }, + "2": { + "groupID": 2, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.82310673131693, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.3897805853492433, + "maximum": 2.0555564929623205 + }, + "dRelativeResolution": 0.006281306679209721 + }, + "3": { + "groupID": 3, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.5352228379083572, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.43925160115545075, + "maximum": 2.3643065241537378 + }, + "dRelativeResolution": 0.007730690425302433 + }, + "4": { + "groupID": 4, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.440276389170165, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.46173472918300634, + "maximum": 2.5221224737819017 + }, + "dRelativeResolution": 0.008321708397955027 + }, + "5": { + "groupID": 5, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.1543988461042238, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.5352414154696261, + "maximum": 3.194134388808033 + }, + "dRelativeResolution": 0.01064966864886483 + }, + "6": { + "groupID": 6, + "isMasked": false, + "L2": 10.0, + "twoTheta": 0.8690010092470938, + "azimuth": 0.0, + "dResolution": { + "minimum": 
0.6591133268028296, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.014622431594735495 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "Column", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_Column.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0005285822142225365, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + }, + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.7273751940345703, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 2.5221224737819017 + }, + "dRelativeResolution": 0.007007302163279077 + }, + "2": { + "groupID": 2, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.011699927675655, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.5352414154696261, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.012791226447712764 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "2_4", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_2_4.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0007007302163279077, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + } + ], + + "detectorPeaksMany": [ [ { diff --git a/tests/unit/backend/data/test_DataFactoryService.py b/tests/unit/backend/data/test_DataFactoryService.py index 4104325cc..e87bddd03 100644 --- a/tests/unit/backend/data/test_DataFactoryService.py +++ b/tests/unit/backend/data/test_DataFactoryService.py @@ -1,4 +1,5 @@ import hashlib +import time import unittest import unittest.mock as mock from pathlib import Path @@ -80,6 +81,7 @@ def setUpClass(cls): def setUp(self): self.version = randint(2, 120) + self.timestamp = time.time() self.instance = DataFactoryService() self.instance.lookupService = self.mockLookupService assert 
isinstance(self.instance, DataFactoryService) @@ -274,18 +276,18 @@ def test_getCompatibleReductionMasks(self): def test_getReductionDataPath(self): for useLiteMode in [True, False]: - actual = self.instance.getReductionDataPath("12345", useLiteMode, self.version) - assert actual == self.expected("12345", useLiteMode, self.version) + actual = self.instance.getReductionDataPath("12345", useLiteMode, self.timestamp) + assert actual == self.expected("12345", useLiteMode, self.timestamp) def test_getReductionRecord(self): for useLiteMode in [True, False]: - actual = self.instance.getReductionRecord("12345", useLiteMode, self.version) - assert actual == self.expected("12345", useLiteMode, self.version) + actual = self.instance.getReductionRecord("12345", useLiteMode, self.timestamp) + assert actual == self.expected("12345", useLiteMode, self.timestamp) def test_getReductionData(self): for useLiteMode in [True, False]: - actual = self.instance.getReductionData("12345", useLiteMode, self.version) - assert actual == self.expected("12345", useLiteMode, self.version) + actual = self.instance.getReductionData("12345", useLiteMode, self.timestamp) + assert actual == self.expected("12345", useLiteMode, self.timestamp) ##### TEST WORKSPACE METHODS #### diff --git a/tests/unit/backend/data/test_LocalDataService.py b/tests/unit/backend/data/test_LocalDataService.py index 2351facae..a54a86ea7 100644 --- a/tests/unit/backend/data/test_LocalDataService.py +++ b/tests/unit/backend/data/test_LocalDataService.py @@ -15,6 +15,7 @@ from typing import List, Literal, Set import h5py +import numpy as np import pydantic import pytest from mantid.api import ITableWorkspace, MatrixWorkspace @@ -25,8 +26,8 @@ CompareWorkspaces, CreateGroupingWorkspace, CreateSampleWorkspace, - DeleteWorkspace, DeleteWorkspaces, + EditInstrumentGeometry, GroupWorkspaces, LoadEmptyInstrument, LoadInstrument, @@ -532,6 +533,14 @@ def test_stateExists(): assert localDataService.stateExists("12345") +def 
test_stateExists_not(): + # Test that the 'stateExists' method returns False when the state doesn't exist. + localDataService = LocalDataService() + localDataService.constructCalibrationStateRoot = mock.Mock(return_value=Path("a/non-existent/path")) + localDataService.generateStateId = mock.Mock(return_value=(ENDURING_STATE_ID, None)) + assert not localDataService.stateExists("12345") + + @mock.patch(ThisService + "GetIPTS") def test_calibrationFileExists(GetIPTS): # noqa ARG002 localDataService = LocalDataService() @@ -1567,8 +1576,8 @@ def _createWorkspaces(wss: List[WorkspaceName]): OutputWorkspace=src, Function="One Peak", NumBanks=1, - NumMonitors=1, - BankPixelWidth=5, + NumMonitors=0, + BankPixelWidth=4, NumEvents=500, Random=True, XUnit="DSP", @@ -1581,18 +1590,49 @@ def _createWorkspaces(wss: List[WorkspaceName]): Filename=fakeInstrumentFilePath, RewriteSpectraMap=True, ) + + # Mask workspace uses legacy instrument + mask = mtd.unique_hidden_name() + createCompatibleMask(mask, src) + + if Config["reduction.output.useEffectiveInstrument"]: + # Convert the source workspace's instrument to the reduced form: + # * no monitors; + # * only one bank of detectors; + # * no parameter map. 
+ + detectorInfo = mtd[src].detectorInfo() + l2s, twoThetas, azimuths = [], [], [] + for n in range(detectorInfo.size()): + if detectorInfo.isMonitor(n): + continue + + l2 = detectorInfo.l2(n) + twoTheta = detectorInfo.twoTheta(n) + + # See: defect EWM#7384 + try: + azimuth = detectorInfo.azimuthal(n) + except RuntimeError as e: + if not str(e).startswith("Failed to create up axis"): + raise + azimuth = 0.0 + l2s.append(l2) + twoThetas.append(twoTheta) + azimuths.append(azimuth) + + EditInstrumentGeometry(Workspace=src, L2=l2s, Polar=np.rad2deg(twoThetas), Azimuthal=np.rad2deg(azimuths)) assert mtd.doesExist(src) + for ws in wss: - wsType = ws.tokens("workspaceType") - match wsType: - case wngt.REDUCTION_PIXEL_MASK: - createCompatibleMask(ws, src) - case _: - CloneWorkspace(OutputWorkspace=ws, InputWorkspace=src) + CloneWorkspace( + OutputWorkspace=ws, + InputWorkspace=src if ws.tokens("workspaceType") != wngt.REDUCTION_PIXEL_MASK else mask, + ) assert mtd.doesExist(ws) cleanup_workspace_at_exit(ws) - DeleteWorkspace(Workspace=src) + DeleteWorkspaces([src, mask]) return wss yield _createWorkspaces @@ -1630,6 +1670,70 @@ def test_writeReductionData(readSyntheticReductionRecord, createReductionWorkspa assert reductionFilePath.exists() +def test_writeReductionData_legacy_instrument(readSyntheticReductionRecord, createReductionWorkspaces): + # Test that the special `Config` setting allows the saving of workspaces with non-reduced instruments + + # In order to facilitate parallel testing: any workspace name used by this test should be unique. 
+ inputRecordFilePath = Path(Resource.getPath("inputs/reduction/ReductionRecord_20240614T130420.json")) + _uniqueTimestamp = 1731518208.172797 + testRecord = readSyntheticReductionRecord(inputRecordFilePath, _uniqueTimestamp) + + # Temporarily use a single run number + runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp + stateId = ENDURING_STATE_ID + fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() + fileName += Config["nexus.file.extension"] + + with Config_override("reduction.output.useEffectiveInstrument", False): + wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 + localDataService = LocalDataService() + with reduction_root_redirect(localDataService, stateId=stateId): + localDataService.instrumentConfig = mock.Mock() + localDataService.getIPTS = mock.Mock(return_value="IPTS-12345") + + # Important to this test: use a path that doesn't already exist + reductionFilePath = localDataService._constructReductionRecordFilePath(runNumber, useLiteMode, timestamp) + assert not reductionFilePath.exists() + + # `writeReductionRecord` must be called first + localDataService.writeReductionRecord(testRecord) + localDataService.writeReductionData(testRecord) + + assert reductionFilePath.exists() + + +def test_writeReductionData_effective_instrument(readSyntheticReductionRecord, createReductionWorkspaces): + # Test that the special `Config` setting allows the saving of workspaces with effective instruments + + # In order to facilitate parallel testing: any workspace name used by this test should be unique. 
+ inputRecordFilePath = Path(Resource.getPath("inputs/reduction/ReductionRecord_20240614T130420.json")) + _uniqueTimestamp = 1733189687.0684218 + testRecord = readSyntheticReductionRecord(inputRecordFilePath, _uniqueTimestamp) + + # Temporarily use a single run number + runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp + stateId = ENDURING_STATE_ID + fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() + fileName += Config["nexus.file.extension"] + + with Config_override("reduction.output.useEffectiveInstrument", True): + wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 + localDataService = LocalDataService() + with reduction_root_redirect(localDataService, stateId=stateId): + localDataService.instrumentConfig = mock.Mock() + localDataService.getIPTS = mock.Mock(return_value="IPTS-12345") + + # Important to this test: use a path that doesn't already exist + reductionFilePath = localDataService._constructReductionRecordFilePath(runNumber, useLiteMode, timestamp) + assert not reductionFilePath.exists() + + # `writeReductionRecord` must be called first + localDataService.writeReductionRecord(testRecord) + localDataService.writeReductionData(testRecord) + + assert reductionFilePath.exists() + + def test_writeReductionData_no_directories(readSyntheticReductionRecord, createReductionWorkspaces): # noqa: ARG001 # In order to facilitate parallel testing: any workspace name used by this test should be unique. 
inputRecordFilePath = Path(Resource.getPath("inputs/reduction/ReductionRecord_20240614T130420.json")) @@ -1668,7 +1772,7 @@ def test_writeReductionData_metadata(readSyntheticReductionRecord, createReducti runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp stateId = ENDURING_STATE_ID fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 localDataService = LocalDataService() @@ -1702,7 +1806,7 @@ def test_readWriteReductionData(readSyntheticReductionRecord, createReductionWor runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp stateId = ENDURING_STATE_ID fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 localDataService = LocalDataService() @@ -1737,13 +1841,64 @@ def test_readWriteReductionData(readSyntheticReductionRecord, createReductionWor # please do _not_ replace this with one of the `assert_almost_equal` methods: # -- they do not necessarily do what you think they should do... for ws in actualRecord.workspaceNames: - equal, _ = CompareWorkspaces( - Workspace1=ws, - Workspace2=_uniquePrefix + ws, - ) + equal, _ = CompareWorkspaces(Workspace1=ws, Workspace2=_uniquePrefix + ws, CheckAllData=True) assert equal +def test_readWriteReductionData_legacy_instrument( + readSyntheticReductionRecord, createReductionWorkspaces, cleanup_workspace_at_exit +): + # In order to facilitate parallel testing: any workspace name used by this test should be unique. 
+ _uniquePrefix = "_test_RWRD_" + inputRecordFilePath = Path(Resource.getPath("inputs/reduction/ReductionRecord_20240614T130420.json")) + _uniqueTimestamp = 1731519071.6706867 + testRecord = readSyntheticReductionRecord(inputRecordFilePath, _uniqueTimestamp) + + runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp + stateId = ENDURING_STATE_ID + fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() + fileName += Config["reduction.output.extension"] + + with Config_override("reduction.output.useEffectiveInstrument", False): + wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 + localDataService = LocalDataService() + with reduction_root_redirect(localDataService, stateId=stateId): + localDataService.instrumentConfig = mock.Mock() + localDataService.getIPTS = mock.Mock(return_value="IPTS-12345") + + # Important to this test: use a path that doesn't already exist + reductionRecordFilePath = localDataService._constructReductionRecordFilePath( + runNumber, useLiteMode, timestamp + ) + assert not reductionRecordFilePath.exists() + + # `writeReductionRecord` needs to be called first + localDataService.writeReductionRecord(testRecord) + localDataService.writeReductionData(testRecord) + + filePath = reductionRecordFilePath.parent / fileName + assert filePath.exists() + + # move the existing test workspaces out of the way: + # * this just adds the `_uniquePrefix` one more time. 
+ RenameWorkspaces(InputWorkspaces=wss, Prefix=_uniquePrefix) + # append to the cleanup list + for ws in wss: + cleanup_workspace_at_exit(_uniquePrefix + ws) + + actualRecord = localDataService.readReductionData(runNumber, useLiteMode, timestamp) + assert actualRecord == testRecord + + # workspaces should have been reloaded with their original names + # Implementation note: + # * the workspaces must match _exactly_ here, so `CompareWorkspaces` must be used; + # please do _not_ replace this with one of the `assert_almost_equal` methods: + # -- they do not necessarily do what you think they should do... + for ws in actualRecord.workspaceNames: + equal, _ = CompareWorkspaces(Workspace1=ws, Workspace2=_uniquePrefix + ws, CheckAllData=True) + assert equal + + def test_readWriteReductionData_pixel_mask( readSyntheticReductionRecord, createReductionWorkspaces, cleanup_workspace_at_exit ): @@ -1756,7 +1911,7 @@ def test_readWriteReductionData_pixel_mask( runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp stateId = ENDURING_STATE_ID fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 localDataService = LocalDataService() with reduction_root_redirect(localDataService, stateId=stateId): @@ -1810,7 +1965,7 @@ def test__constructReductionDataFilePath(): stateId = ENDURING_STATE_ID testIPTS = "IPTS-12345" fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] expectedFilePath = ( Path(Config["instrument.reduction.home"].format(IPTS=testIPTS)) diff --git a/tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py b/tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py new file mode 100644 index 
000000000..79b46e8f0 --- /dev/null +++ b/tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py @@ -0,0 +1,190 @@ +from unittest import mock + +import numpy as np +import pytest +from util.SculleryBoy import SculleryBoy + +from snapred.backend.dao.ingredients import EffectiveInstrumentIngredients as Ingredients +from snapred.backend.dao.state.FocusGroup import FocusGroup +from snapred.backend.dao.state.PixelGroup import PixelGroup +from snapred.backend.error.AlgorithmException import AlgorithmException +from snapred.backend.recipe.algorithm.Utensils import Utensils +from snapred.backend.recipe.EffectiveInstrumentRecipe import EffectiveInstrumentRecipe +from snapred.meta.Config import Resource + + +class TestEffectiveInstrumentRecipe: + fakeInstrumentFilePath = Resource.getPath("inputs/testInstrument/fakeSNAP_Definition.xml") + sculleryBoy = SculleryBoy() + + @pytest.fixture(autouse=True) + def _setup(self): + self.ingredients = mock.Mock( + spec=Ingredients, + unmaskedPixelGroup=mock.Mock( + spec=PixelGroup, + L2=mock.Mock(), + twoTheta=mock.Mock(), + azimuth=mock.Mock(), + focusGroup=FocusGroup(name="a_grouping", definition="a/grouping/path"), + ), + ) + self.ingredients1 = mock.Mock( + spec=Ingredients, + unmaskedPixelGroup=mock.Mock( + spec=PixelGroup, + L2=mock.Mock(), + twoTheta=mock.Mock(), + azimuth=mock.Mock(), + focusGroup=FocusGroup(name="another_grouping", definition="another/grouping/path"), + ), + ) + self.ingredientss = [self.ingredients, self.ingredients1] + + yield + + # teardown follows ... 
+ pass + + def test_chopIngredients(self): + recipe = EffectiveInstrumentRecipe() + ingredients = self.ingredients + recipe.chopIngredients(ingredients) + assert recipe.unmaskedPixelGroup == ingredients.unmaskedPixelGroup + + def test_unbagGroceries(self): + recipe = EffectiveInstrumentRecipe() + groceries = {"inputWorkspace": mock.Mock(), "outputWorkspace": mock.Mock()} + recipe.unbagGroceries(groceries) + assert recipe.inputWS == groceries["inputWorkspace"] + assert recipe.outputWS == groceries["outputWorkspace"] + + def test_unbagGroceries_output_default(self): + recipe = EffectiveInstrumentRecipe() + groceries = {"inputWorkspace": mock.Mock()} + recipe.unbagGroceries(groceries) + assert recipe.inputWS == groceries["inputWorkspace"] + assert recipe.outputWS == groceries["inputWorkspace"] + + def test_queueAlgos(self): + recipe = EffectiveInstrumentRecipe() + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock(), "outputWorkspace": mock.Mock()} + recipe.prep(ingredients, groceries) + recipe.queueAlgos() + + queuedAlgos = recipe.mantidSnapper._algorithmQueue + + cloneWorkspaceTuple = queuedAlgos[0] + assert cloneWorkspaceTuple[0] == "CloneWorkspace" + assert cloneWorkspaceTuple[2]["InputWorkspace"] == groceries["inputWorkspace"] + assert cloneWorkspaceTuple[2]["OutputWorkspace"] == groceries["outputWorkspace"] + + editInstrumentGeometryTuple = queuedAlgos[1] + assert editInstrumentGeometryTuple[0] == "EditInstrumentGeometry" + assert editInstrumentGeometryTuple[2]["Workspace"] == groceries["outputWorkspace"] + + def test_queueAlgos_default(self): + recipe = EffectiveInstrumentRecipe() + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock()} + recipe.prep(ingredients, groceries) + recipe.queueAlgos() + + queuedAlgos = recipe.mantidSnapper._algorithmQueue + + editInstrumentGeometryTuple = queuedAlgos[0] + assert editInstrumentGeometryTuple[0] == "EditInstrumentGeometry" + assert editInstrumentGeometryTuple[2]["Workspace"] 
== groceries["inputWorkspace"] + + def test_cook(self): + utensils = Utensils() + mockSnapper = mock.Mock() + utensils.mantidSnapper = mockSnapper + recipe = EffectiveInstrumentRecipe(utensils=utensils) + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock(), "outputWorkspace": mock.Mock()} + + output = recipe.cook(ingredients, groceries) + + assert output == groceries["outputWorkspace"] + + assert mockSnapper.executeQueue.called + mockSnapper.CloneWorkspace.assert_called_once_with( + "Clone workspace for reduced instrument", + OutputWorkspace=groceries["outputWorkspace"], + InputWorkspace=groceries["inputWorkspace"], + ) + mockSnapper.EditInstrumentGeometry.assert_called_once_with( + f"Editing instrument geometry for grouping '{ingredients.unmaskedPixelGroup.focusGroup.name}'", + Workspace=groceries["outputWorkspace"], + L2=ingredients.unmaskedPixelGroup.L2, + Polar=np.rad2deg(ingredients.unmaskedPixelGroup.twoTheta), + Azimuthal=np.rad2deg(ingredients.unmaskedPixelGroup.azimuth), + InstrumentName=f"SNAP_{ingredients.unmaskedPixelGroup.focusGroup.name}", + ) + + def test_cook_default(self): + utensils = Utensils() + mockSnapper = mock.Mock() + utensils.mantidSnapper = mockSnapper + recipe = EffectiveInstrumentRecipe(utensils=utensils) + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock()} + + output = recipe.cook(ingredients, groceries) + + assert output == groceries["inputWorkspace"] + + assert mockSnapper.executeQueue.called + mockSnapper.CloneWorkspace.assert_not_called() + mockSnapper.EditInstrumentGeometry.assert_called_once_with( + f"Editing instrument geometry for grouping '{ingredients.unmaskedPixelGroup.focusGroup.name}'", + Workspace=groceries["inputWorkspace"], + L2=ingredients.unmaskedPixelGroup.L2, + Polar=np.rad2deg(ingredients.unmaskedPixelGroup.twoTheta), + Azimuthal=np.rad2deg(ingredients.unmaskedPixelGroup.azimuth), + InstrumentName=f"SNAP_{ingredients.unmaskedPixelGroup.focusGroup.name}", + ) + + def 
test_cook_fail(self): + # Test that `AlgorithmException` is routed to `RuntimeError`. + utensils = Utensils() + mockSnapper = mock.Mock() + mockSnapper.executeQueue = mock.Mock(side_effect=AlgorithmException("EditInstrumentGeometry")) + utensils.mantidSnapper = mockSnapper + recipe = EffectiveInstrumentRecipe(utensils=utensils) + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock()} + + with pytest.raises(RuntimeError, match=r".*EditInstrumentGeometry.*"): + recipe.cook(ingredients, groceries) + + def test_cater(self): + untensils = Utensils() + mockSnapper = mock.Mock() + untensils.mantidSnapper = mockSnapper + recipe = EffectiveInstrumentRecipe(utensils=untensils) + ingredientss = self.ingredientss + + groceriess = [{"inputWorkspace": mock.Mock()}, {"inputWorkspace": mock.Mock()}] + + recipe.cater(zip(ingredientss, groceriess)) + + assert mockSnapper.EditInstrumentGeometry.call_count == 2 + mockSnapper.EditInstrumentGeometry.assert_any_call( + f"Editing instrument geometry for grouping '{ingredientss[0].unmaskedPixelGroup.focusGroup.name}'", + Workspace=groceriess[0]["inputWorkspace"], + L2=ingredientss[0].unmaskedPixelGroup.L2, + Polar=np.rad2deg(ingredientss[0].unmaskedPixelGroup.twoTheta), + Azimuthal=np.rad2deg(ingredientss[0].unmaskedPixelGroup.azimuth), + InstrumentName=f"SNAP_{ingredientss[0].unmaskedPixelGroup.focusGroup.name}", + ) + mockSnapper.EditInstrumentGeometry.assert_any_call( + f"Editing instrument geometry for grouping '{ingredientss[1].unmaskedPixelGroup.focusGroup.name}'", + Workspace=groceriess[1]["inputWorkspace"], + L2=ingredientss[1].unmaskedPixelGroup.L2, + Polar=np.rad2deg(ingredientss[1].unmaskedPixelGroup.twoTheta), + Azimuthal=np.rad2deg(ingredientss[1].unmaskedPixelGroup.azimuth), + InstrumentName=f"SNAP_{ingredientss[1].unmaskedPixelGroup.focusGroup.name}", + ) diff --git a/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py b/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py index 
d5866b4d6..ddd9e4e7f 100644 --- a/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py +++ b/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py @@ -9,8 +9,9 @@ from util.helpers import createCompatibleMask from util.SculleryBoy import SculleryBoy +from snapred.backend.dao.ingredients import PreprocessReductionIngredients as Ingredients from snapred.backend.recipe.algorithm.Utensils import Utensils -from snapred.backend.recipe.PreprocessReductionRecipe import Ingredients, PreprocessReductionRecipe +from snapred.backend.recipe.PreprocessReductionRecipe import PreprocessReductionRecipe from snapred.meta.Config import Resource @@ -19,9 +20,9 @@ class PreprocessReductionRecipeTest(unittest.TestCase): sculleryBoy = SculleryBoy() def _make_groceries(self): - sampleWS = mtd.unique_name(prefix="test_applynorm") - calibWS = mtd.unique_name(prefix="test_applynorm") - maskWS = mtd.unique_name(prefix="test_applynorm") + sampleWS = mtd.unique_name(prefix="test_preprocess_reduction") + calibWS = mtd.unique_name(prefix="test_preprocess_reduction") + maskWS = mtd.unique_name(prefix="test_preprocess_reduction") # Create sample workspace: # * warning: `createCompatibleMask` does not work correctly with @@ -98,10 +99,10 @@ def test_queueAlgos(self): assert applyDiffCalTuple[2]["CalibrationWorkspace"] == groceries["diffcalWorkspace"] def test_cook(self): - untensils = Utensils() + utensils = Utensils() mockSnapper = unittest.mock.Mock() - untensils.mantidSnapper = mockSnapper - recipe = PreprocessReductionRecipe(utensils=untensils) + utensils.mantidSnapper = mockSnapper + recipe = PreprocessReductionRecipe(utensils=utensils) ingredients = Ingredients() groceries = self._make_groceries() del groceries["maskWorkspace"] diff --git a/tests/unit/backend/recipe/test_ReductionRecipe.py b/tests/unit/backend/recipe/test_ReductionRecipe.py index 6a5b18a89..65ce78d4f 100644 --- a/tests/unit/backend/recipe/test_ReductionRecipe.py +++ 
b/tests/unit/backend/recipe/test_ReductionRecipe.py @@ -3,11 +3,13 @@ import pytest from mantid.simpleapi import CreateSingleValuedWorkspace, mtd +from util.Config_helpers import Config_override from util.SculleryBoy import SculleryBoy from snapred.backend.dao.ingredients import ReductionIngredients from snapred.backend.recipe.ReductionRecipe import ( ApplyNormalizationRecipe, + EffectiveInstrumentRecipe, GenerateFocussedVanadiumRecipe, PreprocessReductionRecipe, ReductionGroupProcessingRecipe, @@ -398,6 +400,9 @@ def test_execute(self, mockMtd): recipe.ingredients.applyNormalization = mock.Mock( return_value=lambda groupingIndex: f"applyNormalization_{groupingIndex}" ) + recipe.ingredients.effectiveInstrument = mock.Mock( + return_value=lambda groupingIndex: f"unmaskedPixelGroup_{groupingIndex}" + ) # Mock internal methods of recipe recipe._applyRecipe = mock.Mock() @@ -463,18 +468,138 @@ def test_execute(self, mockMtd): normalizationWorkspace="norm_grouped", ) - artNormCalls = recipe._prepareArtificialNormalization.call_args_list - anCall1 = artNormCalls[0] - anCall2 = artNormCalls[1] - assert anCall1[0][0] == "sample_grouped" - assert anCall2[0][0] == "sample_grouped" - assert anCall1[0][1] == 0 - assert anCall2[0][1] == 1 + assert recipe._prepareArtificialNormalization.call_count == 2 + recipe._prepareArtificialNormalization.assert_any_call("sample_grouped", 0) + recipe._prepareArtificialNormalization.assert_any_call("sample_grouped", 1) + + recipe.ingredients.effectiveInstrument.assert_not_called() recipe._deleteWorkspace.assert_called_with("norm_grouped") assert recipe._deleteWorkspace.call_count == len(recipe._prepGroupingWorkspaces.return_value) assert result["outputs"][0] == "sample_grouped" + @mock.patch("mantid.simpleapi.mtd", create=True) + def test_execute_useEffectiveInstrument(self, mockMtd): + with Config_override("reduction.output.useEffectiveInstrument", True): + mockMantidSnapper = mock.Mock() + + mockMaskworkspace = mock.Mock() + mockGroupWorkspace =
mock.Mock() + + mockGroupWorkspace.getNumberHistograms.return_value = 10 + mockGroupWorkspace.readY.return_value = [0] * 10 + mockMaskworkspace.readY.return_value = [0] * 10 + + mockMtd.__getitem__.side_effect = ( + lambda ws_name: mockMaskworkspace if ws_name == "mask" else mockGroupWorkspace + ) + + recipe = ReductionRecipe() + recipe.mantidSnapper = mockMantidSnapper + recipe.mantidSnapper.mtd = mockMtd + recipe._prepareArtificialNormalization = mock.Mock() + recipe._prepareArtificialNormalization.return_value = "norm_grouped" + + # Set up ingredients and other variables for the recipe + recipe.groceries = {} + recipe.ingredients = mock.Mock() + recipe.ingredients.artificialNormalizationIngredients = "test" + recipe.ingredients.groupProcessing = mock.Mock( + return_value=lambda groupingIndex: f"groupProcessing_{groupingIndex}" + ) + recipe.ingredients.generateFocussedVanadium = mock.Mock( + return_value=lambda groupingIndex: f"generateFocussedVanadium_{groupingIndex}" + ) + recipe.ingredients.applyNormalization = mock.Mock( + return_value=lambda groupingIndex: f"applyNormalization_{groupingIndex}" + ) + recipe.ingredients.effectiveInstrument = mock.Mock( + return_value=lambda groupingIndex: f"unmaskedPixelGroup_{groupingIndex}" + ) + + # Mock internal methods of recipe + recipe._applyRecipe = mock.Mock() + recipe._cloneIntermediateWorkspace = mock.Mock() + recipe._deleteWorkspace = mock.Mock() + recipe._prepareUnfocusedData = mock.Mock() + recipe._prepGroupingWorkspaces = mock.Mock() + recipe._prepGroupingWorkspaces.return_value = ("sample_grouped", "norm_grouped") + + # Set up other recipe variables + recipe.sampleWs = "sample" + recipe.maskWs = "mask" + recipe.normalizationWs = "norm" + recipe.groupingWorkspaces = ["group1", "group2"] + recipe.keepUnfocused = True + recipe.convertUnitsTo = "TOF" + + # Execute the recipe + result = recipe.execute() + + # Perform assertions + recipe._applyRecipe.assert_any_call( + PreprocessReductionRecipe, + 
recipe.ingredients.preprocess(), + inputWorkspace=recipe.sampleWs, + maskWorkspace=recipe.maskWs, + ) + recipe._applyRecipe.assert_any_call( + PreprocessReductionRecipe, + recipe.ingredients.preprocess(), + inputWorkspace=recipe.normalizationWs, + maskWorkspace=recipe.maskWs, + ) + + recipe._applyRecipe.assert_any_call( + ReductionGroupProcessingRecipe, recipe.ingredients.groupProcessing(0), inputWorkspace="sample_grouped" + ) + recipe._applyRecipe.assert_any_call( + ReductionGroupProcessingRecipe, recipe.ingredients.groupProcessing(1), inputWorkspace="norm_grouped" + ) + + recipe._applyRecipe.assert_any_call( + GenerateFocussedVanadiumRecipe, + recipe.ingredients.generateFocussedVanadium(0), + inputWorkspace="norm_grouped", + ) + recipe._applyRecipe.assert_any_call( + GenerateFocussedVanadiumRecipe, + recipe.ingredients.generateFocussedVanadium(1), + inputWorkspace="norm_grouped", + ) + + recipe._applyRecipe.assert_any_call( + ApplyNormalizationRecipe, + recipe.ingredients.applyNormalization(0), + inputWorkspace="sample_grouped", + normalizationWorkspace="norm_grouped", + ) + recipe._applyRecipe.assert_any_call( + ApplyNormalizationRecipe, + recipe.ingredients.applyNormalization(1), + inputWorkspace="sample_grouped", + normalizationWorkspace="norm_grouped", + ) + + assert recipe._prepareArtificialNormalization.call_count == 2 + recipe._prepareArtificialNormalization.assert_any_call("sample_grouped", 0) + recipe._prepareArtificialNormalization.assert_any_call("sample_grouped", 1) + + recipe._applyRecipe.assert_any_call( + EffectiveInstrumentRecipe, + recipe.ingredients.effectiveInstrument(0), + inputWorkspace="sample_grouped", + ) + recipe._applyRecipe.assert_any_call( + EffectiveInstrumentRecipe, + recipe.ingredients.effectiveInstrument(1), + inputWorkspace="sample_grouped", + ) + + recipe._deleteWorkspace.assert_called_with("norm_grouped") + assert recipe._deleteWorkspace.call_count == len(recipe._prepGroupingWorkspaces.return_value) + assert result["outputs"][0] ==
"sample_grouped" + @mock.patch("mantid.simpleapi.mtd", create=True) def test_isGroupFullyMasked(self, mockMtd): mockMantidSnapper = mock.Mock() diff --git a/tests/unit/backend/service/test_ReductionService.py b/tests/unit/backend/service/test_ReductionService.py index ab1d08836..e66372a72 100644 --- a/tests/unit/backend/service/test_ReductionService.py +++ b/tests/unit/backend/service/test_ReductionService.py @@ -20,10 +20,12 @@ from snapred.backend.api.RequestScheduler import RequestScheduler from snapred.backend.dao import WorkspaceMetadata +from snapred.backend.dao.ingredients import ArtificialNormalizationIngredients from snapred.backend.dao.ingredients.ReductionIngredients import ReductionIngredients from snapred.backend.dao.reduction.ReductionRecord import ReductionRecord from snapred.backend.dao.request import ( CreateArtificialNormalizationRequest, + FarmFreshIngredients, ReductionExportRequest, ReductionRequest, ) @@ -75,7 +77,10 @@ def setUp(self): timestamp=self.instance.getUniqueTimestamp(), versions=(1, 2), pixelMasks=[], + keepUnfocused=True, + convertUnitsTo="TOF", focusGroups=[FocusGroup(name="apple", definition="path/to/grouping")], + artificialNormalizationIngredients=mock.Mock(spec=ArtificialNormalizationIngredients), ) def test_name(self): @@ -102,10 +107,22 @@ def test_fetchReductionGroupings(self): def test_prepReductionIngredients(self): # Call the method with the provided parameters - res = self.instance.prepReductionIngredients(self.request) + result = self.instance.prepReductionIngredients(self.request) + + farmFresh = FarmFreshIngredients( + runNumber=self.request.runNumber, + useLiteMode=self.request.useLiteMode, + timestamp=self.request.timestamp, + focusGroups=self.request.focusGroups, + keepUnfocused=self.request.keepUnfocused, + convertUnitsTo=self.request.convertUnitsTo, + versions=self.request.versions, + ) + expected = self.instance.sousChef.prepReductionIngredients(farmFresh) + expected.artificialNormalizationIngredients = 
self.request.artificialNormalizationIngredients - assert ReductionIngredients.model_validate(res) - assert res == self.instance.sousChef.prepReductionIngredients(self.request) + assert ReductionIngredients.model_validate(result) + assert result == expected def test_fetchReductionGroceries(self): self.instance.dataFactoryService.getThisOrLatestCalibrationVersion = mock.Mock(return_value=1) @@ -141,6 +158,50 @@ def test_reduction(self, mockReductionRecipe): mockReductionRecipe.return_value.cook.assert_called_once_with(ingredients, groceries) assert result.record.workspaceNames == mockReductionRecipe.return_value.cook.return_value["outputs"] + @mock.patch(thisService + "ReductionResponse") + @mock.patch(thisService + "ReductionRecipe") + def test_reduction_full_sequence(self, mockReductionRecipe, mockReductionResponse): + mockReductionRecipe.return_value = mock.Mock() + mockResult = {"result": True, "outputs": ["one", "two", "three"], "unfocusedWS": mock.Mock()} + mockReductionRecipe.return_value.cook = mock.Mock(return_value=mockResult) + self.instance.dataFactoryService.getThisOrLatestCalibrationVersion = mock.Mock(return_value=1) + self.instance.dataFactoryService.stateExists = mock.Mock(return_value=True) + self.instance.dataFactoryService.calibrationExists = mock.Mock(return_value=True) + self.instance.dataFactoryService.getThisOrLatestNormalizationVersion = mock.Mock(return_value=1) + self.instance.dataFactoryService.normalizationExists = mock.Mock(return_value=True) + self.instance._markWorkspaceMetadata = mock.Mock() + + self.instance.fetchReductionGroupings = mock.Mock( + return_value={"focusGroups": mock.Mock(), "groupingWorkspaces": mock.Mock()} + ) + self.instance.fetchReductionGroceries = mock.Mock(return_value={"combinedPixelMask": mock.Mock()}) + self.instance.prepReductionIngredients = mock.Mock(return_value=mock.Mock()) + self.instance._createReductionRecord = mock.Mock(return_value=mock.Mock()) + + request_ = self.request.model_copy() + 
self.instance.reduction(request_) + + self.instance.fetchReductionGroupings.assert_called_once_with(request_) + assert request_.focusGroups == self.instance.fetchReductionGroupings.return_value["focusGroups"] + self.instance.fetchReductionGroceries.assert_called_once_with(request_) + self.instance.prepReductionIngredients.assert_called_once_with( + request_, self.instance.fetchReductionGroceries.return_value["combinedPixelMask"] + ) + assert ( + self.instance.fetchReductionGroceries.return_value["groupingWorkspaces"] + == self.instance.fetchReductionGroupings.return_value["groupingWorkspaces"] + ) + + self.instance._createReductionRecord.assert_called_once_with( + request_, + self.instance.prepReductionIngredients.return_value, + mockReductionRecipe.return_value.cook.return_value["outputs"], + ) + mockReductionResponse.assert_called_once_with( + record=self.instance._createReductionRecord.return_value, + unfocusedData=mockReductionRecipe.return_value.cook.return_value["unfocusedWS"], + ) + def test_reduction_noState_withWritePerms(self): mockRequest = mock.Mock() self.instance.dataFactoryService.stateExists = mock.Mock(return_value=False) @@ -645,7 +706,7 @@ def trackFetchGroceryDict(*args, **kwargs): request.runNumber, request.versions.normalization ).useLiteMode(request.useLiteMode).add() loadableOtherGroceryItems = groceryClerk.buildDict() - residentOtherGroceryKwargs = {"maskWorkspace": combinedMaskName} + residentOtherGroceryKwargs = {"combinedPixelMask": combinedMaskName} self.service.fetchReductionGroceries(request) diff --git a/tests/unit/backend/service/test_SousChef.py b/tests/unit/backend/service/test_SousChef.py index 4e22f1dfe..83c05d9d1 100644 --- a/tests/unit/backend/service/test_SousChef.py +++ b/tests/unit/backend/service/test_SousChef.py @@ -26,6 +26,7 @@ def setUp(self): cifPath="path/to/cif", maxChiSq=100.0, ) + self.pixelMask = mock.Mock() def tearDown(self): del self.instance @@ -59,9 +60,9 @@ def 
test_prepManyDetectorPeaks_no_calibration(self): def test_prepManyPixelGroups(self): self.instance.prepPixelGroup = mock.Mock() - res = self.instance.prepManyPixelGroups(self.ingredients) + res = self.instance.prepManyPixelGroups(self.ingredients, self.pixelMask) assert res[0] == self.instance.prepPixelGroup.return_value - self.instance.prepPixelGroup.assert_called_once_with(self.ingredients) + self.instance.prepPixelGroup.assert_called_once_with(self.ingredients, self.pixelMask) def test_prepFocusGroup_exists(self): # create a temp file to be used a the path for the focus group @@ -172,13 +173,17 @@ def test_prepPixelGroup_nocache( ): self.instance = SousChef() self.instance.dataFactoryService.calibrationExists = mock.Mock(return_value=True) - # ensure there is no cached value + + # Warning: key now includes pixel mask name. key = ( self.ingredients.runNumber, self.ingredients.useLiteMode, self.ingredients.focusGroup.name, self.ingredients.calibrantSamplePath, + None, ) + + # ensure there is no cached value assert self.instance._pixelGroupCache == {} # mock the calibration, which will give the instrument state @@ -214,10 +219,11 @@ def test_prepPixelGroup_cache(self, PixelGroupingParametersCalculationRecipe): self.ingredients.useLiteMode, self.ingredients.focusGroup.name, self.ingredients.calibrantSamplePath, + self.pixelMask, ) self.instance._pixelGroupCache[key] = mock.sentinel.pixel - res = self.instance.prepPixelGroup(self.ingredients) + res = self.instance.prepPixelGroup(self.ingredients, self.pixelMask) assert not PixelGroupingParametersCalculationRecipe.called assert res == self.instance._pixelGroupCache[key] @@ -229,6 +235,7 @@ def test_prepPixelGroup_cache_not_altered(self): self.ingredients.useLiteMode, self.ingredients.focusGroup.name, self.ingredients.calibrantSamplePath, + None, ) self.instance._pixelGroupCache[key] = PixelGroup.construct(timeOfFlight={"minimum": 0}) @@ -463,49 +470,57 @@ def test_prepDetectorPeaks_cache_not_altered(self): 
@mock.patch(thisService + "ReductionIngredients") def test_prepReductionIngredients(self, ReductionIngredients, mockOS): # noqa: ARG002 calibrationCalibrantSamplePath = "a/sample.x" - record = mock.Mock( - smoothingParamter=1.0, + calibrationRecord = mock.Mock( + smoothingParameter=mock.Mock(), calculationParameters=mock.Mock( calibrantSamplePath=calibrationCalibrantSamplePath, ), - calibrantSamplePath=calibrationCalibrantSamplePath, ) - normalRecord = mock.Mock( - smoothingParamter=1.0, - calculationParameters=mock.Mock( - calibrantSamplePath=calibrationCalibrantSamplePath, - ), - normalizationCalibrantSamplePath=calibrationCalibrantSamplePath, + normalizationCalibrantSamplePath = "b/sample.x" + normalizationRecord = mock.Mock( + smoothingParameter=mock.Mock(), + calculationParameters=mock.Mock(), + normalizationCalibrantSamplePath=normalizationCalibrantSamplePath, ) self.instance.prepCalibrantSample = mock.Mock() self.instance.prepRunConfig = mock.Mock() - self.instance.prepManyPixelGroups = mock.Mock() + prepPixelGroupsReturnValues = [mock.Mock(), mock.Mock()] + self.instance.prepManyPixelGroups = mock.Mock(side_effect=prepPixelGroupsReturnValues) self.instance.prepManyDetectorPeaks = mock.Mock() + self.instance._getThresholdFromCalibrantSample = mock.Mock(return_value=mock.Mock()) self.instance.dataFactoryService.getCifFilePath = mock.Mock() self.instance.dataFactoryService.getReductionState = mock.Mock() - self.instance.dataFactoryService.getNormalizationRecord = mock.Mock(return_value=normalRecord) - self.instance.dataFactoryService.getCalibrationRecord = mock.Mock(return_value=record) + self.instance.dataFactoryService.getCalibrationRecord = mock.Mock(return_value=calibrationRecord) + self.instance.dataFactoryService.getNormalizationRecord = mock.Mock(return_value=normalizationRecord) + # Modifications to a copy of `ingredients` during the first part of `prepReductionIngredients`, + # before the `prepManyPixelGroups` calls: ingredients_ = 
self.ingredients.model_copy() # ... from calibration record: - ingredients_.calibrantSamplePath = calibrationCalibrantSamplePath ingredients_.cifPath = self.instance.dataFactoryService.getCifFilePath.return_value # ... from normalization record: - ingredients_.peakIntensityThreshold = self.instance._getThresholdFromCalibrantSample( - "calibrationCalibrantSamplePath" - ) - result = self.instance.prepReductionIngredients(ingredients_) + ingredients_.calibrantSamplePath = normalizationCalibrantSamplePath + + combinedMask = mock.Mock() + # Note that `prepReductionIngredients` is called with the _unmodified_ ingredients. + result = self.instance.prepReductionIngredients(self.ingredients, combinedMask) + + assert self.instance.prepManyPixelGroups.call_count == 2 + + self.instance.prepManyPixelGroups.assert_any_call(ingredients_) + self.instance.prepManyPixelGroups.assert_any_call(ingredients_, combinedMask) - self.instance.prepManyPixelGroups.assert_called_once_with(ingredients_) self.instance.dataFactoryService.getCifFilePath.assert_called_once_with("sample") + ReductionIngredients.assert_called_once_with( runNumber=ingredients_.runNumber, useLiteMode=ingredients_.useLiteMode, timestamp=ingredients_.timestamp, - pixelGroups=self.instance.prepManyPixelGroups.return_value, - smoothingParameter=normalRecord.smoothingParameter, + pixelGroups=prepPixelGroupsReturnValues[0], + unmaskedPixelGroups=prepPixelGroupsReturnValues[1], + smoothingParameter=normalizationRecord.smoothingParameter, calibrantSamplePath=ingredients_.calibrantSamplePath, - peakIntensityThreshold=ingredients_.peakIntensityThreshold, + peakIntensityThreshold=self.instance._getThresholdFromCalibrantSample.return_value, detectorPeaksMany=self.instance.prepManyDetectorPeaks.return_value, keepUnfocused=ingredients_.keepUnfocused, convertUnitsTo=ingredients_.convertUnitsTo, diff --git a/tests/unit/ui/workflow/test_DiffCalWorkflow.py b/tests/unit/ui/workflow/test_DiffCalWorkflow.py index f5a53ad02..5915ff458 
100644 --- a/tests/unit/ui/workflow/test_DiffCalWorkflow.py +++ b/tests/unit/ui/workflow/test_DiffCalWorkflow.py @@ -1,4 +1,3 @@ -import threading from random import randint from unittest.mock import MagicMock, patch @@ -8,8 +7,7 @@ GroupWorkspaces, mtd, ) -from qtpy.QtCore import Qt -from qtpy.QtWidgets import QApplication, QMessageBox +from qtpy.QtWidgets import QMessageBox from snapred.meta.mantid.FitPeaksOutput import FIT_PEAK_DIAG_SUFFIX, FitOutputEnum from snapred.meta.pointer import create_pointer @@ -136,16 +134,29 @@ def test_purge_bad_peaks_too_few(workflowRequest, qtbot): # noqa: ARG001 ) diffcalWorkflow.fitPeaksDiagnostic = diagWS - def execute_click(): - w = QApplication.activeWindow() - if isinstance(w, QMessageBox): - close_button = w.button(QMessageBox.Ok) - qtbot.mouseClick(close_button, Qt.LeftButton) - # setup the qtbot to intercept the window qtbot.addWidget(diffcalWorkflow._tweakPeakView) - threading.Timer(0.2, execute_click).start() + + # + # Using a mock here bypasses the following issues: + # + # * which thread the messagebox will be running on (may cause a segfault); + # + # * how long to wait for the messagebox to instantiate. + # + def _tooFewPeaksQuery(_parent, title, text, _buttons): + if title == "Too Few Peaks": + return QMessageBox.Ok + raise RuntimeError(f"unexpected `QMessageBox.critical`: title: {title}, text: {text}") + + mockTooFewPeaksQuery = patch("qtpy.QtWidgets.QMessageBox.critical", _tooFewPeaksQuery) + + # Use `start` and `stop` rather than `with patch...` in order to apply the mock even in the case of exceptions. + mockTooFewPeaksQuery.start() diffcalWorkflow.purgeBadPeaks(maxChiSq) + # Remember to remove the mock. 
+ mockTooFewPeaksQuery.stop() + assert diffcalWorkflow.ingredients.groupedPeakLists[0].peaks == peaks assert diffcalWorkflow.ingredients.groupedPeakLists[0].peaks != good_peaks diff --git a/tests/util/SculleryBoy.py b/tests/util/SculleryBoy.py index d8c4382b5..21167e55f 100644 --- a/tests/util/SculleryBoy.py +++ b/tests/util/SculleryBoy.py @@ -1,4 +1,4 @@ -from typing import Dict, List +from typing import Dict, List, Optional from unittest import mock import pydantic @@ -18,6 +18,7 @@ from snapred.backend.dao.state.PixelGroupingParameters import PixelGroupingParameters from snapred.backend.recipe.GenericRecipe import DetectorPeakPredictorRecipe from snapred.meta.Config import Resource +from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceName from snapred.meta.redantic import parse_file_as @@ -92,7 +93,9 @@ def prepDetectorPeaks(self, ingredients: FarmFreshIngredients, purgePeaks=False) except (TypeError, AttributeError): return [mock.Mock(spec_set=GroupPeakList)] - def prepReductionIngredients(self, ingredients: FarmFreshIngredients): # noqa ARG002 + def prepReductionIngredients( + self, _ingredients: FarmFreshIngredients, _combinedPixelMask: Optional[WorkspaceName] = None + ): path = Resource.getPath("/inputs/calibration/ReductionIngredients.json") return parse_file_as(ReductionIngredients, path)