Merge branch 'next' into ewm6378-fix-full-masking-in-reduction
darshdinger committed Oct 22, 2024
2 parents 7896830 + 656110f commit 0920d5e
Showing 16 changed files with 962 additions and 207 deletions.
@@ -7,8 +7,8 @@

from snapred.backend.dao.calibration.CalibrationMetric import CalibrationMetric
from snapred.backend.dao.state import PixelGroup
from snapred.backend.recipe.algorithm.FitMultiplePeaksAlgorithm import FitOutputEnum
from snapred.backend.recipe.algorithm.MantidSnapper import MantidSnapper
from snapred.meta.mantid.FitPeaksOutput import FitOutputEnum


class CalibrationMetricExtractionAlgorithm(PythonAlgorithm):
@@ -54,6 +54,7 @@ def PyExec(self):
pixelGroup = PixelGroup.model_validate_json(self.getPropertyValue("PixelGroup"))
pixelGroupingParameters = list(pixelGroup.pixelGroupingParameters.values())
# collect all params and peak positions
inputWorkspace.sortByName()
peakPos = inputWorkspace.getItem(FitOutputEnum.PeakPosition.value)
parameters = inputWorkspace.getItem(FitOutputEnum.Parameters.value)
workspace = inputWorkspace.getItem(FitOutputEnum.Workspace.value) # noqa: F841
149 changes: 149 additions & 0 deletions src/snapred/backend/recipe/algorithm/ConjoinDiagnosticWorkspaces.py
@@ -0,0 +1,149 @@
from typing import List

from mantid.api import MatrixWorkspace, PythonAlgorithm, WorkspaceGroupProperty
from mantid.dataobjects import TableWorkspace
from mantid.kernel import Direction, IntPropertyWithValue
from mantid.simpleapi import (
BufferMissingColumnsAlgo,
CloneWorkspace,
ConjoinTableWorkspaces,
ConjoinWorkspaces,
DeleteWorkspace,
ExtractSingleSpectrum,
GroupWorkspaces,
RenameWorkspace,
UnGroupWorkspace,
mtd,
)

from snapred.meta.mantid.FitPeaksOutput import FIT_PEAK_DIAG_SUFFIX


class ConjoinDiagnosticWorkspaces(PythonAlgorithm):
"""
    Given the grouped diagnostic output from PDCalibration run on one spectrum at a time,
    combine the sub-workspaces into a single accumulated diagnostic group.
"""

INPUTGRPPROP1 = "DiagnosticWorkspace"
OUTPUTGRPPROP = "TotalDiagnosticWorkspace"

def category(self):
return "SNAPRed Diffraction Calibration"

def newNamesFromOld(self, oldNames: List[str], newName: str) -> List[str]:
selectedNames = set(self.diagnosticSuffix.values())
suffixes = []
for oldName in oldNames:
elements = oldName.split("_")
suffix = next((f"_{x}" for x in elements if f"_{x}" in selectedNames), None)
if suffix is not None:
suffixes.append(suffix)
return [f"{newName}{suffix}" for suffix in suffixes]

def PyInit(self):
# declare properties
self.declareProperty(
WorkspaceGroupProperty(self.INPUTGRPPROP1, "", direction=Direction.Input),
doc="Table workspace from peak-fitting diagnosis.",
)
self.declareProperty(IntPropertyWithValue("AddAtIndex", 0))
self.declareProperty(
WorkspaceGroupProperty(self.OUTPUTGRPPROP, "", direction=Direction.Output),
doc="Result of conjoining the diagnostic workspaces",
)
self.declareProperty("AutoDelete", False)
self.setRethrows(True)
# NOTE must be in alphabetical order
self.diagnosticSuffix = FIT_PEAK_DIAG_SUFFIX.copy()

def PyExec(self) -> None:
self.autoDelete = self.getProperty("AutoDelete").value
index = self.getProperty("AddAtIndex").value
diag1 = self.getPropertyValue(self.INPUTGRPPROP1)
outws = self.getPropertyValue(self.OUTPUTGRPPROP)

        # sort by name so the sub-workspaces pair up with the accumulated output in a deterministic order
mtd[diag1].sortByName()
oldNames = mtd[diag1].getNames()
newNames = self.newNamesFromOld(oldNames, outws)

# if the input is expected to autodelete, it must be ungrouped first
if self.autoDelete:
UnGroupWorkspace(diag1)

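        # at index 0 the accumulated output group is created fresh from this input;
        # for later indices each sub-workspace is conjoined onto the existing output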
if index == 0:
for old, new in zip(oldNames, newNames):
if self.autoDelete:
RenameWorkspace(
InputWorkspace=old,
OutputWorkspace=new,
)
else:
CloneWorkspace(
InputWorkspace=old,
OutputWorkspace=new,
)
if isinstance(mtd[new], MatrixWorkspace) and index < mtd[new].getNumberHistograms():
ExtractSingleSpectrum(
InputWorkspace=new,
OutputWorkspace=new,
WorkspaceIndex=index,
)
GroupWorkspaces(
InputWorkspaces=newNames,
OutputWorkspace=outws,
)
else:
for old, new in zip(oldNames, newNames):
ws = mtd[old]
if isinstance(ws, MatrixWorkspace):
self.conjoinMatrixWorkspaces(old, new, index)
elif isinstance(ws, TableWorkspace):
self.conjoinTableWorkspaces(old, new, index)
else:
raise RuntimeError(f"Unrecognized workspace type {type(ws)}")

if self.autoDelete:
for oldName in oldNames:
if oldName in mtd:
DeleteWorkspace(oldName)

self.setProperty(self.OUTPUTGRPPROP, mtd[outws])

def conjoinMatrixWorkspaces(self, inws, outws, index):
tmpws = f"{inws}_{index}"
if index < mtd[inws].getNumberHistograms():
ExtractSingleSpectrum(
InputWorkspace=inws,
                OutputWorkspace=tmpws,
WorkspaceIndex=index,
)
else:
CloneWorkspace(
InputWorkspace=inws,
OutputWorkspace=tmpws,
)
ConjoinWorkspaces(
InputWorkspace1=outws,
InputWorkspace2=tmpws,
CheckOverlapping=False,
)
if self.autoDelete and inws in mtd:
DeleteWorkspace(inws)
assert outws in mtd

def conjoinTableWorkspaces(self, inws, outws, index): # noqa: ARG002
BufferMissingColumnsAlgo(
Workspace1=inws,
Workspace2=outws,
)
BufferMissingColumnsAlgo(
Workspace1=outws,
Workspace2=inws,
)
ConjoinTableWorkspaces(
InputWorkspace1=outws,
InputWorkspace2=inws,
AutoDelete=self.autoDelete,
)
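
For orientation, a minimal usage sketch of the new algorithm, assuming it is registered with Mantid's AlgorithmFactory so that it is importable from mantid.simpleapi; the workspace names and loop bound below are illustrative only, not taken from this change:

# Illustrative sketch: accumulate per-spectrum diagnostic groups into one running group.
# "diag_0", "diag_1", ... and "total_diag" are hypothetical workspace names.
from mantid.simpleapi import ConjoinDiagnosticWorkspaces

for index in range(3):  # assume one diagnostic WorkspaceGroup exists per fitted spectrum
    ConjoinDiagnosticWorkspaces(
        DiagnosticWorkspace=f"diag_{index}",
        TotalDiagnosticWorkspace="total_diag",
        AddAtIndex=index,
        AutoDelete=True,
    )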
50 changes: 50 additions & 0 deletions src/snapred/backend/recipe/algorithm/CreateTableWorkspace.py
@@ -0,0 +1,50 @@
from mantid.api import PythonAlgorithm
from mantid.dataobjects import TableWorkspaceProperty
from mantid.kernel import Direction
from mantid.kernel import ULongLongPropertyWithValue as PointerProperty
from mantid.simpleapi import CreateEmptyTableWorkspace

from snapred.meta.pointer import access_pointer


class CreateTableWorkspace(PythonAlgorithm):
def category(self):
return "SNAPRed Internal"

def PyInit(self):
self.declareProperty(
TableWorkspaceProperty("OutputWorkspace", "", Direction.Output),
doc="The table workspace created from the input",
)
self.declareProperty(
PointerProperty("Data", id(None), Direction.Input),
doc="A dictionary with column names as keys, corresponding to a list for the column values",
)
self.setRethrows(True)

def PyExec(self):
data = access_pointer(self.getProperty("Data").value)
colnames = list(data.keys())
length = len(data[colnames[0]])
for col in data.values():
if len(col) != length:
raise RuntimeError(f"Column mismatch: length {len(col)} vs {length}")

outputWorkspace = self.getPropertyValue("OutputWorkspace")
ws = CreateEmptyTableWorkspace(
OutputWorkspace=outputWorkspace,
)
# add the columns
for colname in colnames:
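            # infer the Mantid column type from the last value in each column
            # (falling back to str for an empty column); Python floats map to "double"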
coltype = type((data[colname][-1:] or [""])[0])
if coltype is float:
coltype = "double"
else:
coltype = coltype.__name__

ws.addColumn(type=coltype, name=colname)
# now add all the data in the columns
for i in range(length):
ws.addRow({colname: data[colname][i] for colname in colnames})

self.setProperty("OutputWorkspace", ws)
100 changes: 24 additions & 76 deletions src/snapred/backend/recipe/algorithm/FitMultiplePeaksAlgorithm.py
@@ -1,14 +1,11 @@
from enum import Enum
from typing import Dict, List

import numpy as np
import pydantic
from mantid.api import (
AlgorithmFactory,
MatrixWorkspaceProperty,
PropertyMode,
PythonAlgorithm,
WorkspaceFactory,
WorkspaceGroup,
mtd,
)
@@ -20,17 +17,11 @@
from snapred.backend.recipe.algorithm.MantidSnapper import MantidSnapper
from snapred.meta.Config import Config
from snapred.meta.mantid.AllowedPeakTypes import allowed_peak_type_list
from snapred.meta.mantid.FitPeaksOutput import FIT_PEAK_DIAG_SUFFIX, FitOutputEnum

logger = snapredLogger.getLogger(__name__)


class FitOutputEnum(Enum):
PeakPosition = 0
Parameters = 1
Workspace = 2
ParameterError = 3


class FitMultiplePeaksAlgorithm(PythonAlgorithm):
NOYZE_2_MIN = Config["calibration.fitting.minSignal2Noise"]

@@ -56,13 +47,6 @@ def PyInit(self):
self.setRethrows(True)
self.mantidSnapper = MantidSnapper(self, __name__)

def listToWorkspace(self, aList, name):
ws = WorkspaceFactory.create("Workspace2D", NVectors=1, XLength=len(aList), YLength=len(aList))
ws.setX(0, np.asarray(aList))
# register ws in mtd
mtd.addOrReplace(name, ws)
return ws

def validateInputs(self) -> Dict[str, str]:
errors = {}
return errors
@@ -74,11 +58,7 @@ def chopIngredients(self, ingredients: List[GroupPeakList]):
self.groupIDs.append(groupPeakList.groupID)
self.reducedList[groupPeakList.groupID] = groupPeakList.peaks
# suffixes to name diagnostic output
self.outputSuffix = [None] * len(FitOutputEnum)
self.outputSuffix[FitOutputEnum.PeakPosition.value] = "_peakpos"
self.outputSuffix[FitOutputEnum.Parameters.value] = "_fitparam"
self.outputSuffix[FitOutputEnum.Workspace.value] = "_fitted"
self.outputSuffix[FitOutputEnum.ParameterError.value] = "_fiterror"
self.outputSuffix = FIT_PEAK_DIAG_SUFFIX.copy()

def unbagGroceries(self):
self.inputWorkspaceName = self.getPropertyValue("Inputworkspace")
@@ -96,14 +76,10 @@ def PyExec(self):
self.chopIngredients(reducedPeakList)
self.unbagGroceries()

outputNames = [None] * len(FitOutputEnum)
for x in FitOutputEnum:
outputNames[x.value] = f"{self.outputWorkspaceName}{self.outputSuffix[x.value]}"

for index, groupID in enumerate(self.groupIDs):
outputNamesTmp = [None] * len(FitOutputEnum)
for x in FitOutputEnum:
outputNamesTmp[x.value] = f"{self.outputWorkspaceName}{self.outputSuffix[x.value]}_{index}"
tmpSpecName = mtd.unique_name(prefix=f"tmp_fitspec_{index}_")
outputNameTmp = mtd.unique_name(prefix=f"tmp_fitdiag_{index}_")
outputNamesTmp = {x: f"{outputNameTmp}{self.outputSuffix[x]}_{index}" for x in FitOutputEnum}

peakCenters = []
peakLimits = []
@@ -114,14 +90,14 @@
self.mantidSnapper.ExtractSingleSpectrum(
"Extract Single Spectrum...",
InputWorkspace=self.inputWorkspaceName,
OutputWorkspace="ws2fit",
OutputWorkspace=tmpSpecName,
WorkspaceIndex=index,
)

self.mantidSnapper.FitPeaks(
"Fit Peaks...",
# in common with PDCalibration
InputWorkspace="ws2fit",
InputWorkspace=tmpSpecName,
PeakFunction=peakFunction,
PeakCenters=",".join(np.array(peakCenters).astype("str")),
FitWindowBoundaryList=",".join(np.array(peakLimits).astype("str")),
@@ -130,55 +106,27 @@
ConstrainPeakPositions=True,
HighBackground=True, # vanadium must use high background
# outputs -- in PDCalibration combined in workspace group
FittedPeaksWorkspace=outputNamesTmp[FitOutputEnum.Workspace.value],
OutputWorkspace=outputNamesTmp[FitOutputEnum.PeakPosition.value],
OutputPeakParametersWorkspace=outputNamesTmp[FitOutputEnum.Parameters.value],
OutputParameterFitErrorsWorkspace=outputNamesTmp[FitOutputEnum.ParameterError.value],
FittedPeaksWorkspace=outputNamesTmp[FitOutputEnum.Workspace],
OutputWorkspace=outputNamesTmp[FitOutputEnum.PeakPosition],
OutputPeakParametersWorkspace=outputNamesTmp[FitOutputEnum.Parameters],
OutputParameterFitErrorsWorkspace=outputNamesTmp[FitOutputEnum.ParameterError],
)
self.mantidSnapper.GroupWorkspaces(
"Group diagnosis workspaces for output",
InputWorkspaces=list(outputNamesTmp.values()),
OutputWorkspace=outputNameTmp,
)
self.mantidSnapper.ConjoinDiagnosticWorkspaces(
"Conjoin the diagnostic group workspaces",
DiagnosticWorkspace=outputNameTmp,
TotalDiagnosticWorkspace=self.outputWorkspaceName,
AddAtIndex=index,
AutoDelete=True,
)
if index == 0:
self.cloneWorkspaces(outputNamesTmp, outputNames)
else:
self.conjoinWorkspaces(outputNames, outputNamesTmp)
self.mantidSnapper.WashDishes(
"Deleting fitting workspace...",
Workspace="ws2fit",
Workspace=tmpSpecName,
)

self.mantidSnapper.executeQueue()
for output in outputNames:
self.outputWorkspace.add(output)

self.mantidSnapper.executeQueue()
self.setProperty("OutputWorkspaceGroup", self.outputWorkspace.name())

def cloneWorkspaces(self, inputs: List[str], outputs: List[str]):
self.mantidSnapper.RenameWorkspaces(
"Copying tmp workspace data",
InputWorkspaces=inputs,
WorkspaceNames=outputs,
)

def conjoinWorkspaces(self, input1: List[str], input2: List[str]):
# combine the matrix workspaces
for x in [FitOutputEnum.Workspace.value, FitOutputEnum.PeakPosition.value]:
self.mantidSnapper.ConjoinWorkspaces(
"Conjoin peak position workspaces",
InputWorkspace1=input1[x],
InputWorkspace2=input2[x],
CheckOverlapping=False,
)
self.mantidSnapper.WashDishes(
"Clear temporary workspace",
Workspace=input2[x],
)
# combine the table workspaces
for x in [FitOutputEnum.Parameters.value, FitOutputEnum.ParameterError.value]:
self.mantidSnapper.ConjoinTableWorkspaces(
"Conjoin peak fit parameter workspaces",
InputWorkspace1=input1[x],
InputWorkspace2=input2[x],
AutoDelete=True,
)


AlgorithmFactory.subscribe(FitMultiplePeaksAlgorithm)