diff --git a/mantidlog.txt b/mantidlog.txt deleted file mode 100644 index c7e97d7..0000000 --- a/mantidlog.txt +++ /dev/null @@ -1 +0,0 @@ -analysis-node23 - 2025-11-11 10:22:45,171 - ERROR - Mantid - logging set to error priority diff --git a/src/snapwrap/application.yml b/src/snapwrap/application.yml index 8454902..1acca45 100644 --- a/src/snapwrap/application.yml +++ b/src/snapwrap/application.yml @@ -9,3 +9,4 @@ SEE: materials: database: ${SEE.home}/materials/materials.db spectra: ${SEE.home}/materials/spectra +cleanTree: true diff --git a/src/snapwrap/io.py b/src/snapwrap/io.py index ba7a552..0cdf9e7 100644 --- a/src/snapwrap/io.py +++ b/src/snapwrap/io.py @@ -8,12 +8,15 @@ import shutil from mantid.simpleapi import * from mantid.api import WorkspaceGroup +import mantid.kernel + import datetime import json from snapred.meta.Config import Config import snapwrap.snapStateMgr as ssm import snapwrap.maskUtils as mut +from .wrapConfig import WrapConfig #Mantid interface @@ -24,36 +27,118 @@ class redObject: #and then builds further attributes from these - def __init__(self, wsName,exportFormats=[], - requiredPrefix='reduced_dsp', + def __init__(self, wsName, + requiredPrefix='reduced', + requiredUnits='dsp', #allow override of expected units + requiredPGS=None, #allow processing of specific pixel groups only + requiredRunNumber=None, #allow processing of specific run numbers only iptsOverride=None, - fileTag=None): + exportFormats=[], + fileTag=None, + cleanTreeOverride=None): + + if WrapConfig.get("cleanTree"): #new variable to ignore timestamps + cleanTree = True + else: + cleanTree = False + + if cleanTreeOverride is not None: + cleanTree = cleanTreeOverride + + self.wsName = wsName #need to keep this too + + # reject everything that is inconsistent with the schema + # and requested filters + + # schema + # cleanTree: + # ___ (4 elements) + # not cleanTree: + # ____ (5 elements) if '_' not in wsName: + self.isReducedDataWorkspace = False #necessary by not sufficient condition + return + + #manage special case where a hidden workspace prefix is specified + if requiredPrefix.startswith('__'): + parsed = wsName[2:].split('_') + parsed[0] = '__' + parsed[0] #ensure dunder is included in prefix + else: + parsed = wsName.split('_') + + if cleanTree: + nElem = 4 + else: + nElem = 5 + + #process prefix + # prefix = parsed[0] + if parsed[0] != requiredPrefix: self.isReducedDataWorkspace = False return + else: + self.prefix = parsed[0] - parsed = wsName.split('_') - prefix = f"{parsed[0]}_{parsed[1]}" + #process units + units = parsed[1] + if units != requiredUnits: + self.isReducedDataWorkspace = False + return + else: + self.units = parsed[1] - # print(f"prefix: {prefix}, required prefix: {requiredPrefix}") + # AT THIS POINT need to manage 2_4 instances. A terrible mistake where PGS has an underscore in its name :( + if parsed[2] == "2" and parsed[3] == "4": + twoFour = True + nElem += 1 # this adds an additional element to total count. 
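# --- sketch (illustration only; example names are assumed, not taken from this diff) ---
# The schema enforced above, shown on concrete workspace names:
#   cleanTree:    reduced_dsp_column_46680                    -> 4 tokens
#   timestamped:  reduced_dsp_column_46680_2025-06-01T120000  -> 5 tokens
# The "2_4" pixel group embeds an underscore, so it splits into two tokens;
# hence nElem += 1 here and the indexShift applied just below.
parsed = "reduced_dsp_2_4_46680_2025-06-01T120000".split("_")
twoFour = parsed[2] == "2" and parsed[3] == "4"      # True for this name
pixelGroup = "2_4" if twoFour else parsed[2]         # -> "2_4"
runNumber = int(parsed[3 + (1 if twoFour else 0)])   # -> 46680
timeStamp = parsed[4 + (1 if twoFour else 0)]        # -> "2025-06-01T120000"
# --- end sketch ---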
+ indexShift = 1 + else: + twoFour = False + indexShift = 0 - if prefix != requiredPrefix: + # filter on parsed length + if len(parsed) != nElem: self.isReducedDataWorkspace = False return - - #get useful workspace properties + + #process pixel group + if twoFour: + self.pixelGroup = "2_4" + else: + self.pixelGroup = parsed[2] + + if requiredPGS is not None: + if self.pixelGroup != requiredPGS: + self.isReducedDataWorkspace = False + return + + #process run number ensure it is an int retain original string + self.runNumberString = parsed[3+indexShift] # indexShift should handle 2_4 case. + self.runNumber=int(self.runNumberString) + + if requiredRunNumber is not None: + if self.runNumber != int(requiredRunNumber): + self.isReducedDataWorkspace = False + return + + # At this point we have passed all available filters + + # aquire timestamp only if it exists + if cleanTree: + self.timeStamp = None + else: + self.timeStamp = parsed[4+indexShift] + + #get useful workspace spectral properties (e.g. number histograms, binning etc) self.wsProperties(wsName) if not self.isReducedDataWorkspace: return self.isReducedDataWorkspace = True - self.suffix = f"{parsed[2]}_{parsed[3]}_{parsed[4]}" - self.pixelGroup = parsed[2] - self.runNumber = parsed[3] - self.timeStamp = parsed[4] - self.wsName = wsName #need to keep this too + # self.suffix = f"{parsed[2]}_{parsed[3]}_{parsed[4]}" + if iptsOverride is None: self.ipts = GetIPTS(RunNumber=self.runNumber, Instrument='SNAP') @@ -67,8 +152,11 @@ def __init__(self, wsName,exportFormats=[], self.exportFormats = exportFormats self.exportPaths = self.buildExportPaths() - self.dateTime = datetime.datetime.strptime(self.timeStamp,'%Y-%m-%dT%H%M%S') + if self.timeStamp is not None: + self.dateTime = datetime.datetime.strptime(self.timeStamp,'%Y-%m-%dT%H%M%S') + else: + self.dateTime = None #create a dictionary to hold metadata to include as a comment in output files @@ -194,10 +282,11 @@ def buildExportPaths(self): class reductionGroup: #instantiated with a list of redObject classes and a run number, it reparses the list into - #a dictionary where the keys are the pixel group and the values are a list of redObjects + #a dictionary where the keys are the pixel group and the values are a list of redObjects + # if a timestamp is present these are ordered with latest first. 
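# --- sketch (illustration only): why the latest-first ordering promised in the
# class comment above can rely on a plain string sort. The '%Y-%m-%dT%H%M%S'
# stamps are fixed-width with the most significant fields first, so
# lexicographic order equals chronological order. Timestamps are made up.
import datetime

stamps = ["2025-06-01T120000", "2025-06-01T093015", "2025-05-31T235959"]
latestFirst = sorted(stamps, reverse=True)
assert latestFirst[0] == "2025-06-01T120000"

# parsing to datetime and sorting gives the identical order
byTime = sorted(stamps,
                key=lambda s: datetime.datetime.strptime(s, "%Y-%m-%dT%H%M%S"),
                reverse=True)
assert byTime == latestFirst
# --- end sketch ---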
- def __init__(self,runNumber,redObjectList): + def __init__(self,runNumber,redObjectList,cleanTreeOverride = None): self.runNumber = runNumber @@ -213,7 +302,7 @@ def __init__(self,runNumber,redObjectList): pgsList.append(run.pixelGroup) allPixelGroups = set(pgsList) - print(f"run {runNumber} has {len(allPixelGroups)} pixel groups") + print(f"run {runNumber} has {len(allPixelGroups)} pixel group(s)") redObjects = {} #populate dictionaries with empty lists to hold contents @@ -225,16 +314,20 @@ def __init__(self,runNumber,redObjectList): key = run.pixelGroup redObjects[key].append(run) - #sort lists for each key in order of decreasing time + cleanTree = WrapConfig.get("cleanTree") + if cleanTreeOverride is not None: + cleanTree = cleanTreeOverride # need this option so tree can be cleaned + + #if not cleanTree need to sort lists for each key in order of decreasing time for pgs in allPixelGroups: - objects = redObjects[pgs] # a list of redObjects un sorted in time - sortedObjects = sorted( - objects, - key = lambda obj: obj.timeStamp, - reverse=True - ) #This list sorted according to timestamp of objects - - redObjects[pgs]=sortedObjects #replace list with sorted list + if not cleanTree: + objects = redObjects[pgs] # a list of redObjects un sorted in time + sortedObjects = sorted( + objects, + key = lambda obj: obj.timeStamp, + reverse=True + ) #This list sorted according to timestamp of objects + redObjects[pgs]=sortedObjects #replace list with sorted list self.objectDict = redObjects @@ -254,22 +347,46 @@ def convertToQ(): #TODO: rebin S(Q) once I know how to do this -def reducedRuns(exportFormats,prefix,iptsOverride=None, fileTag=None):#,latestOnly=True,gsaInstPrm=True): +def reducedRuns(prefix='reduced', + units = 'dsp', + PGS = None, + runNumber = None, + iptsOverride=None, + exportFormats=[], + fileTag=None, + cleanTreeOverride=None):#,latestOnly=True,gsaInstPrm=True): - #generates a list of reductionGroups. Each of these has a .runNumber attribute + #generates a list of reductionGroups. Each of these has a `runNumber` attribute #and contains a dictionary with keys for each pixel groups. The corresponding values #are a list of available reduction object for that group (each with all attributes needed #to export requested files) - - allWorkspaces = mtd.getObjectNames() + # if prefix starts with dunder then assume we need to check hidden workspaces + if prefix.startswith('__'): + # print("looking for hidden") + # with mantid.kernel.amend_config(**{"InvisibleWorkspaces": "1"}): # TODO: try to fix this. 
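# --- sketch (illustration only): a leak-proof variant of the toggle below,
# pending the amend_config TODO. Restoring the previous
# 'MantidOptions.InvisibleWorkspaces' value in a finally block means an
# exception inside getObjectNames() cannot leave the global flag switched on.
# Assumes the usual mantid config object with dict-style access.
from mantid import config
from mantid.simpleapi import mtd

def listAllWorkspaceNames():
    previous = config['MantidOptions.InvisibleWorkspaces']  # remember current setting
    config.setString('MantidOptions.InvisibleWorkspaces', '1')
    try:
        return mtd.getObjectNames()  # now includes hidden (dunder-prefixed) workspaces
    finally:
        config.setString('MantidOptions.InvisibleWorkspaces', previous)
# --- end sketch ---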
+ config.setString('MantidOptions.InvisibleWorkspaces','1') + allWorkspaces = mtd.getObjectNames() + # print(allWorkspaces) + config.setString('MantidOptions.InvisibleWorkspaces','0') + else: + allWorkspaces = mtd.getObjectNames() #filter out and parse reduced workspaces redObjectList = [] redRuns = [] for ws in allWorkspaces: - red = redObject(ws,exportFormats,prefix,iptsOverride,fileTag) + red = redObject(ws, + requiredPrefix=prefix, + requiredUnits=units, + requiredPGS=PGS, + requiredRunNumber=runNumber, + iptsOverride=iptsOverride, + exportFormats=exportFormats, + fileTag=fileTag, + cleanTreeOverride=cleanTreeOverride) + if red.isReducedDataWorkspace: redObjectList.append(red) redRuns.append(red.runNumber) @@ -277,7 +394,7 @@ def reducedRuns(exportFormats,prefix,iptsOverride=None, fileTag=None):#,latestOn nReduced = len(redObjectList) uniqueRuns = set(redRuns) nUnique = len(uniqueRuns) - print(f"Found total of {nReduced} reduced workspaces these were parsed into {nUnique} run reduction groups") + print(f"Found total of {nReduced} reduced workspaces these were parsed into {nUnique} run reduction group(s)") #parse these creating "reductionGroup" for each run numbner reducedGroups = [] @@ -302,14 +419,11 @@ def exportReducedGroup(redGroup,latestOnly,gsaInstPrm): print(f"Exporting run: {runNumber} with {len(runDict)} pixel group(s)") for pgs in runDict.keys(): #each key is a pixel group and each pixel group has a list of objects (each is a workspace) - print(f"processing {pgs} with {len(runDict[pgs])} associated workspaces") - listOfDates = [x.dateTime for x in runDict[pgs]] - mostRecent = max(listOfDates) - mostRecentIndex = listOfDates.index(mostRecent) + print(f"processing pixel group {pgs} with {len(runDict[pgs])} associated workspaces") if latestOnly: - processIndices = [mostRecentIndex] + processIndices = [0] else: - processIndices = np.arange(len(listOfDates)) + processIndices = np.arange(len(runDict[pgs])).tolist() exportRecipe(runDict,pgs,processIndices,gsaInstPrm) diff --git a/src/snapwrap/sampleMeta/latticeFittingFunctions.py b/src/snapwrap/sampleMeta/latticeFittingFunctions.py index 6bd501c..a58d34b 100644 --- a/src/snapwrap/sampleMeta/latticeFittingFunctions.py +++ b/src/snapwrap/sampleMeta/latticeFittingFunctions.py @@ -61,6 +61,14 @@ def residual_hex(params, reflectionList): a, c = params residuals = [] + # validate required inputs + for ref in reflectionList: + if not isinstance(ref.dObs, (float, np.floating)): + raise TypeError(f"Error: d-spacing {ref.dObs!r} is not a float") + if ref.extentOverPosition is None or not isinstance(ref.extentOverPosition, (float, np.floating)): + raise TypeError(f"Error: extentOverPosition {ref.extentOverPosition!r} is not a float") + # print("debug:", ref.dObs, ref.extentOverPosition) + for ref in reflectionList: d2Inv_calc = hex_d2Inv(ref, a, c) d2Inv_obs = 1 / ref.dObs**2 diff --git a/src/snapwrap/sampleMeta/utils.py b/src/snapwrap/sampleMeta/utils.py index b45f611..78927af 100644 --- a/src/snapwrap/sampleMeta/utils.py +++ b/src/snapwrap/sampleMeta/utils.py @@ -13,7 +13,9 @@ # from snapwrap.wrapConfig import WrapConfig # from snapwrap.SEEMeta.material import material # from snapwrap.SEEMeta.db import engine +import importlib import snapwrap.sampleMeta.latticeFittingFunctions as lff +importlib.reload(lff) #is this needed? 
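# --- sketch (illustration only): one answer to the "is this needed?" above.
# reload() only matters when latticeFittingFunctions is being edited inside a
# live Workbench session; a fresh interpreter always imports current source.
# Guarding it avoids a surprising reload on every import. SNAPWRAP_DEV is a
# hypothetical flag, not part of this diff.
import os
import importlib
import snapwrap.sampleMeta.latticeFittingFunctions as lff

if os.environ.get("SNAPWRAP_DEV"):
    importlib.reload(lff)
# --- end sketch ---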
class crystalReflection: @@ -29,16 +31,6 @@ def __init__(self, self.dObs = dObs self.extentOverPosition = extentOverPosition #full extent of peak divided by its position - # prototype idea is to ditch extentOverPosition, which - in retrospect - is a dumb idea as - # of course, this is not just a property of the reflection, but how and where it's measured - # Here, I'm testing a new parameter "sig" (not sure about the name, but it's inspired by the - # GSAS param, reflecting a gaussian component of peak profile). - # My initial plan is to use this as a multiplier for the separately-calculated resolution - # of a pixel group - # original extentOverPosition is kept for now, but should be removed in the future - - self.sig = None # this is a prototype parameter - def to_dict(self): return self.__dict__ #returns a json string of attributes @@ -108,7 +100,8 @@ def __init__(self, spaceGroup, observedReflections, name = None, #a name for this species - scatterers = ""): #fraction of maximum intensity to use as threshold for d-spacing generation + scatterers = "", + dLimits = None): #fraction of maximum intensity to use as threshold for d-spacing generation # spaceGroups is string with H-M space group # observedReflections is a list of crystalReflection objects @@ -145,7 +138,10 @@ def __init__(self, self.unitCell = self._cellFromReflections() #attempt to build crystalStructure object - self.hasCrystalStructure = self._buildCrystalStructure() + self.hasCrystalStructure = self._buildCrystalStructure() + + #encountered a need to specify d-range to use when calculating d-spacings + self.dLimits = dLimits def _systemFromSG(self): @@ -199,7 +195,8 @@ def _cellFromReflections(self): #TODO count independent reflections and check there are enough. nRef = len(reflectionList) if nRef >= cell.minRefs: - print(f"{len(reflectionList)} reflections provided. This is sufficient for {cell.crystalSystem} system (assuming independence).") + pass + # print(f"{len(reflectionList)} reflections provided. 
This is sufficient for {cell.crystalSystem} system (assuming independence).") else: print(f"Error: you provided {nRef} refs, but {cell.minRefs} are required for {cell.crystalSystem} system") return @@ -308,7 +305,10 @@ def _buildCrystalStructure(self): scatterers=self.scatterers) return True except: - print("Failed to generate crystalStructure object") + print("Failed to generate crystalStructure object with provided input:") + print(f" unitCell: {a} {b} {c} {alpha} {beta} {gamma}") + print(f" spaceGroup: {self.spaceGroup}") + print(f" scatterers: {self.scatterers}") return False def printCrystal(self): @@ -387,6 +387,8 @@ def calcDSpacings(self,dMin=0.5,dMax=10.0,minFractionOfMaxIntensity=0.8): return generator = ReflectionGenerator(self.crystalStructure) + + allRefs = generator.getUniqueHKLs(dMin,dMax) # Create list of unique reflections between 0.7 and 3.0 Angstrom hkls = generator.getUniqueHKLsUsingFilter(dMin, dMax, ReflectionConditionFilter.StructureFactor) @@ -418,7 +420,7 @@ def calcDSpacings(self,dMin=0.5,dMax=10.0,minFractionOfMaxIntensity=0.8): dFiltered.append(ref[1]) - print(f"after filtering have {len(dFiltered)} reflections") + print(f"after filtering have {len(dFiltered)} reflections (this is {100*len(dFiltered)/len(reflections):.2f}%)") self.dSpacings = sorted(dFiltered,reverse=True) @@ -455,7 +457,8 @@ def to_dict(self): "valid": self.valid, "extentOverPosition": self.extentOverPosition, "unitCell": unit_cell_dict, - "hasCrystalStructure": self.hasCrystalStructure + "hasCrystalStructure": self.hasCrystalStructure, + "dLimits": self.dLimits # <-- preserve dLimits } return species_dict @@ -473,13 +476,15 @@ def from_dict(cls, d): space_group = d.get("spaceGroup", "") name = d.get("name") scatterers = d.get("scatterers", "") + dLimits = d.get("dLimits", None) # <-- restore dLimits # Create a crystalSpecies object species = cls( spaceGroup=space_group, observedReflections=observed_reflections, name=name, - scatterers=scatterers + scatterers=scatterers, + dLimits=dLimits ) # Restore additional attributes diff --git a/src/snapwrap/snapStateMgr.py b/src/snapwrap/snapStateMgr.py index f152f17..689c973 100644 --- a/src/snapwrap/snapStateMgr.py +++ b/src/snapwrap/snapStateMgr.py @@ -137,7 +137,7 @@ def VBRunNumberFromVersion(calDict,calFolder): return normRec["backgroundRunNumber"] -def isCalibrated(runNumber,isLite=True): +def isCalibrated(runNumber,isLite=True,silent=False): # returns tuple of booleans for difcal and normcal status respectively # values will only be true if valid calibration exists @@ -149,7 +149,11 @@ def isCalibrated(runNumber,isLite=True): nrmcal = checkCalibrationStatus(runNumber, stateID=None, isLite=isLite, calType="normcal") - + + if silent: + return (difcal["runIsCalibrated"],nrmcal["runIsCalibrated"]) + + #otherwise print calibration status if difcal['runIsCalibrated']: print(f"difcal: is calibrated: {difcal['runIsCalibrated']} with run {difcal['latestValidCalibrationDict']['runNumber']} ") else: diff --git a/src/snapwrap/spectralTools/tools.py b/src/snapwrap/spectralTools/tools.py index 069a500..fef6dfa 100644 --- a/src/snapwrap/spectralTools/tools.py +++ b/src/snapwrap/spectralTools/tools.py @@ -4,17 +4,48 @@ from mantid.simpleapi import * from snapwrap.utils import workspaceHandles -def excludeROI(wsName,wsIndex,roiList): - #takes a list of [xMin,xMax] values and for specified index in WS sets corresponding Y-values +def excludeROI(wsName, wsIndex, roiList, createExcludedWS=False): + # takes a list of [xMin,xMax] values and for specified index in WS 
sets corresponding Y-values # to NAN + ws = mtd[wsName] - x = ws.readX(wsIndex) - y = ws.dataY(wsIndex) - for roi in roiList: - roiIndices = np.argwhere( (x[:-2]>=roi[0]) & (x[:-2]<=roi[1]) ) #x[:-2] is to avoid index error - y[roiIndices] = np.nan - ws.setY(wsIndex,y) + x = ws.readX(wsIndex) # bin edges length = nBins+1 + y = ws.dataY(wsIndex).copy() # work on a copy + + if createExcludedWS: + y_excl = np.full_like(y, np.nan) + + excludedPoints = 0 + original_y = ws.dataY(wsIndex) # keep original for excluded_ws + # use x[:-1] to align with y (bins) + x_bin_edges = x[:-1] + + for roi in roiList: + # boolean mask for bins whose left-edge falls inside ROI + mask = (x_bin_edges >= roi[0]) & (x_bin_edges <= roi[1]) + if not np.any(mask): + continue + indices = np.where(mask)[0] # 1D index array + y[indices] = np.nan + excludedPoints += len(indices) + if createExcludedWS: + y_excl[indices] = original_y[indices] + + # write back once + ws.setY(wsIndex, y) + + if createExcludedWS: + out_name = "excluded_" + wsName + CloneWorkspace(InputWorkspace=wsName, OutputWorkspace=out_name) + excluded_ws = mtd[out_name] # get workspace object + excluded_ws.setY(wsIndex, y_excl) + excluded_ws.setX(wsIndex, x) + excluded_ws.setTitle("Excluded data from " + wsName) + excluded_ws.setPlotType("marker") + excluded_ws.setMarkerStyle("circle") + + print(f"Debug: excluded {excludedPoints} points in total from spectrum {wsIndex} of workspace {wsName}") def replacePrefix(wsName,newPrefix): #update SNAPRed style ws name @@ -105,17 +136,20 @@ def compatibleWorkspaces(handles): return True -def compositeBackground(handles,dMin=0.65,dMax=10.0,minFractionOfMaxIntensity=0.00): +def compositeBackground(handles,dMin=0.65, + dMax=10.0, + minFractionOfMaxIntensity=0.00, + createExcludedWS=False): #calculate exclusion ROI's using crystalSpecies #calculation runs as a for loop over number of input spectra that are specified by the handles list # Need to ensure all input workspaces have same number of spectra if not compatibleWorkspaces(handles): - print("Submitted workspaces not compatible") + print("compositeBackground: Submitted workspaces not compatible") return else: - print("workspaces are compatible") + print("compositeBackground: workspaces are compatible") #create clones of input workspace to hold the de-peaked spectra # calculate the exclusion regions for each spectrum and set the corresponding @@ -123,8 +157,7 @@ def compositeBackground(handles,dMin=0.65,dMax=10.0,minFractionOfMaxIntensity=0. for runID,handle in enumerate(handles): wsName = handle.wsName - runNo = handle.runNumber - runInt = int(runNo.strip()) + runInt = handle.runNumber dePeakWS = replacePrefix(wsName,"dePeak") CloneWorkspace(InputWorkspace=wsName, @@ -142,9 +175,21 @@ def compositeBackground(handles,dMin=0.65,dMax=10.0,minFractionOfMaxIntensity=0. 
excludeList = [] for creature in handle.crystalSpeciesList: - - creature.calcDSpacings(dMin=dMin, - dMax=dMax, + + print(f"\nProcessing species {creature.name} for spectrum {specID} of run {runInt}") + + #apply dLimit override if specified taking care to retain original dMin,dMax values + if creature.dLimits is not None: + print(f"applying dLimits for species {creature.name}") + dMinOverride = creature.dLimits[0] + dMaxOverride = creature.dLimits[1] + else: + dMinOverride = dMin + dMaxOverride = dMax + + print(f"using dMin: {dMinOverride} and dMax: {dMaxOverride} for d-spacing calculation") + creature.calcDSpacings(dMin=dMinOverride, + dMax=dMaxOverride, minFractionOfMaxIntensity=minFractionOfMaxIntensity) for d in creature.dSpacings: @@ -152,14 +197,14 @@ def compositeBackground(handles,dMin=0.65,dMax=10.0,minFractionOfMaxIntensity=0. roi = [d-extent/2,d+extent/2] excludeList.append(roi) - excludeROI(dePeakWS,specID,excludeList) #will set x-ranges in excludeList in spectrum specID to NAN - print(f"for spec: {specID} {len(excludeList)} regions were excluded") + excludeROI(dePeakWS,specID,excludeList,createExcludedWS=createExcludedWS) #will set x-ranges in excludeList in spectrum specID to NAN + print(f"for spec: {specID}, {len(excludeList)} regions were excluded") #now need to determine the average of all depeaked spectra ignoring NAN values #first need handles on the dePeaked workspaces - dePeakedHandles = workspaceHandles(prefix="dePeak_dsp", - pgs = handles[0].pixelGroup) + dePeakedHandles = workspaceHandles(prefix="dePeak", + PGS = handles[0].pixelGroup) CloneWorkspace(InputWorkspace=handles[0].wsName, OutputWorkspace="avgBgnd") diff --git a/src/snapwrap/statusPrinter.py b/src/snapwrap/statusPrinter.py new file mode 100644 index 0000000..d2e13e7 --- /dev/null +++ b/src/snapwrap/statusPrinter.py @@ -0,0 +1,224 @@ +# to tidy up the code in utils.reduce I've pulled out some print statements into functions here +from mantid.kernel import PhysicalConstants +import snapwrap.snapStateMgr as ssm + +def printWarning(warningType,runNumber): + + snapHome = ssm.SNAPHome() + if warningType == 'noDifcal': + + print(f""" +ERROR: NO DIFFRACTION CALIBRATION FOUND. To proceed either:: + 1. Run a diffraction calibration or + 2. Set "continueNoDifcal = True" to proceed with diagnostic reduction + +INFO: + - Calibration home: {snapHome.calib} + - StateID: {ssm.stateDef(runNumber)[0]} + - State Definition:""") + + stateDict = ssm.stateDef(runNumber)[1] + for key in stateDict: + print(f" {key}: {stateDict[key]}") + + + elif warningType == 'noNormcal': + + print(f""" +ERROR: NO NORMALISATION CALIBRATION FOUND. To proceed either:: + 1. Run a normalisation calibration or + 2. Set "continueNoVan = True" to proceed with diagnostic reduction using artificial normalisation + 3. Set "noNorm = True" to proceed without any normalisation + + +INFO: + - Calibration home: {snapHome.calib} + - StateID: {ssm.stateDef(runNumber)[0]} + - State Definition:""") + + stateDict = ssm.stateDef(runNumber)[1] + for key in stateDict: + print(f" {key}: {stateDict[key]}") + + + +def citation(): + print("\nIf you use SNAPRed or snapwrap in your work please cite:\n") + print("SNAPRed: Reduction of multidimensional neutron time-of-flight diffraction data") + print("M. Guthrie, M. Walsh, K. Travis, R. Boston, D. Caballero, D. Dinger, G. ElsarBoukh, J. Hetrick, A.T. Savici and P. 
Peterson") + print("SoftwareX (2025) Manuscript in press\n") + +def printStatus(status): + + ingredients = status["ingredients"] + stateID = status["stateID"] + stateDict = status["stateDict"] + allPixelGroups = status["allPixelGroups"] + calibrationRecord = status["calibrationRecord"] + calibrationPath = status["calibrationPath"] + normalizationRecord = status["normalizationRecord"] + normalizationPath = status["normalizationPath"] + # runNumber = status["runNumber"] + pixelMasks = status["pixelMasks"] + binMaskList = status["binMaskList"] + continueNoDifcal = status["continueNoDifcal"] + continueNoVan = status["continueNoVan"] + + snapHome = ssm.SNAPHome() + + print(f""" +SNAPRed reduction status: +- Run Number: {ingredients.runNumber} +- ID: {stateID} + """) + if calibrationRecord.version==0 and continueNoDifcal: + print(""" + WARNING: DIAGNOSTIC MODE! DEFAULT GEOMETRY USED. + """) + else: + print(f"""- Calibration: home: {snapHome.calib} +- difcal version: {calibrationRecord.version} runNumber: {calibrationRecord.runNumber} +- comment: {calibrationRecord.indexEntry.comments}""") + + if continueNoVan: + print(""" + WARNING: DIAGNOSTIC MODE! VANADIUM CORRECTION NOT USED + DATA WILL BE ARTIFICIALLY NORMALISED BY DIVISION BY BACKGROUND. + """) + else: + print(f"""- normCal version: {normalizationRecord.version} runNumber: {normalizationRecord.runNumber} +- comment: {normalizationRecord.indexEntry.comments} +""") + + #optional arguments provided... + + if pixelMasks not in ('none', []): + print(f""" + Mask workspace(s) specified: {pixelMasks} + """) + + if binMaskList != []: + print(f""" + Bin Mask workspace(s) specified: {binMaskList} + """) + +def completionMessage(status): + + ingredients = status["ingredients"] + stateID = status["stateID"] + stateDict = status["stateDict"] + allPixelGroups = status["allPixelGroups"] + + print(f""" +Reduction COMPLETE + +- Run Number: {ingredients.runNumber} + +- state: + - ID: {stateID}, + - definition: {stateDict} + - Pixel Groups processed: {allPixelGroups} + +""") + +def verboseStatus(Config, instrumentState, ingredients): + + print("\nINSTRUMENT PARAMETERS") + print(f"- Calib.home: {Config['instrument.calibration.home']}") + # print("\nParams in SNAPInstPrm:") + print("- L1: ",instrumentState.instrumentConfig.L1) + print("- L2: ",instrumentState.instrumentConfig.L2) + L = instrumentState.instrumentConfig.L1+instrumentState.instrumentConfig.L2 + print("- bandwidth: ",instrumentState.instrumentConfig.bandwidth) + print("- lowWavelengthCrop: ",instrumentState.instrumentConfig.lowWavelengthCrop) + + # print("\nParams in application.yml") + print("- low d-Spacing crop: ",Config["constants.CropFactors.lowdSpacingCrop"]) + print("- high d-Spacing crop: ",Config["constants.CropFactors.highdSpacingCrop"]) + + # print("\nParams from state") + wav = instrumentState.detectorState.wav + print("- Central wavelength: ",wav) + + print("\n") + bandwidth = instrumentState.instrumentConfig.bandwidth + lowWavelengthCrop = instrumentState.instrumentConfig.lowWavelengthCrop + lamMin = instrumentState.particleBounds.wavelength.minimum + lamMax = instrumentState.particleBounds.wavelength.maximum + tofMin = instrumentState.particleBounds.tof.minimum + tofMax = instrumentState.particleBounds.tof.maximum + + print(f"- wavelength limits: {lamMin:.4f}, {lamMax:.4f}") + # print(f"- TOF limits: {tofMin:.1f}, {tofMax:.1f}") + + # some tests to confirm that these numbers are being calculated as expected + convFactor = Config["constants.m2cm"] * PhysicalConstants.h / 
PhysicalConstants.NeutronMass + +# print(f""" SOME TESTING... +# calculated lamMin is {wav - bandwidth/2 + lowWavelengthCrop}:.4f, {} +# """) + + assert lamMin == wav - bandwidth/2 + lowWavelengthCrop + assert lamMax == wav + bandwidth/2 + # print(f"calculated tof limits: {lamMin*L/convFactor:.1f}, {lamMax*L/convFactor:.1f}") + assert tofMin == lamMin*L/convFactor + assert tofMax == lamMax*L/convFactor + # calcTofM + # calcTofMax + + pgs = ingredients.pixelGroups #ingredients.pixelGroups is a list of pgs + print("\nPIXEL GROUP PARAMETERS") +# print(f"""TOF limits {pgs[0].timeOfFlight.minimum:.1f} - {pgs[0].timeOfFlight.maximum:.1f} +# Requested Bins across halfWidth: {pgs[0].nBinsAcrossPeakWidth}""") + + for pgs in ingredients.pixelGroups: #ingredients.pixelGroups is a list of pgs + + #pgs are pixel group classes, they are iterable with each item in the class are + #tuples with the first value of the tuple being its name + + print(f""" +----------------------------------------------- +pixel grouping scheme: {pgs.focusGroup.name} +with {len(pgs.pixelGroupingParameters)} subGroup(s) + """) + dMins = [] + dMaxs = [] + dBins = [] + L2s = [] + twoThetas = [] + + for subGroup in pgs.pixelGroupingParameters: + + params = pgs.pixelGroupingParameters[subGroup] + dMaxs.append(params.dResolution.maximum) + dBins.append(params.dRelativeResolution/pgs.nBinsAcrossPeakWidth) + dMins.append(params.dResolution.minimum) + L2s.append(params.L2) + twoThetas.append(params.twoTheta) + + twoThetasDeg = [180.0*x/np.pi for x in twoThetas] + cropDMins = [d+Config["constants.CropFactors.lowdSpacingCrop"] for d in dMins] + cropDMaxs = [d-Config["constants.CropFactors.highdSpacingCrop"] for d in dMaxs] + #reduce precision for pretty printing + + + + dMaxs = [round(num,4) for num in dMaxs] + dMins = [round(num,4) for num in dMins] + dBins = [round(num,4) for num in dBins] + cropDMins = [round(num,4) for num in cropDMins] + cropDMaxs = [round(num,4) for num in cropDMaxs] + + L2s = [round(num,4) for num in L2s] + twoThetas = [round(num,4) for num in twoThetas] + twoThetasDeg = [round(num,1) for num in twoThetasDeg] + + just = 20 + print("L2 (m)".ljust(just),L2s) + print("twoTheta (rad)".ljust(just),twoThetas) + print("twoTheta (deg)".ljust(just),twoThetasDeg) + print("dMin (Å)".ljust(just),dMins) + print("dMax (Å)".ljust(just),dMaxs) + print("dMin (Å) - cropped".ljust(just),cropDMins) + print("dMax (Å) - cropped".ljust(just),cropDMaxs) + print("dBin".ljust(just),dBins) \ No newline at end of file diff --git a/src/snapwrap/utils.py b/src/snapwrap/utils.py index 73d0c69..570a796 100755 --- a/src/snapwrap/utils.py +++ b/src/snapwrap/utils.py @@ -1,18 +1,24 @@ # some helpful functions for use with SNAPRed script version import yaml from mantid.simpleapi import * -from mantid.kernel import PhysicalConstants import numpy as np import matplotlib.pyplot as plt import json import os import shutil +import inspect import importlib import copy import time import importlib.resources as resources from .wrapConfig import WrapConfig +from snapwrap.statusPrinter import (printWarning, + citation, + printStatus, + completionMessage, + verboseStatus) + import snapwrap.snapStateMgr as ssm import snapwrap.io as io import snapwrap.maskUtils as mut @@ -134,6 +140,11 @@ def filterLite(runNumber, boundaries, **reduce_kwargs): print("Error: boundaries must be a dictionary") return + # check current value for qsp parameter + sig = inspect.signature(reduce) + qsp = reduce_kwargs.get('qsp', sig.parameters['qsp'].default) + + # process boundaries if 
boundaries["type"] == "time": # require list of times in seconds as floats. @@ -154,8 +165,10 @@ def filterLite(runNumber, boundaries, **reduce_kwargs): # obtain original nexus file path iptsPath = GetIPTS(RunNumber=runNumber, Instrument="SNAP") + # iptsNumber = iptsPath.split("IPTS-")[-1].split("/")[0] nexusPath = f"{iptsPath}/nexus/SNAP_{runNumber}.nxs.h5" + # override SNAPRed parameters lite location params s = getConfigPath("nexusDefinitionFilterOverride") reloadRedConfig(s) @@ -166,7 +179,7 @@ def filterLite(runNumber, boundaries, **reduce_kwargs): liteYml = f"SNAP_{runNumber}.lite.yml" litePars = {"TOFTol" : -0.0001, #default for no TOF compression "clockTol": None, - "liteGroupMapFile":Config['instrument']['lite']['map']['file'], + "liteGroupMapFile": Config['instrument']['lite']['map']['file'], "liteIDF":Config['instrument']['lite']['definition']['file'], "liteDir":liteDir, "liteFilename":liteFilename, @@ -176,8 +189,10 @@ def filterLite(runNumber, boundaries, **reduce_kwargs): for key in litePars.keys(): print(f"{key}: {litePars[key]}") - #loop through boundaries and reduce + #loop through boundaries, create lite file for each, then reduce that outputWSNames = [] + if qsp: + outputWSNames_qsp = [] for sliceID in range(len(secondBoundaries)-1): startTime = secondBoundaries[sliceID] @@ -202,21 +217,21 @@ def filterLite(runNumber, boundaries, **reduce_kwargs): # reduce this filtered lite data. print(f"Reducing run {runNumber} from {startTime}s to {stopTime}s") - wsNames = reduce(runNumber=runNumber, **reduce_kwargs) - # rename output workspace to indicate sequence - - print(f"Renaming operation...for slice {sliceID}") + # rename output workspace to indicate sequencess for name in wsNames: print("Original name: ",name) - newName = f"{name[:-17]}slice_{str(sliceID).zfill(3)}" - print("New name: ",newName) + if qsp: + handle=io.redObject(name,requiredUnits="qsp") + else: + handle = io.redObject(name) + + newName = f"slice{str(sliceID).zfill(3)}_{handle.units}_{handle.pixelGroup}_{handle.runNumberString}" RenameWorkspace(InputWorkspace=name, OutputWorkspace=newName) outputWSNames.append(newName) # update label for plotting - from mantid.api import TextAxis ws = mtd[newName] @@ -231,19 +246,30 @@ def filterLite(runNumber, boundaries, **reduce_kwargs): DeleteWorkspace(Workspace="tmp") DeleteWorkspace(Workspace="tmpLite") + + + # group output names into workspace group #First group alphabetically def sortKey(name): parts = name.split("_") - string_id = parts[2] # "all", "bank", "column" + string_id = parts[2] # "all", "bank", "column slice_num = int(parts[-1]) # "000" -> 0 return (string_id, slice_num) sortedOutputWSNames = sorted(outputWSNames, key=sortKey) + if qsp: + groupUnits = "qsp" + else: + groupUnits = "dsp" + + GroupWorkspaces(InputWorkspaces=sortedOutputWSNames, OutputWorkspace=f"slice_{groupUnits}_{runNumber}") + # if qsp: + # sortedOutputWSNames_qsp = sorted(outputWSNames_qsp, key=sortKey) + # GroupWorkspaces(InputWorkspaces=sortedOutputWSNames_qsp, OutputWorkspace=f"slice_qsp_{runNumber}") - GroupWorkspaces(InputWorkspaces=sortedOutputWSNames, OutputWorkspace=f"slice_{runNumber}") #Reset config to original reloadRedConfig() @@ -1012,17 +1038,146 @@ def file(nameKeys,operation="add",cabinetName="File_Cabinet"): wsGroup = mtd[cabinetName] print(f"{cabinetName} has {wsGroup.getNumberOfEntries()} total workspaces") -def resample(sampleFactor=1): +def cleanTheTree(prefix="reduced",removePGS=None,deleteWorkspaces=False): + + # finds files with timestamps, creates a clone of the 
latest workspace without a timestamp + # if cleanMode = "hide" the older workspaces are hidden else + # if cleanMode = "delete" the older workspaces are deleted + # if pgs is not None, specified pixel groups will also be cleaned. + + reducedGroups = io.reducedRuns(prefix=prefix, + cleanTreeOverride=False) #by setting cleanTreeOverride to False, we get all workspaces + + for redGroup in reducedGroups: + + runDict = redGroup.objectDict + for pgs in runDict.keys(): + print(f"Found pixel group: {pgs}") + + # identify latest workspace in group and rename it. + latest = runDict[pgs][0] #redObject for most recent workspace + wsKeep = f"{latest.prefix}_{latest.units}_{latest.pixelGroup}_{latest.runNumberString}" + if deleteWorkspaces: + RenameWorkspace(InputWorkspace=latest.wsName, + OutputWorkspace=wsKeep) + else: + CloneWorkspace(InputWorkspace=latest.wsName, + OutputWorkspace=wsKeep) + RenameWorkspace(InputWorkspace=latest.wsName, + OutputWorkspace=f"__{latest.wsName}") + + #Delete or hide any remaining workspaces. + if len(runDict[pgs]) > 1: + for i in range(1,len(runDict[pgs])): + redObj = runDict[pgs][i] + if deleteWorkspaces == False: + RenameWorkspace(InputWorkspace=redObj.wsName, + OutputWorkspace=f"__{redObj.wsName}") + else: + DeleteWorkspace(redObj.wsName) + + if removePGS is None: + continue + + # ensure pgs is a list + if type(removePGS) != list: + removePGS = [removePGS] + + #if pgs is specified, also clean those workspaces + if pgs in removePGS: + DeleteWorkspace(wsKeep) + + # to also support removing workspaces without timestamps that match removePGS need a second + # pass through all workspaces + + if removePGS is None: + return + + reducedGroupsNoTimestamp = io.reducedRuns(prefix=prefix, + cleanTreeOverride=True) #will only find workspaces without timestamps + + for redGroup in reducedGroupsNoTimestamp: + + runDict = redGroup.objectDict + for pgs in runDict.keys(): + if pgs in removePGS: + for redObj in runDict[pgs]: + DeleteWorkspace(redObj.wsName) + +def revealHidden(prefix='reduced', + units='dsp', + PGS = None, + runNumber=None): + + # function to unhide previously hidden workspaces + + handles = workspaceHandles(prefix=f"__{prefix}", + units=units, + PGS=PGS, + runNumber=runNumber, + latestOnly=False, + cleanTreeOverride=False) #finds all hidden workspaces with timestamps + + if handles is None: + print("No hidden workspaces found") + return + + for handle in handles: + hiddenWSName = handle.wsName + unhiddenWSName = hiddenWSName.replace(f"__{prefix}_",f"{prefix}_") + # print(f"Unhiding workspace: {hiddenWSName} to {unhiddenWSName}") + RenameWorkspace(InputWorkspace=hiddenWSName, + OutputWorkspace=unhiddenWSName) + + # if we are unhiding workspaces with timestamps we no longer need to keep any copies without timestamps + + reducedGroupsTS = io.reducedRuns(prefix=prefix, + cleanTreeOverride=False) + + reducedGroupsNoTS = io.reducedRuns(prefix=prefix, + cleanTreeOverride=True) + + for redGroupTS in reducedGroupsTS: + + runDictTS = redGroupTS.objectDict + for pgs in runDictTS.keys(): + # for each pixel group in the timestamped group, check if there are any workspaces without timestamps + redGroupNoTS = None + for redGroupNT in reducedGroupsNoTS: + if redGroupNT.runNumber == redGroupTS.runNumber: + redGroupNoTS = redGroupNT + break + if redGroupNoTS is None: + continue + runDictNoTS = redGroupNoTS.objectDict + if pgs in runDictNoTS.keys(): + for redObj in runDictNoTS[pgs]: + # print(f"Deleting non-timestamped workspace: {redObj.wsName}") + DeleteWorkspace(redObj.wsName) + +def 
resample(sampleFactor=1, + prefix='reduced', + units='dsp', + PGS = None, + runNumber=None): # function to downsample reduced workspaces - reducedGroups = io.reducedRuns(exportFormats=[],prefix="reduced_dsp") + reducedGroups = io.reducedRuns(prefix=prefix, + units=units, + PGS = PGS, + runNumber=runNumber, + exportFormats=[]) + + if sampleFactor > 1: + print(f"Warning: sampleFactor is > 1. This will upsample data which is lossy!") + for redGroup in reducedGroups: runNumber = redGroup.runNumber runDict = redGroup.objectDict - print(f"Down sampling run: {runNumber} with {len(runDict)} pixel group(s)") + print(f"Resampling run: {runNumber} with {len(runDict)} pixel group(s)") for pgs in runDict.keys(): #each key is a pixel group and each pixel group has a list of objects (each is a workspace) @@ -1040,7 +1195,13 @@ def resample(sampleFactor=1): print("DS Delta: ",dsDelta) print(f"inputWorkspace is: {redObj.wsName}") - outWSName = f"resampled_dsp_{redObj.suffix}" + + cleanTree= WrapConfig.get("cleanTree") + if cleanTree: + outWSName = f"resampled_{redObj.units}_{redObj.pixelGroup}_{redObj.runNumber}" + else: + outWSName = f"resampled_{redObj.units}_{redObj.pixelGroup}_{redObj.runNumber}_{redObj.timeStamp}" + print(f"outputWorkspace is: {outWSName}") RebinRagged(InputWorkspace=redObj.wsName, OutputWorkspace=outWSName, @@ -1049,64 +1210,60 @@ def resample(sampleFactor=1): Delta = dsDelta, ) -def exportData(exportFormats=['gsa','xye','csv'], - prefix='reduced_dsp', +def exportData(prefix='reduced', + units='dsp', + PGS = None, + runNumber=None, + iptsOverride=None, + exportFormats=['gsa','xye','csv'], + fileTag=None, latestOnly=True, gsaInstPrm=True, - iptsOverride=None, - fileTag=None): - #creates reducedGroups and then exports these using the requested export formats - - reducedGroups = io.reducedRuns(exportFormats, - prefix, - iptsOverride=iptsOverride, - fileTag=fileTag) + ): + + #creates a list of reducedGroups then export using the requested export formats + reducedGroups = io.reducedRuns(prefix = prefix, + units = units, + PGS = PGS, + runNumber = runNumber, + iptsOverride = iptsOverride, + exportFormats = exportFormats, + fileTag = fileTag) io.exportReducedGroups(reducedGroups,latestOnly,gsaInstPrm) -def workspaceHandles(prefix="reduced_dsp",pgs=None,runNumber=None): +def workspaceHandles(prefix="reduced", + units="dsp", + PGS=None, + runNumber=None, + latestOnly=True, + cleanTreeOverride = None): # returns a list of redObjects for the requested workspaces matching arguments - # 20250530 modified to allow specific pgs or run number to be optionally specified - # otherwise everything will be found. - - #currently only the latest timestamp is returned. - - reducedList = io.reducedRuns([],prefix=prefix) #first argument is a list that isn't used but needs to exist + reducedList = io.reducedRuns(prefix=prefix, + units=units, + PGS=PGS, + runNumber=runNumber, + cleanTreeOverride=cleanTreeOverride) if not reducedList: - print("No matching workspaces found") + print("No matching workspaces found. 
Check filters") return - #if a pgs is specified filter only those matching, otherwise do nothing here - + # process reducedRun dictionaries to create a list of of redObjects handleList = [] for red in reducedList: + pgsList = red.objectDict.keys() - if runNumber == None: - for p in pgsList: - redObj = red.objectDict[p][0] + for p in pgsList: + if latestOnly: + redObj = red.objectDict[p][0] # selects most recent workspace only handleList.append(redObj) - else: - if int(red.runNumber) == runNumber: - for p in pgsList: - redObj = red.objectDict[p][0] - handleList.append(redObj) - - # at this point all, pgs are included. If none is specified, return here otherwise - # purge to match requested pgs - - if pgs == None: - print(f"Found {len(handleList)} matching workspaces") - return handleList - else: - purgeHandleList = [] - for h in handleList: - if h.pixelGroup == pgs: - purgeHandleList.append(h) - print(f"Found {len(purgeHandleList)} matching workspaces") - return purgeHandleList + else: + redObjects = [red.objectDict[p][i] for i in range(len(red.objectDict[p]))] #this is a list now + handleList.extend(redObjects) + return handleList def confirmIPTS(ipts,comment="SNAPRed/snapwrap", subNum=1, redType="Scripts"): @@ -1327,6 +1484,9 @@ def reload(runNumber, keepMask=keepMask, pixelGroup=pixelGroup, ) + if WrapConfig.get("cleanTree"): + cleanTheTree(prefix="reduced", + deleteWorkspaces=False) def autoMask(inputWorkspace,maskType="PE",plotOn=True): @@ -1477,12 +1637,6 @@ def cheeseMask(context, binMaskList): context.mantidSnapper.executeQueue() -def citation(): - print("\nIf you use SNAPRed or snapwrap in your work please cite:\n") - print("SNAPRed: Reduction of multidimensional neutron time-of-flight diffraction data") - print("M. Guthrie, M. Walsh, K. Travis, R. Boston, D. Caballero, D. Dinger, G. ElsarBoukh, J. Hetrick, A.T. Savici and P. Peterson") - print("Manuscript in preparation (2025)\n") - def reduce(runNumber, sampleEnv='none', pixelMaskIndex='none', @@ -1501,10 +1655,33 @@ def reduce(runNumber, singlePixelGroup=None, qsp=False, linBin=0.01, + removePGS=None, save=True): from mantid import config + # Helper for consistent, graceful aborts when called from Mantid Workbench. + # IMPORTANT: this does not raise, it just logs/prints and returns a sentinel + # so that no traceback is produced by the surrounding interpreter. + def _abort(msg: str): + """Print/log a friendly error and signal that reduction failed. + + Callers must immediately return the result of this function from + ``reduce`` so that the error does not propagate further. 
+ """ + try: + from mantid.kernel import Logger # type: ignore + Logger("snapwrap").error(msg) + except Exception: + # Logger not critical in Workbench script context + pass + if msg == "": + print(f"\nReduction aborted.\n") + else: + print(f"\nERROR: {msg}\nReduction aborted.\n") + # Use None as a simple "aborted" sentinel + return None + if verbose: config.setLogLevel(5, quiet=True) else: @@ -1536,8 +1713,27 @@ def reduce(runNumber, # keepUnfocussed = snapwrapGlob.keepUnfocussed convertUnitsTo = snapwrapGlob.convertUnitsTo - #process continue flags - continueFlags = ContinueWarning.Type.UNSET #by default do not continue + # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> + # process calibration status and continue flags + # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> + + #first catch dead ends, abort and return useful information + #difcal + calibrationStatus = ssm.isCalibrated(runNumber=runNumber,silent=True) + if not any([calibrationStatus[0],continueNoDifcal]): # difcal is absent and fallback not requested + printWarning('noDifcal',runNumber) + return _abort( + f"" + ) + + #normcal + if not any([calibrationStatus[1],continueNoVan,noNorm]): # van is absent and fallback not requested + printWarning('noNormcal',runNumber) + return _abort( + f"" + ) + + continueFlags = ContinueWarning.Type.UNSET # by default do not continue if continueNoVan and not noNorm: artificialNormalizationIngredients = ArtificialNormalizationIngredients( @@ -1647,9 +1843,6 @@ def reduce(runNumber, ) snapRequest = SNAPRequest(path="/reduction",payload=reductionRequest,hooks=hooks) - - print(reductionRequest) - reductionService.validateReduction(reductionRequest) # 1. load default grouping workspaces from the state folder @@ -1666,7 +1859,6 @@ def reduce(runNumber, print(f"Setting single focus group: {focGroup.name}") reductionRequest.focusGroups.append(focGroup) - print("request",reductionRequest.focusGroups) # 2. Load Calibration (error out if it doesnt exist, comment out if continue anyway) @@ -1687,14 +1879,12 @@ def reduce(runNumber, print("groceries") print(groceries) - # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> # Load the metadata i.e. ingredients # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> # 1. load reduction ingredients - ingredients = reductionService.prepReductionIngredients(reductionRequest, groceries.get("combinedPixelMask","")) - + ingredients = reductionService.prepReductionIngredients(reductionRequest, groceries.get("combinedPixelMask","")) ingredients.artificialNormalizationIngredients = artificialNormalizationIngredients # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> @@ -1718,14 +1908,8 @@ def reduce(runNumber, ) if calibrationRecord.version == 0 and not continueNoDifcal: - print(""" - - - WARNING: NO DIFFRACTION CALIBRATION FOUND. TO PROCEED EITHER: - 1. RUN A DIFFRACTION CALIBRATION OR - 2. SET "continueNoDifcal = True" TO PROCEED WITH DEFAULT GEOMETRY - - """) - assert False + printWarning('noDifcal') + _abort("")#No diffraction calibration found. Provide calibration or set continueNoDifcal=True to proceed in diagnostic mode.") # print(calibrationRecord.version) normalizationPath = dataFactoryService.getNormalizationDataPath( @@ -1741,13 +1925,10 @@ def reduce(runNumber, state = stateID ) - if type(normalizationRecord) == None: - print(""" - - WARNING: NO VANADIUM FOUND. TO PROCEED EITHER: - 1. RUN A VANADIUM CALIBRATION OR - 2. 
SET "continueNoVan = True" TO USE ARTIFICIAL NORMALISATION - """) - + if normalizationRecord is None and not (continueNoVan or noNorm): + printWarning('noNormcal') + _abort("No normalization (vanadium) calibration found. Provide normalization or set continueNoVan=True / noNorm=True to bypass.") + # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> # Pretty print useful information regarding reduction status # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> @@ -1758,52 +1939,26 @@ def reduce(runNumber, for item in ingredient[1]: allPixelGroups.append(item.focusGroup.name) - print(f""" -READY TO REDUCE. SNAPRed status: -- Run Number: {ingredients.runNumber} -- state: - - ID: {stateID}, - - definition: {stateDict} - - Pixel Groups to process: {allPixelGroups} - """) - if calibrationRecord.version==0 and continueNoDifcal: - print(""" - WARNING: DIAGNOSTIC MODE! DEFAULT GEOMETRY USED. - """) - else: - print(f""" - - Diffraction Calibration: - - .h5 path: {calibrationPath} - - .h5 version: {calibrationRecord.version} - """) - - if continueNoVan: - print(""" - WARNING: DIAGNOSTIC MODE! VANADIUM CORRECTION NOT USED - DATA WILL BE ARTIFICIALLY NORMALISED BY DIVISION BY BACKGROUND. - """) - else: - print(f""" - - Normalisation Calibration: - - raw vanadium path: {normalizationPath} - - raw vanadium version: {normalizationRecord.version} - """) - - #optional arguments provided... - - if pixelMasks not in ('none', []): - print(f""" - Mask workspace(s) specified: {pixelMasks} - """) - - if binMaskList != []: - print(f""" - Bin Mask workspace(s) specified: {binMaskList} - """) + status = { + "ingredients": ingredients, + "stateID": stateID, + "stateDict": stateDict, + "allPixelGroups": allPixelGroups, + "calibrationRecord": calibrationRecord, + "calibrationPath": calibrationPath, + "normalizationRecord": normalizationRecord, + "normalizationPath": normalizationPath, + "runNumber": runNumber, + "pixelMasks": pixelMasks, + "binMaskList": binMaskList, + "continueNoDifcal": continueNoDifcal, + "continueNoVan": continueNoVan, + } + printStatus(status) time.sleep(5) #pause to allow user to read status info - #obtain useful values from instrument state + #obtain useful values from instrument state farmFresh = FarmFreshIngredients( runNumber=runNumber, useLiteMode=useLiteMode, @@ -1816,16 +1971,7 @@ def reduce(runNumber, #prior to reduction, need to determine appropriate binning to match requested #Q-space binning - originalIngredients,ingredients = updateBinForQ(ingredients,0.01) - - # for pgs in ingredients.pixelGroups: - # print(f"processing pgs: {pgs.focusGroup.name} with {len(pgs.pixelGroupingParameters)} subgroups") - - # for subGroup in pgs.pixelGroupingParameters: - # params = pgs.pixelGroupingParameters[subGroup] - # dMax = params.dResolution.maximum - # dMin = params.dResolution.minimum - # dBin = params.dRelativeResolution/pgs.nBinsAcrossPeakWidth + originalIngredients,ingredients = updateBinForQ(ingredients,linBin) pgs = ingredients.pixelGroups print("UPDATED") @@ -1838,23 +1984,20 @@ def reduce(runNumber, dBin = params.dRelativeResolution/pg.nBinsAcrossPeakWidth print(f"{dMin:.4f} {dBin:.6f} {dMax:.4f}") - if reduceData: # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> # Execute reduction here # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> - data = interfaceController.executeRequest(snapRequest).data try: - record=data.record + data = interfaceController.executeRequest(snapRequest).data + except Exception as e: + _abort(f"Reduction 
execution failed: {e}") + try: + record = data.record except AttributeError: - print("ERROR: reduction failed") - assert False - - # print("\n\ndata\n\n") - # print(data.record.workspaceNames) - + _abort("Reduction failed: response missing record attribute.") # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> # Save the data @@ -1866,199 +2009,48 @@ def reduce(runNumber, reductionService.saveReduction(saveReductionRequest) - print(f""" - Reduction COMPLETE - - - Run Number: {ingredients.runNumber} - - - state: - - ID: {stateID[0]}, - - definition: {stateID[1]} - - - Pixel Groups to process: {allPixelGroups} - - """) + printStatus(status) - if calibrationRecord.version==0 and continueNoDifcal: - print(""" - - WARNING: DIAGNOSTIC MODE! DEFAULT GEOMETRY USED TO CONVERT UNITS. - """) - else: - print(f""" - Calibration Status: - - Diffraction Calibration: - - .h5 path: {calibrationPath} - - .h5 version: {calibrationRecord.version} - - """) + # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> + # if cleanTree, hide timstamped reduced workspaces. + # >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> + cleanTree = WrapConfig.get("cleanTree") + if cleanTree: + cleanTheTree(removePGS=removePGS) + cleanTheTree(prefix="diagnostic",removePGS=removePGS) #also clean diagnostic workspaces + print("Time-stamped Workspaces have been hidden") + outputWSList = [ws[:-18] for ws in data.record.workspaceNames] + else: + outputWSList = data.record.workspaceNames - if continueNoVan: - print(""" - - WARNING: DIAGNOSTIC MODE! VANADIUM CORRECTION NOT USED - DATA WILL BE ARTIFICIALLY NORMALISED USING DIVISION BY BACKGROUND - """) else: - print(f""" - - Normalisation Calibration: - - raw vanadium path: {normalizationPath} - - raw vanadium version: {normalizationRecord.version} - - """) - - #optional arguments provided... - - if sampleEnv != 'none': - print(f""" - Sample environment was specified. 
- - - name: {seeDict["name"]} - - id: {seeDict["id"]} - - type: {seeDict["type"]} - - mask: {seeDict["masks"]["maskFilenameList"]} NOT YET IMPLEMENTED - - """) - - if pixelMasks != 'none' or []: - print(f""" - Mask workspace(s) specified: - """) - for mask in pixelMasks: - print(f""" - {mask} - """) - + outputWSList = None if verbose: - - print("\nINSTRUMENT PARAMETERS") - print(f"- Calib.home: {Config['instrument.calibration.home']}") - # print("\nParams in SNAPInstPrm:") - print("- L1: ",instrumentState.instrumentConfig.L1) - print("- L2: ",instrumentState.instrumentConfig.L2) - L = instrumentState.instrumentConfig.L1+instrumentState.instrumentConfig.L2 - print("- bandwidth: ",instrumentState.instrumentConfig.bandwidth) - print("- lowWavelengthCrop: ",instrumentState.instrumentConfig.lowWavelengthCrop) - - # print("\nParams in application.yml") - print("- low d-Spacing crop: ",Config["constants.CropFactors.lowdSpacingCrop"]) - print("- high d-Spacing crop: ",Config["constants.CropFactors.highdSpacingCrop"]) - - # print("\nParams from state") - wav = instrumentState.detectorState.wav - print("- Central wavelength: ",wav) - - print("\n") - bandwidth = instrumentState.instrumentConfig.bandwidth - lowWavelengthCrop = instrumentState.instrumentConfig.lowWavelengthCrop - lamMin = instrumentState.particleBounds.wavelength.minimum - lamMax = instrumentState.particleBounds.wavelength.maximum - tofMin = instrumentState.particleBounds.tof.minimum - tofMax = instrumentState.particleBounds.tof.maximum - - print(f"- wavelength limits: {lamMin:.4f}, {lamMax:.4f}") - # print(f"- TOF limits: {tofMin:.1f}, {tofMax:.1f}") - - # some tests to confirm that these numbers are being calculated as expected - convFactor = Config["constants.m2cm"] * PhysicalConstants.h / PhysicalConstants.NeutronMass - -# print(f""" SOME TESTING... 
-# calculated lamMin is {wav - bandwidth/2 + lowWavelengthCrop}:.4f, {} -# """) - - assert lamMin == wav - bandwidth/2 + lowWavelengthCrop - assert lamMax == wav + bandwidth/2 - # print(f"calculated tof limits: {lamMin*L/convFactor:.1f}, {lamMax*L/convFactor:.1f}") - assert tofMin == lamMin*L/convFactor - assert tofMax == lamMax*L/convFactor - # calcTofM - # calcTofMax - - pgs = ingredients.pixelGroups #ingredients.pixelGroups is a list of pgs - print("\nPIXEL GROUP PARAMETERS") -# print(f"""TOF limits {pgs[0].timeOfFlight.minimum:.1f} - {pgs[0].timeOfFlight.maximum:.1f} -# Requested Bins across halfWidth: {pgs[0].nBinsAcrossPeakWidth}""") - - for pgs in ingredients.pixelGroups: #ingredients.pixelGroups is a list of pgs - - #pgs are pixel group classes, they are iterable with each item in the class are - #tuples with the first value of the tuple being its name - - print(f""" ------------------------------------------------ -pixel grouping scheme: {pgs.focusGroup.name} -with {len(pgs.pixelGroupingParameters)} subGroup(s) - """) - dMins = [] - dMaxs = [] - dBins = [] - L2s = [] - twoThetas = [] - - for subGroup in pgs.pixelGroupingParameters: - - params = pgs.pixelGroupingParameters[subGroup] - dMaxs.append(params.dResolution.maximum) - dBins.append(params.dRelativeResolution/pgs.nBinsAcrossPeakWidth) - dMins.append(params.dResolution.minimum) - L2s.append(params.L2) - twoThetas.append(params.twoTheta) - - twoThetasDeg = [180.0*x/np.pi for x in twoThetas] - cropDMins = [d+Config["constants.CropFactors.lowdSpacingCrop"] for d in dMins] - cropDMaxs = [d-Config["constants.CropFactors.highdSpacingCrop"] for d in dMaxs] - #reduce precision for pretty printing - - - - dMaxs = [round(num,4) for num in dMaxs] - dMins = [round(num,4) for num in dMins] - dBins = [round(num,4) for num in dBins] - cropDMins = [round(num,4) for num in cropDMins] - cropDMaxs = [round(num,4) for num in cropDMaxs] - - L2s = [round(num,4) for num in L2s] - twoThetas = [round(num,4) for num in twoThetas] - twoThetasDeg = [round(num,1) for num in twoThetasDeg] - - just = 20 - print("L2 (m)".ljust(just),L2s) - print("twoTheta (rad)".ljust(just),twoThetas) - print("twoTheta (deg)".ljust(just),twoThetasDeg) - print("dMin (Å)".ljust(just),dMins) - print("dMax (Å)".ljust(just),dMaxs) - print("dMin (Å) - cropped".ljust(just),cropDMins) - print("dMax (Å) - cropped".ljust(just),cropDMaxs) - print("dBin".ljust(just),dBins) - - # if reduceData: - # print(data) - # for dat in data: - # print(dat) + verboseStatus(Config,instrumentState,ingredients) if qsp: - # snapwrapIO.convertToQ() - # first generate list of redObjects for this run: - - redWSList = [] - for ws in data.record.workspaceNames: - redObj = io.redObject(ws) - if redObj.isReducedDataWorkspace: - redWSList.append(redObj) - - for redObj in redWSList: - dspName = redObj.wsName - qspName = dspName.replace("_dsp_","_qsp_") - ConvertUnits(InputWorkspace=dspName, - OutputWorkspace=qspName, - Target="MomentumTransfer") - - rebPrm = linBin*np.ones(mtd[qspName].getNumberHistograms()) - RebinRagged(InputWorkspace = qspName, - OutputWorkspace= qspName, - Delta = rebPrm) #ragged is needed as Qmin/max vary - - #lastly, downsample d-space data back to original request - restoreDBins(redObj,originalIngredients) + # post reduction, need to convert d-space reduced data to Q-space + + outputWSList_Q = [] #reset to hold q-space names + if outputWSList is not None: + for dspName in outputWSList: + qspName = dspName.replace("_dsp_","_qsp_") + ConvertUnits(InputWorkspace=dspName, + 
OutputWorkspace=qspName,
+                             Target="MomentumTransfer")
+
+                rebPrm = linBin*np.ones(mtd[qspName].getNumberHistograms())
+                RebinRagged(InputWorkspace=qspName,
+                            OutputWorkspace=qspName,
+                            Delta=rebPrm) #ragged rebin is needed as Qmin/Qmax vary per spectrum
+
+                #lastly, restore the d-space data to its originally requested binning
+                handle = io.redObject(dspName)
+                restoreDBins(handle,originalIngredients)
+
+                outputWSList_Q.append(qspName)
+            outputWSList = outputWSList_Q

     #clean up after myself
@@ -2082,7 +2074,7 @@ def reduce(runNumber,
     citation()
     config.setLogLevel(3, quiet=True)

-    return data.record.workspaceNames #ingredients.pixelGroups
+    return outputWSList
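# --- sketch (illustration only): driving the reworked keyword-filter API end to
# end. Run number 46680 and pixel group "column" are assumed example values;
# cleanTree behaviour follows the new application.yml default.
import snapwrap.utils as utils

wsNames = utils.reduce(46680, qsp=True, linBin=0.01)  # returns output names, or None on abort

if wsNames is not None:
    # export only the latest "column" reduction for this run
    utils.exportData(prefix='reduced',
                     units='dsp',
                     PGS='column',
                     runNumber=46680,
                     exportFormats=['gsa', 'xye'])

    # time-stamped copies hidden by cleanTree can be recovered later
    utils.revealHidden(prefix='reduced', runNumber=46680)
# --- end sketch ---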