diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..e1804fe
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,4 @@
+[submodule "tests/data/snapred-data"]
+ path = tests/data/snapred-data
+ url = https://code.ornl.gov/sns-hfir-scse/infrastructure/test-data/snapred-data.git
+ branch = main
\ No newline at end of file
diff --git a/environment.yml b/environment.yml
index 03579e9..b24ce56 100644
--- a/environment.yml
+++ b/environment.yml
@@ -8,7 +8,9 @@ channels:
dependencies:
- snapred==1.2.0rc2
# SNAPBlue specific dependencies
+ # NOTE(review): dropped conflicting pin "snapred==1.1.0rc5" — snapred==1.2.0rc2 is already pinned above
- scikit-image
+ - git-lfs
# -- Runtime dependencies
# base: list all base dependencies here
- python>=3.8 # please specify the minimum version of python here
diff --git a/pyproject.toml b/pyproject.toml
index f2e2697..0d8857d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -74,7 +74,7 @@ packagename-cli = "packagenamepy.packagename:main"
packagenamepy = "packagenamepy.packagename:gui"
[tool.pytest.ini_options]
-addopts = "-v --cov=packagenamepy --cov-report=term-missing"
+addopts = "-m 'not (integration or datarepo)' -v --cov=packagenamepy --cov-report=term-missing"
pythonpath = [
".", "src", "scripts"
]
@@ -82,7 +82,10 @@ testpaths = ["tests"]
python_files = ["test*.py"]
norecursedirs = [".git", "tmp*", "_tmp*", "__pycache__", "*dataset*", "*data_set*"]
markers = [
- "mymarker: example markers goes here"
+ "integration: mark a test as an integration test",
+ "mount_snap: mark a test as using /SNS/SNAP/ data mount",
+ "golden_data(*, path=None, short_name=None, date=None): mark golden data to use with a test",
+ "datarepo: mark a test as using snapred-data repo"
]
[tool.ruff]
diff --git a/src/snapblue/meta/Config.py b/src/snapblue/meta/Config.py
new file mode 100644
index 0000000..8bbc359
--- /dev/null
+++ b/src/snapblue/meta/Config.py
@@ -0,0 +1,86 @@
+import importlib.resources as resources
+
+import os
+import sys
+
+from pathlib import Path
+
+from snapred.meta.Config import Resource as RedResource, Config
+
+def _find_root_dir():
+ try:
+ MODULE_ROOT = Path(sys.modules["snapblue"].__file__).parent
+
+ # Using `"test" in env` here allows different versions of "[category]_test.yml" to be used for different
+ # test categories: e.g. unit tests use "test.yml" but integration tests use "integration_test.yml".
+ env = os.environ.get("snapblue_env")
+ if env and "test" in env and "conftest" in sys.modules:
+ # WARNING: there are now multiple "conftest.py" at various levels in the test hierarchy.
+ MODULE_ROOT = MODULE_ROOT.parent.parent / "tests"
+ except Exception as e:
+ raise RuntimeError("Unable to determine SNAPBlue module-root directory") from e
+
+ return str(MODULE_ROOT)
+
+class _Resource:
+ _packageMode: bool
+ _resourcesPath: str
+
+ def __init__(self):
+ # where the location of resources are depends on whether or not this is in package mode
+ self._packageMode = not self._existsInPackage("application.yml")
+ if self._packageMode:
+ self._resourcesPath = "/resources/"
+ else:
+ self._resourcesPath = os.path.join(_find_root_dir(), "resources/")
+
+ def _existsInPackage(self, subPath) -> bool:
+ with resources.path("snapblue.resources", subPath) as path:
+ return os.path.exists(path)
+
+ def exists(self, subPath) -> bool:
+ if self._packageMode:
+ return self._existsInPackage(subPath)
+ else:
+ return os.path.exists(self.getPath(subPath))
+
+ def getPath(self, subPath):
+ if subPath.startswith("/"):
+ return os.path.join(self._resourcesPath, subPath[1:])
+ else:
+ return os.path.join(self._resourcesPath, subPath)
+
+ def read(self, subPath):
+ with self.open(subPath, "r") as file:
+ return file.read()
+
+ def open(self, subPath, mode): # noqa: A003
+ if self._packageMode:
+ with resources.path("snapblue.resources", subPath) as path:
+ return open(path, mode)
+ else:
+ return open(self.getPath(subPath), mode)
+
+
+Resource = _Resource()
+RedResource._resourcesPath = Resource._resourcesPath
+RedResource._packageMode = Resource._packageMode
+# use refresh to do initial load, clearing shouldn't matter
+Config.refresh("application.yml")
+
+# ---------- SNAPRed-internal values: --------------------------
+# allow "resources" relative paths to be entered into the "yml"
+# using "${module.root}"
+Config._config["module"] = {}
+Config._config["module"]["root"] = _find_root_dir()
+
+Config._config["version"] = Config._config.get("version", {})
+Config._config["version"]["default"] = -1
+# ---------- end: internal values: -----------------------------
+
+# see if user used environment injection to modify what is needed
+# this will get from the os environment or from the currently loaded one
+# first case wins
+env = os.environ.get("snapblue_env", Config._config.get("environment", None))
+if env is not None:
+ Config.refresh(env)
\ No newline at end of file
diff --git a/src/snapblue/resources/application.yml b/src/snapblue/resources/application.yml
new file mode 100644
index 0000000..27f6aa3
--- /dev/null
+++ b/src/snapblue/resources/application.yml
@@ -0,0 +1,4 @@
+# put snapred application.yml overrides here
+IPTS:
+ default: /SNS
+ root: /SNS
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..af3b8a4
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,143 @@
+import os
+
+import pytest
+import unittest.mock as mock
+
+from snapred.meta.decorators import Resettable
+from snapred.meta.decorators.Singleton import reset_Singletons
+
+# import sys
+# sys.path.append('.')
+# os.environ['PYTHONPATH'] = './src'
+
+# Allow override: e.g. `env=dev pytest ...`
+# if not os.environ.get("env"):
+# os.environ["env"] = "test"
+
+from mantid.kernel import ConfigService # noqa: E402
+from snapblue.meta.Config import ( # noqa: E402
+ Config, # noqa: E402
+ Resource, # noqa: E402
+)
+
+
+# PATCH the `unittest.mock.Mock` class: BANNED FUNCTIONS
+def banned_function(function_name: str):
+ _error_message: str = f"`Mock.{function_name}` is a mock, it always evaluates to True. Use `Mock.assert_{function_name}` instead."
+
+ def _banned_function(self, *args, **kwargs):
+ nonlocal _error_message # this line should not be necessary!
+
+ # Ensure that the complete message is in the pytest-captured output stream:
+ print(_error_message)
+
+ raise RuntimeError(_error_message)
+
+ return _banned_function
+
+# `mock.Mock.called` is OK: it exists as a boolean attribute
+mock.Mock.called_once = banned_function("called_once")
+mock.Mock.called_once_with = banned_function("called_once_with")
+mock.Mock.called_with = banned_function("called_with")
+mock.Mock.not_called = banned_function("not_called")
+
+
+def mock_decorator(orig_cls):
+ return orig_cls
+
+###### PATCH THE DECORATORS HERE ######
+
+mockResettable = mock.Mock()
+mockResettable.Resettable = mock_decorator
+mock.patch.dict("sys.modules", {"snapred.meta.decorators.Resettable": mockResettable}).start()
+mock.patch.dict("sys.modules", {"snapred.meta.decorators._Resettable": Resettable}).start()
+
+mantidConfig = ConfigService.Instance()
+mantidConfig["CheckMantidVersion.OnStartup"] = "0"
+mantidConfig["UpdateInstrumentDefinitions.OnStartup"] = "0"
+mantidConfig["usagereports.enabled"] = "0"
+
+#######################################
+
+# this at teardown removes the loggers, eliminating logger-related error printouts
+# see https://github.com/pytest-dev/pytest/issues/5502#issuecomment-647157873
+@pytest.fixture(autouse=True, scope="session")
+def clear_loggers(): # noqa: PT004
+ """Remove handlers from all loggers"""
+ import logging
+
+ yield # ... teardown follows:
+ loggers = [logging.getLogger()] + list(logging.Logger.manager.loggerDict.values())
+ for logger in loggers:
+ handlers = getattr(logger, "handlers", [])
+ for handler in handlers:
+ logger.removeHandler(handler)
+
+########################################################################################################################
+# In combination, the following autouse fixtures allow unit tests and integration tests
+# to be run successfully without mocking out the `@Singleton` decorator.
+#
+# * The main objective is to allow the `@Singleton` classes to function as singletons, for the
+# duration of the single-test scope. This functionality is necessary, for example, in order for
+# the `Indexer` class to function correctly during the state-initialization sequence.
+#
+# * There are some fine points involved with using `@Singleton` classes during testing at class and module scope.
+# Such usage should probably be avoided whenever possible. It's a bit tricky to get this to work correctly
+# within the test framework.
+#
+# * TODO: Regardless of these fixtures, at the moment the `@Singleton` decorator must be completely turned ON during
+# integration tests, without any modification (e.g. or "reset"). There is something going on at "session" scope
+# with specific singletons not being deleted between tests, which results in multiple singleton instances when the
+# fixtures are used. This behavior does not seem to be an issue for the unit tests.
+# We can track this down by turning on the garbage collector `gc`, but this work has not yet been completed.
+#
+# Implementation notes:
+#
+# * Right now, there are > 36 `@Singleton` decorated classes. Probably, there should be far fewer.
+# Almost none of these classes are compute-intensive to initialize, or retain any cached data.
+# These would be the normal justifications for the use of this pattern.
+#
+# * Applying the `@Singleton` decorator changes the behavior of the classes,
+# so we don't want to mock the decorator out during testing. At present, the key class where this is important
+# is the `Indexer` class, which is not itself a singleton, but which is owned and cached
+# by the `LocalDataService` singleton. `Indexer` instances retain local data about indexing events
+# that have occurred since their initialization.
+#
+
+@pytest.fixture(autouse=True)
+def _reset_Singletons(request):
+    if "integration" not in request.keywords:
+ reset_Singletons()
+ yield
+
+@pytest.fixture(scope="class", autouse=True)
+def _reset_class_scope_Singletons(request):
+    if "integration" not in request.keywords:
+ reset_Singletons()
+ yield
+
+@pytest.fixture(scope="module", autouse=True)
+def _reset_module_scope_Singletons(request):
+    if "integration" not in request.keywords:
+ reset_Singletons()
+ yield
+
+########################################################################################################################
+
+
+## Import various `pytest.fixture` defined in separate `tests/util` modules:
+# -------------------------------------------------------------------------
+# *** IMPORTANT WARNING: these must be included _after_ the `Singleton` decorator is patched ! ***
+# * Otherwise, the modules imported by these will not have the patched decorator applied to them.
+
+# from util.golden_data import goldenData, goldenDataFilePath
+# from util.state_helpers import state_root_fixture
+# from util.IPTS_override import IPTS_override_fixture
+from util.Config_helpers import Config_override_fixture
+from util.pytest_helpers import (
+ calibration_home_from_mirror,
+ cleanup_workspace_at_exit,
+ cleanup_class_workspace_at_exit,
+ get_unique_timestamp,
+ reduction_home_from_mirror
+)
diff --git a/tests/data/snapred-data b/tests/data/snapred-data
new file mode 160000
index 0000000..60ee8c5
--- /dev/null
+++ b/tests/data/snapred-data
@@ -0,0 +1 @@
+Subproject commit 60ee8c536ed0fffffc0ecb103940ab04b5938106
diff --git a/tests/integration/test_filesystem.py b/tests/integration/test_filesystem.py
new file mode 100644
index 0000000..950bad9
--- /dev/null
+++ b/tests/integration/test_filesystem.py
@@ -0,0 +1,16 @@
+
+
+import pytest
+from util.pytest_helpers import calibration_home_from_mirror, handleStateInit, reduction_home_from_mirror # noqa: F401
+from snapblue.meta.Config import Config
+from pathlib import Path
+
+
+@pytest.mark.integration
+@pytest.mark.datarepo
+def test_calibrationHomeExists(calibration_home_from_mirror):
+    calibration_home_from_mirror()  # return value (the tmp dir path) is not needed here
+ calibrationHomePath = Path(Config["instrument.calibration.home"])
+ assert calibrationHomePath.exists()
+ iptsHomePath = Path(Config["IPTS.root"])
+ assert iptsHomePath.exists()
\ No newline at end of file
diff --git a/tests/integration/test_reduction.py b/tests/integration/test_reduction.py
new file mode 100644
index 0000000..19173c7
--- /dev/null
+++ b/tests/integration/test_reduction.py
@@ -0,0 +1,10 @@
+
+
+import pytest
+from util.pytest_helpers import calibration_home_from_mirror, handleStateInit, reduction_home_from_mirror # noqa: F401
+
+
+@pytest.mark.integration
+@pytest.mark.datarepo
+def test_reduction(reduction_home_from_mirror):
+ pass
\ No newline at end of file
diff --git a/tests/resources/application.yml b/tests/resources/application.yml
new file mode 100644
index 0000000..27f6aa3
--- /dev/null
+++ b/tests/resources/application.yml
@@ -0,0 +1,4 @@
+# put snapred application.yml overrides here
+IPTS:
+ default: /SNS
+ root: /SNS
diff --git a/tests/resources/integration_test.yml b/tests/resources/integration_test.yml
new file mode 100644
index 0000000..f45a74e
--- /dev/null
+++ b/tests/resources/integration_test.yml
@@ -0,0 +1,86 @@
+# environment: integration_test
+# At present:
+# * this "integration_test.yml" overrides "IPTS.root", and "constants.maskedPixelThreshold";
+# * "module.root" will still be defined as in "test.yml".
+
+IPTS:
+ # Eventually, for SNAPRed's test framework:
+ # this should be a shared location on "analysis.sns.gov".
+ # For the moment, each developer needs to set this individually to their local path.
+ root: ${module.root}/data/snapred-data/SNS
+
+constants:
+ # For tests with '46680' this seems to be necessary.
+ maskedPixelThreshold: 1.0
+
+ DetectorPeakPredictor:
+ fwhm: 1.17741002252 # used to convert gaussian to fwhm (2 * log_e(2))
+ CropFactors:
+ lowWavelengthCrop: 0.05
+ lowdSpacingCrop: 0.1
+ highdSpacingCrop: 0.15
+ RawVanadiumCorrection:
+ numberOfSlices: 1
+ numberOfAnnuli: 1
+
+instrument:
+ native:
+ pixelResolution: 72
+ definition:
+ file: ${module.root}/resources/ultralite/CRACKLE_Definition.xml
+ lite:
+ pixelResolution: 18
+ definition:
+ file: ${module.root}/resources/ultralite/CRACKLELite_Definition.xml
+ map:
+ file: ${module.root}/resources/ultralite/CRACKLELiteDataMap.xml
+
+ PVLogs:
+ # Swap these when running with ultralite data
+ # rootGroup: "entry/DASlogs"
+ rootGroup: "/mantid_workspace_1/logs"
+
+ # PV-log keys relating to instrument settings:
+ instrumentPVKeys:
+ - "BL3:Chop:Gbl:WavelengthReq"
+ - "BL3:Chop:Skf1:WavelengthUserReq"
+ - "det_arc1"
+ - "det_arc2"
+ - "BL3:Det:TH:BL:Frequency"
+ - "BL3:Mot:OpticsPos:Pos"
+ - "det_lin1"
+ - "det_lin2"
+
+mantid:
+ workspace:
+ nameTemplate:
+ delimiter: "_"
+ template:
+ run: "{unit},{group},{lite},{auxiliary},{runNumber}"
+ diffCal:
+ input: "{unit},{runNumber},raw"
+ table: "diffract_consts,{runNumber},{version}"
+ output: "{unit},{group},{runNumber},{version}"
+ diagnostic: "diagnostic,{group},{runNumber},{version}"
+ mask: "diffract_consts,mask,{runNumber},{version}"
+ metric: "calib_metrics,{metricName},{runNumber},{version}"
+ timedMetric: "calib_metrics,{metricName},{runNumber},{timestamp}"
+ normCal:
+ rawVanadium: "{unit},{group},{runNumber},raw_van_corr,{version}"
+ focusedRawVanadium: "{unit},{group},{runNumber},raw_van_corr,{version}"
+ smoothedFocusedRawVanadium: "{unit},{group},{runNumber},fitted_van_corr,{version}"
+
+calibration:
+ parameters:
+ default:
+ alpha: 0.1
+ # alpha: 1.1
+ beta:
+ - 0.02
+ - 0.05
+ # beta:
+ # - 1
+ # - 2
+ fitting:
+ # minSignal2Noise: 0.0
+ minSignal2Noise: 10
\ No newline at end of file
diff --git a/tests/resources/ultralite/CRACKLEFocGroup_Column.xml b/tests/resources/ultralite/CRACKLEFocGroup_Column.xml
new file mode 100644
index 0000000..3a6fe2f
--- /dev/null
+++ b/tests/resources/ultralite/CRACKLEFocGroup_Column.xml
@@ -0,0 +1,21 @@
+
+
+
+ 0-11
+
+
+ 12-23
+
+
+ 24-35
+
+
+ 36-47
+
+
+ 48-59
+
+
+ 60-71
+
+
diff --git a/tests/resources/ultralite/CRACKLELiteDataMap.xml b/tests/resources/ultralite/CRACKLELiteDataMap.xml
new file mode 100644
index 0000000..81f4bcd
--- /dev/null
+++ b/tests/resources/ultralite/CRACKLELiteDataMap.xml
@@ -0,0 +1,59 @@
+
+
+
+
+ 0,1,2,3
+
+
+ 4,5,6,7
+
+
+ 8,9,10,11
+
+
+ 12,13,14,15
+
+
+ 16,17,18,19
+
+
+ 20,21,22,23
+
+
+ 24,25,26,27
+
+
+ 28,29,30,31
+
+
+ 32,33,34,35
+
+
+
+ 36,37,38,39
+
+
+ 40,41,42,43
+
+
+ 44,45,46,47
+
+
+ 48,49,50,51
+
+
+ 52,53,54,55
+
+
+ 56,57,58,59
+
+
+ 60,61,62,63
+
+
+ 64,65,66,67
+
+
+ 68,69,70,71
+
+
diff --git a/tests/resources/ultralite/CRACKLELite_Definition.xml b/tests/resources/ultralite/CRACKLELite_Definition.xml
new file mode 100644
index 0000000..f849c97
--- /dev/null
+++ b/tests/resources/ultralite/CRACKLELite_Definition.xml
@@ -0,0 +1,237 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/resources/ultralite/CRACKLE_Definition.xml b/tests/resources/ultralite/CRACKLE_Definition.xml
new file mode 100644
index 0000000..05e80e4
--- /dev/null
+++ b/tests/resources/ultralite/CRACKLE_Definition.xml
@@ -0,0 +1,221 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/resources/ultralite/create_ultralite_data.py b/tests/resources/ultralite/create_ultralite_data.py
new file mode 100644
index 0000000..4986322
--- /dev/null
+++ b/tests/resources/ultralite/create_ultralite_data.py
@@ -0,0 +1,142 @@
+# import mantid algorithms, numpy and matplotlib
+import os
+
+from mantid.simpleapi import *
+
+from snapred.backend.dao.ingredients.GroceryListItem import GroceryListItem
+from snapred.backend.data.DataFactoryService import DataFactoryService
+from snapred.backend.data.GroceryService import GroceryService
+from snapblue.meta.Config import Resource
+
+Resource._resourcesPath = os.path.expanduser("~/SNS/SNAP/shared/Calibration_next/Powder/")
+liteInstrumentFile = Resource.getPath("CRACKLE_Definition.xml")
+dfs = DataFactoryService()
+
+
+def superID(nativeID, xdim, ydim):
+ # accepts a numpy array of native ID from standard SNAP nexus file and returns a numpy array with
+ # super pixel ID according to provided dimensions xdim and ydim of the super pixel.
+ # xdim and ydim shall be multiples of 2
+
+ Nx = 256 # native number of horizontal pixels
+ Ny = 256 # native number of vertical pixels
+ NNat = Nx * Ny # native number of pixels per panel
+
+ firstPix = (nativeID // NNat) * NNat
+ redID = nativeID % NNat # reduced ID beginning at zero in each panel
+
+ (i, j) = divmod(redID, Ny) # native (reduced) coordinates on pixel face
+ superi = divmod(i, xdim)[0]
+ superj = divmod(j, ydim)[0]
+
+ # some basics of the super panel
+ superNx = Nx / xdim # 32 running from 0 to 31
+ superNy = Ny / ydim
+ superN = superNx * superNy
+
+ superFirstPix = (firstPix / NNat) * superN
+
+ superVal = superi * superNy + superj + superFirstPix
+
+ return superVal
+
+
+# create the mapping
+LoadEmptyInstrument(
+ Filename="/SNS/SNAP/shared/Malcolm/dataFiles/SNAP_Definition.xml",
+ OutputWorkspace="SNAP",
+)
+
+mapToCrackle = "map_from_SNAP_to_CRACKLE"
+if mapToCrackle not in mtd:
+ # create the lite grouping ws using input run as template
+ CreateGroupingWorkspace(
+ InputWorkspace="SNAP",
+ GroupDetectorsBy="All",
+ OutputWorkspace=mapToCrackle,
+ )
+ ws = mtd[mapToCrackle]
+ nHst = ws.getNumberHistograms()
+ for spec in range(nHst):
+ ws.setY(spec, [superID(spec, 128, 128) + 1])
+
+# select run to convert to ultralite data, can convert multiple runs at once
+runs_to_reduce = ["58882"] # ["46680", "58810", "58813", "57514"]
+
+clerk = GroceryListItem.builder()
+for x in runs_to_reduce:
+ clerk.neutron(x).native().add()
+groceries = GroceryService().fetchGroceryList(clerk.buildList())
+
+
+# The FileName should point to a "diffract_consts__v#.h5 file, this gets saved at the end of a diffcal run
+LoadDiffCal(
+ InputWorkspace=groceries[0],
+ FileName="/SNS/users/8l2/SNS/SNAP/shared/Calibration_next/Powder/04bd2c53f6bf6754/native/diffraction/v_0003/diffract_consts_057514_v0003.h5",
+ WorkspaceName="57514",
+)
+# If set to False, will output data as histograms
+eventMode = True
+
+for grocery in groceries:
+ ws = mtd[grocery]
+ ultralite = f"{grocery}_ULTRALITE"
+ CloneWorkspace(
+ InputWorkspace=grocery,
+ OutputWorkspace=ultralite,
+ )
+ ConvertUnits(
+ InputWorkspace=ultralite,
+ OutputWorkspace=ultralite,
+ Target="dSpacing",
+ )
+ if not eventMode:
+ uws = mtd[ultralite]
+ Rebin(InputWorkspace=ultralite, OutputWorkspace=ultralite, Params=(uws.getTofMin(), -0.001, uws.getTofMax()))
+ DiffractionFocussing(
+ InputWorkspace=ultralite,
+ OutputWorkspace=ultralite,
+ GroupingWorkspace=mapToCrackle,
+ PreserveEvents=eventMode,
+ )
+ LoadInstrument(
+ Workspace=ultralite,
+ Filename=liteInstrumentFile,
+ RewriteSpectraMap=True,
+ )
+ ConvertUnits(
+ InputWorkspace=ultralite,
+ OutputWorkspace=ultralite,
+ Target="TOF",
+ )
+ if eventMode:
+ CompressEvents(
+ InputWorkspace=ultralite,
+ OutputWorkspace=ultralite,
+ BinningMode="Logarithmic",
+ Tolerance=-0.0001,
+ )
+ uws = mtd[ultralite]
+ Rebin(InputWorkspace=ultralite, OutputWorkspace=ultralite, Params=(uws.getTofMax() - uws.getTofMin()))
+ logs = (
+ "BL3:Det:TH:BL:Frequency",
+ "BL3:Mot:OpticsPos:Pos",
+ "BL3:Chop:Gbl:WavelengthReq",
+ "BL3:Chop:Skf1:WavelengthUserReq",
+ "BL3:Chop:Gbl:WavelengthReq",
+ "BL3:Chop:Skf1:WavelengthUserReq",
+ "det_arc1",
+ "det_arc2",
+ "BL3:Det:TH:BL:Frequency",
+ "BL3:Mot:OpticsPos:Pos",
+ "det_lin1",
+ "det_lin2",
+ "proton_charge",
+ "gd_prtn_chrg",
+ )
+ RemoveLogs(Workspace=ultralite, KeepLogs=logs)
+ SaveNexusProcessed(
+ InputWorkspace=ultralite,
+ Filename=f"~/Documents/ultralite/{ultralite}.nxs.h5",
+ CompressNexus=True,
+ )
diff --git a/tests/test_import.py b/tests/unit/test_import.py
similarity index 100%
rename from tests/test_import.py
rename to tests/unit/test_import.py
diff --git a/tests/test_version.py b/tests/unit/test_version.py
similarity index 100%
rename from tests/test_version.py
rename to tests/unit/test_version.py
diff --git a/tests/util/Config_helpers.py b/tests/util/Config_helpers.py
new file mode 100644
index 0000000..4ec2108
--- /dev/null
+++ b/tests/util/Config_helpers.py
@@ -0,0 +1,64 @@
+from collections import namedtuple
+from contextlib import ExitStack, contextmanager
+from typing import Any, Dict, Tuple
+
+import pytest
+
+from snapblue.meta.Config import Config
+
+Node = namedtuple("Node", "dict key")
+
+# Implementation notes:
+# * In order to allow convenient usage within CIS-test scripts,
+# `Config_override` is deliberately _not_ implemented as a test fixture.
+# * Multi-level substitution is not implemented: in general this can _effectively_ result in a
+# period-delimited key corresponding to values from _multiple_ `Config` nodes.
+# It's assumed that this does not pose much limitation, and that it should be possible
+#   to accomplish any required "override" usage with (possibly multiple) single-node substitutions.
+
+
+@contextmanager
+def Config_override(key: str, value: Any):
+ # Context manager to safely override a `Config` entry:
+ # * `__enter__` returns the `Config` instance.
+
+ # Find the _primary_ node associated with a period-delimited key.
+ def lookupNode(dict_: Dict[str, Any], key: str) -> Tuple[Dict[str, Any], str]:
+ # key_1.key_2. ... key_(n-1) lookup
+ ks = key.split(".")
+ val = dict_
+ for k in ks[0:-1]:
+ val = val.get(k)
+ if not isinstance(val, dict):
+ # Anything else may not correspond to a _single_ `Config` node
+ raise RuntimeError(
+ f"not implemented: probable multilevel substitution with key: '{key}' for value: {value}"
+ )
+
+ return Node(val, ks[-1])
+
+    # __enter__
+    # A test failing with an exception would pollute other tests with this
+    # config change if it were not restored in the `finally` below.
+    try:
+        _savedNode: Tuple[Dict[str, Any], str] = lookupNode(Config._config, key)
+        _savedValue: Any = _savedNode.dict[_savedNode.key]
+        _savedNode.dict[_savedNode.key] = value
+        yield Config
+    finally:
+        # __exit__: restore the original value unconditionally — the previous
+        # `del` + conditional re-add silently removed keys whose original value was `None`.
+        _savedNode.dict[_savedNode.key] = _savedValue
+
+
+@pytest.fixture
+def Config_override_fixture():
+ _stack = ExitStack()
+
+ def _Config_override_fixture(key: str, value: Any):
+ return _stack.enter_context(Config_override(key, value))
+
+ yield _Config_override_fixture
+
+ # teardown => __exit__
+ _stack.close()
diff --git a/tests/util/pytest_helpers.py b/tests/util/pytest_helpers.py
new file mode 100644
index 0000000..e6d02ef
--- /dev/null
+++ b/tests/util/pytest_helpers.py
@@ -0,0 +1,265 @@
+## Python standard imports
+import os
+import tempfile
+import time
+from contextlib import ExitStack
+from pathlib import Path
+from typing import List, Optional
+
+##
+## Test-related imports go *LAST*!
+## ----------------
+from unittest import mock
+
+import pytest
+
+## Mantid imports
+from mantid.simpleapi import DeleteWorkspaces, mtd
+
+## Qt imports
+from qtpy.QtCore import Qt
+from qtpy.QtWidgets import (
+ QMessageBox,
+)
+from util.Config_helpers import Config_override
+
+## SNAPRed imports
+# I would prefer not to access `LocalDataService` within an integration test,
+# however, for the moment, the reduction-data output relocation fixture is defined in the current file.
+from snapred.backend.data.LocalDataService import LocalDataService
+from snapblue.meta.Config import Config
+from snapred.ui.view import InitializeStateCheckView
+
+## REMINDER: Import required test fixtures at the end of either the main `conftest.py`,
+## or any `conftest.py` at the test-module level directory.
+
+
+## WARNING:
+# * The following two methods duplicate code, however the _closures_ need to be distinct!
+
+
+@pytest.fixture(scope="function") # noqa: PT003
+def cleanup_workspace_at_exit():
+ # Allow cleanup of workspaces in the ADS
+ # in a manner compatible with _parallel_ testing.
+ _workspaces: List[str] = []
+
+ def _cleanup_workspace_at_exit(wsName: str):
+ _workspaces.append(wsName)
+
+ yield _cleanup_workspace_at_exit
+
+ # teardown
+ try:
+ if _workspaces:
+ # Warning: `DeleteWorkspaces`' input validator throws an exception
+ # if a specified workspace doesn't exist in the ADS;
+
+ # Provide an error diagnostic message, but do not bypass the error:
+ # the workspaces list must be correct.
+ non_existent_workspaces = set([ws for ws in _workspaces if not mtd.doesExist(ws)])
+ print(f"Non-existent workspaces: {non_existent_workspaces}.")
+
+ DeleteWorkspaces(_workspaces)
+ except RuntimeError:
+ pass
+
+
+@pytest.fixture(scope="class")
+def cleanup_class_workspace_at_exit():
+ # Allow cleanup of workspaces in the ADS
+ # in a manner compatible with _parallel_ testing.
+ _workspaces: List[str] = []
+
+ def _cleanup_workspace_at_exit(wsName: str):
+ _workspaces.append(wsName)
+
+ yield _cleanup_workspace_at_exit
+
+ # teardown
+ try:
+ if _workspaces:
+ # Warning: `DeleteWorkspaces`' input validator throws an exception
+ # if a specified workspace doesn't exist in the ADS;
+
+ # Provide an error diagnostic message, but do not bypass the error:
+ # the workspaces list must be correct.
+ print(f"Non-existent workspaces: {set([ws for ws in _workspaces if not mtd.doesExist(ws)])}.")
+
+ DeleteWorkspaces(_workspaces)
+ except RuntimeError:
+ pass
+
+
+@pytest.fixture
+def get_unique_timestamp():
+ """
+ This method re-uses code from `LocalDataService.getUniqueTimestamp`.
+
+ Generate a unique timestamp:
+
+ * on some operating systems `time.time()` only has resolution to seconds;
+
+ * this method checks its own most-recently returned value, and if necessary,
+ increments it.
+
+ * the complete `float` representation of the unix timestamp is retained,
+ in order to allow arbitrary formatting.
+
+ """
+ _previousTimestamp = None
+
+ def _get_unique_timestamp() -> float:
+ nextTimestamp = time.time()
+ nonlocal _previousTimestamp
+ if _previousTimestamp is not None:
+ # compare as `time.struct_time`
+ if nextTimestamp < _previousTimestamp or time.gmtime(nextTimestamp) == time.gmtime(_previousTimestamp):
+ nextTimestamp = _previousTimestamp + 1.0
+ _previousTimestamp = nextTimestamp
+ return nextTimestamp
+
+ yield _get_unique_timestamp
+
+ # teardown ...
+ pass
+
+
+@pytest.fixture
+def calibration_home_from_mirror():
+ # Test fixture to create a copy of the calibration home directory from an existing mirror:
+ # * creates a temporary calibration home directory under the optional `prefix` path;
+ # when not specified, the temporary directory is created under the existing
+ # `Config["instrument.calibration.powder.home"]`;
+ # * creates symlinks within the directory to required metadata files and directories
+ # from the already existing `Config["instrument.calibration.powder.home"]`;
+ # * ignores any existing diffraction-calibration and normalization-calibration subdirectories;
+ # * and finally, overrides the `Config` entry for "instrument.calibration.powder.home".
+
+ # IMPLEMENTATION notes:
+ # * The functionality of this fixture is deliberately NOT implemented as a context manager,
+ # although certain context-manager features are used.
+ # * If this were a context manager, it would be terminated at any exception throw. For example,
+ # it would be terminated by the "initialize state" `RecoverableException`. Such termination would interfere with
+ # the requirements of the integration tests.
+ _stack = ExitStack()
+
+ def _calibration_home_from_mirror(prefix: Optional[Path] = None):
+ originalCalibrationHome: Path = Path(Config["instrument.calibration.powder.home"])
+ if prefix is None:
+ prefix = originalCalibrationHome
+
+ # Override calibration home directory:
+ tmpCalibrationHome = Path(_stack.enter_context(tempfile.TemporaryDirectory(dir=prefix, suffix=os.sep)))
+ assert tmpCalibrationHome.exists()
+ _stack.enter_context(Config_override("instrument.calibration.powder.home", str(tmpCalibrationHome)))
+
+ # WARNING: for these integration tests `LocalDataService` is a singleton.
+ # The Indexer's `lru_cache` MUST be reset after the Config override, otherwise
+ # it will return indexers synched to the previous `Config["instrument.calibration.powder.home"]`.
+ LocalDataService()._indexer.cache_clear()
+
+ # Create symlinks to metadata files and directories.
+ metadatas = [Path("LiteGroupMap.hdf"), Path("PixelGroupingDefinitions"), Path("SNAPLite.xml")]
+ for path_ in metadatas:
+ os.symlink(originalCalibrationHome / path_, tmpCalibrationHome / path_)
+ return tmpCalibrationHome
+
+ yield _calibration_home_from_mirror
+
+ # teardown => __exit__
+ _stack.close()
+ LocalDataService()._indexer.cache_clear()
+
+
+@pytest.fixture
+def reduction_home_from_mirror():
+ # Test fixture to write reduction data to a temporary directory under `Config["instrument.reduction.home"]`.
+ # * creates a temporary reduction state root directory under the optional `prefix` path;
+ # when not specified, the temporary directory is created under the existing
+ # `Config["instrument.reduction.home"]` (with the substituted 'IPTS' tag).
+ # * overrides the `Config` entry for "instrument.reduction.home".
+
+ # IMPLEMENTATION notes: (see previous).
+ _stack = ExitStack()
+
+ def _reduction_home_from_mirror(runNumber: str, prefix: Optional[Path] = None):
+ if prefix is None:
+ dataService = LocalDataService()
+ originalReductionHome = dataService._constructReductionStateRoot(runNumber)
+
+ # WARNING: this 'mkdir' step will not be reversed at exit,
+ # but that shouldn't matter very much.
+ originalReductionHome.mkdir(parents=True, exist_ok=True)
+ prefix = originalReductionHome
+
+ tmpReductionHome = Path(_stack.enter_context(tempfile.TemporaryDirectory(dir=prefix, suffix=os.sep)))
+
+ # Ensure that `_createReductionStateRoot` will return the temporary directory,
+        # while still exercising its IPTS-substitution functionality.
+ _stack.enter_context(
+ Config_override(
+ "instrument.reduction.home", Config["instrument.reduction.home"] + os.sep + tmpReductionHome.name
+ )
+ )
+
+ # No `LocalDataService._indexer.cache_clear()` should be required here, but keep it in mind, just in case!
+
+ else:
+ # Specified prefix => just use that, without any substitution.
+ # In this case `_constructReductionStateRoot` will return a path
+ # which does not depend on the IPTS-directory for the run number.
+ tmpReductionHome = Path(_stack.enter_context(tempfile.TemporaryDirectory(dir=prefix, suffix=os.sep)))
+ _stack.enter_context(Config_override("instrument.reduction.home", str(tmpReductionHome)))
+
+ assert tmpReductionHome.exists()
+ return tmpReductionHome
+
+ yield _reduction_home_from_mirror
+
+ # teardown => __exit__
+ _stack.close()
+
+
+def handleStateInit(waitForStateInit, stateId, qtbot, qapp, actionCompleted, workflowNodeTabs):
+ if (Path(Config["instrument.calibration.powder.home"]) / stateId).exists():
+ # raise RuntimeError(
+ # f"The state root directory for '{stateId}' already exists! "\
+ # + "Please move it out of the way."
+ # )
+ waitForStateInit = False
+ if waitForStateInit:
+ # ---------------------------------------------------------------------------
+ # IMPORTANT: "initialize state" dialog is triggered by an exception throw:
+ # => do _not_ patch using a with clause!
+ questionMessageBox = mock.patch( # noqa: PT008
+ "qtpy.QtWidgets.QMessageBox.question",
+ lambda *args, **kwargs: QMessageBox.Yes, # noqa: ARG005
+ )
+ questionMessageBox.start()
+ successPrompt = mock.patch(
+ "snapred.ui.widget.SuccessPrompt.SuccessPrompt.prompt",
+ lambda parent: parent.close() if parent is not None else None,
+ )
+ successPrompt.start()
+ # --------------------------------------------------------------------------
+
+ # (1) respond to the "initialize state" request
+ with qtbot.waitSignal(actionCompleted, timeout=60000):
+ qtbot.mouseClick(workflowNodeTabs.currentWidget().continueButton, Qt.MouseButton.LeftButton)
+ qtbot.waitUntil(
+ lambda: len(
+ [o for o in qapp.topLevelWidgets() if isinstance(o, InitializeStateCheckView.InitializationMenu)]
+ )
+ > 0,
+ timeout=1000,
+ )
+ stateInitDialog = [
+ o for o in qapp.topLevelWidgets() if isinstance(o, InitializeStateCheckView.InitializationMenu)
+ ][0]
+ stateInitDialog.stateNameField.setText("my happy state")
+
+ qtbot.mouseClick(stateInitDialog.beginFlowButton, Qt.MouseButton.LeftButton)
+ # State initialization dialog is "application modal" => no need to explicitly wait
+ questionMessageBox.stop()
+ successPrompt.stop()