diff --git a/Fitter/python/AnomalousCouplingEFTNegative.py b/Fitter/python/AnomalousCouplingEFTNegative.py
index ede2f41..47ac346 100644
--- a/Fitter/python/AnomalousCouplingEFTNegative.py
+++ b/Fitter/python/AnomalousCouplingEFTNegative.py
@@ -29,11 +29,15 @@ def __init__(self):
 
     def loadOperators(self,fpath):
-        print("Loading operators from {fpath}".format(fpath=fpath))
-        jsn = open(fpath,'r').read()
-        operators = json.loads(jsn)
-        self.alloperators = []
+        print(f"Loading operators from {fpath}")
+        with open(fpath) as f:
+            operators = json.load(f)
+        available = set(operators)
+        print("\tAvailable signals:", available)
         for sig in self.sgnl_known:
+            if sig not in available:
+                print(f"\tWARNING: no operators for '{sig}', skipping.")
+                continue
             self.Operators[sig] = operators[sig]
             self.numOperators[sig] = len(operators[sig])
             self.alloperators.extend(operators[sig])
 
diff --git a/Fitter/scripts/EFTFitter.py b/Fitter/scripts/EFTFitter.py
index 10b54ce..e32e7b8 100644
--- a/Fitter/scripts/EFTFitter.py
+++ b/Fitter/scripts/EFTFitter.py
@@ -147,13 +147,33 @@ def log_subprocess_output(self, pipe, level):
         # if level=='info': logging.info(line.rstrip('\n'))
         # if level=='err': logging.error(line.rstrip('\n'))
 
+    def __override_CMSSW_BASE(self):
+        """If CMSSW_BASE points to an /afs/ path but cwd is under /users/,
+        climb back up from cwd until you hit the CMSSW_* directory and reset CMSSW_BASE."""
+        cmssw_base = os.environ.get('CMSSW_BASE', '')
+        cwd = os.getcwd()
+        cmssw_dir = os.path.basename(cmssw_base)
+
+        if cmssw_base.startswith('/afs/') and cwd.startswith('/users/'):
+            path = cwd
+            # walk up until we find the CMSSW_* dir
+            while os.path.basename(path) != cmssw_dir and path not in ('', os.path.sep):
+                path = os.path.dirname(path)
+
+            if os.path.basename(path) == cmssw_dir:
+                cmssw_base = path
+
+        return cmssw_base
+
     def makeWorkspaceSM(self, datacard='EFT_MultiDim_Datacard.txt'):
         ### Generates a workspace from a datacard ###
         logging.info("Creating workspace")
         if not os.path.isfile(datacard):
             logging.error("Datacard does not exist!")
             return
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
+
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
         args = ['text2workspace.py',datacard,'-P','HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel',
                 '--channel-masks',
                 #'--PO','map=.*/ttll:mu_ttll[1]','--PO','map=.*/tHq:mu_ttH[1,0,3]','--PO','map=.*/ttlnu:mu_ttlnu[1,0,3]','--PO','map=.*/ttH:mu_ttH[1,0,3]','--PO','map=.*/tllq:mu_tllq[1,0,3]',
@@ -177,7 +197,9 @@ def makeWorkspaceSM(self, datacard='EFT_MultiDim_Datacard.txt'):
 
     def bestFitSM(self, name='.test', freeze=[], autoMaxPOIs=True, other=[], mask=[], mask_syst=[]):
         ### Multidimensional fit ###
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
+
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
         args=['combine','-d',CMSSW_BASE+'/src/EFTFit/Fitter/test/SMWorkspace.root','-v','2','--saveFitResult','-M','MultiDimFit','--cminPoiOnlyFit','--cminDefaultMinimizerStrategy=2']
         if freeze:
             params_all=['mu_ttll','mu_ttlnu','mu_ttH','mu_tllq']
@@ -218,7 +240,8 @@ def gridScanSM(self, name='.test', batch='', scan_params=['mu_ttll'], params_tra
         ### Can be used to do 2D scans as well ###
         logging.info("Doing grid scan...")
 
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
         args = ['combineTool.py','-d',CMSSW_BASE+'/src/EFTFit/Fitter/test/SMWorkspace.root','-M','MultiDimFit','--algo','grid','--cminPreScan','--cminDefaultMinimizerStrategy=0']
         args.extend(['--points','{}'.format(points)])
         if name: args.extend(['-n','{}'.format(name)])
@@ -267,7 +290,9 @@ def makeWorkspaceEFT(self, datacard='EFT_MultiDim_Datacard.txt'):
         if not os.path.isfile(datacard):
             logging.error("Datacard does not exist!")
             sys.exit()
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
+
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
         args = ['text2workspace.py',datacard,'-P','EFTFit.Fitter.EFTModel:eftmodel','--PO','fits='+CMSSW_BASE+'/src/EFTFit/Fitter/hist_files/EFT_Parameterization.npy','-o','EFTWorkspace.root','--channel-masks']
 
         logging.info(' '.join(args))
@@ -279,7 +304,9 @@ def makeWorkspaceEFT(self, datacard='EFT_MultiDim_Datacard.txt'):
 
     def bestFit(self, name='.test', params_POI=[], startValuesString='', freeze=False, autoBounds=True, other=[]):
         ### Multidimensional fit ###
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
+
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
         if params_POI == []:
             params_POI = self.wcs
         args=['combine','-d',CMSSW_BASE+'/src/EFTFit/Fitter/test/EFTWorkspace.root','-v','2','--saveFitResult','-M','MultiDimFit','-H','AsymptoticLimits','--cminPoiOnlyFit','--cminDefaultMinimizerStrategy=2']
@@ -317,7 +344,7 @@ def batchDNNScan(self, name='.test', batch='crab', points=1000000, workspace='pt
         ### Runs deltaNLL Scan in for a single parameter using CRAB or Condor ###
         logging.info("Doing grid scan...")
 
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
+        CMSSW_BASE = self.__override_CMSSW_BASE()
 
         nsplit = 100 # jobs per task
         jobs = points // nsplit # points per job
@@ -405,12 +432,18 @@ def gridScan(self, name='.test', batch='', freeze=False, scan_params=['ctW','ctZ
         ### Runs deltaNLL Scan in two parameters using CRAB or Condor ###
         logging.info("Doing grid scan...")
 
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
-        if not "/afs/" in workspace:
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
+        print(f"CMSSW_BASE after manipulation is {CMSSW_BASE}")
+
+        if not (workspace.startswith("/afs/") or workspace.startswith("/users/") or workspace.startswith("/scratch365/")):
             wsname = CMSSW_BASE+'/src/EFTFit/Fitter/test/'+workspace
             if not os.path.exists(wsname):
                 print('WARNING! I was not able to find the workspace in afs, I will try finding it by assuming you passed me an absolute path')
                 wsname = workspace
+        else:
+            wsname = workspace
+
         if not os.path.exists(wsname):
             raise RuntimeError('Failed to find the workspace, either considering it as a local afs path or an absolute path. Please, fix it!')
         print('Workspace found! I am gonna use it for running fits...')
@@ -476,14 +509,14 @@ def gridScan(self, name='.test', batch='', freeze=False, scan_params=['ctW','ctZ
 
             sp.call(['mkdir', 'condor{}'.format(name)])
             sp.call(['chmod', 'a+x', 'condor_{}.sh'.format(name.replace('.', ''))])
             sp.call(['sed', '-i', 's/ulimit.*/&\\nunset PERL5LIB/', 'condor_{}.sh'.format(name.replace('.', ''))])
-            sp.call(['sed', '-i', 's/queue/\\n\\nrequestMemory=10000\\n+JobFlavour = "workday"\\n\\nqueue/', 'condor_{}.sub'.format(name.replace('.', ''))]) # Ask for at least 10GB of RAM
+            sp.call(['sed', '-i', 's/queue/\\n\\nrequestMemory=20000\\n+JobFlavour = "workday"\\n\\nqueue/', 'condor_{}.sub'.format(name.replace('.', ''))]) # Ask for at least 20GB of RAM
 
             # Replace hardcoded paths with $CMSSW_BASE and dynamic paths
-            cmssw_base = os.getenv('CMSSW_BASE')
-            test_dir = os.path.join(cmssw_base, 'src', 'EFTFit', 'Fitter', 'test')
+            CMSSW_BASE = self.__override_CMSSW_BASE()
+            test_dir = os.path.join(CMSSW_BASE, 'src', 'EFTFit', 'Fitter', 'test')
             sp.call(['sed', '-i',
-                     's|executable = \(.*\)|executable = {}/src/EFTFit/Fitter/test/condor_{}.sh\\narguments = $(ProcId)|'.format(cmssw_base, name.replace('.', '')),
+                     's|executable = \(.*\)|executable = {}/src/EFTFit/Fitter/test/condor_{}.sh\\narguments = $(ProcId)|'.format(CMSSW_BASE, name.replace('.', '')),
                      'condor_{}.sub'.format(name.replace('.', ''))
                     ])
 
@@ -633,7 +666,7 @@ def retrieveGridScan(self, name='.test', batch='crab', user='apiccine'):#getpass
         if os.path.isfile('condor_{}.sub'.format(name.replace('.',''))):
             os.rename('condor_{}.sub'.format(name.replace('.','')),'condor{0}/condor_{0}.sub'.format(name))
 
-    def submitEFTWilks(self, name='.test', limits='/afs/crc.nd.edu/user/b/byates2/Public/wc_top22006_a24_prof_2sigma.json', workspace='ptz-lj0pt_fullR2_anatest24v01_withAutostats_withSys.root', doBest=False, asimov=False, fixed=False, wc=None, sig=0, batch='condor'):
+    def submitEFTWilks(self, name='.test', limits='/users/byates2/Public/wc_top22006_a24_prof_2sigma.json', workspace='ptz-lj0pt_fullR2_anatest24v01_withAutostats_withSys.root', doBest=False, asimov=False, fixed=False, wc=None, sig=0, batch='condor'):
         '''
         Submit jobs for GoodnessOfFit:
         doBest = False - Fix all NPs to 0, run toys with seed(s) speicfied below
@@ -657,7 +690,9 @@ def submitEFTWilks(self, name='.test', limits='/afs/crc.nd.edu/user/b/byates2/Pu
         if not doBest:
             best = ','.join(['{}={}'.format(key,val[sig]) for key,val in limits.items() if key in self.wcs])
         '''
-        CMSSW_BASE = os.getenv('CMSSW_BASE')
+
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
         args = ['combineTool.py','-d',CMSSW_BASE+'/src/EFTFit/Fitter/test/'+workspace,'-M','GoodnessOfFit','--algo','saturated','--cminPreScan','--cminDefaultMinimizerStrategy=0', '--noMCbonly=1']
         if not doBest:
             args = ['combineTool.py','-d',CMSSW_BASE+'/src/EFTFit/Fitter/test/'+workspace,'-M','MultiDimFit','--algo', 'none', '--skipInitialFit', '--cminPreScan','--cminDefaultMinimizerStrategy=0']
@@ -1133,7 +1168,7 @@ def batchReductionFitEFT(self, name='.EFT.Private.Unblinded.Nov16.28redo.Float.c
 eval `scramv1 runtime -sh`
 cd %(PWD)s
         """ % ({
-            'CMSSW_BASE': os.environ['CMSSW_BASE'],
+            'CMSSW_BASE': self.__override_CMSSW_BASE(),
             'SCRAM_ARCH': os.environ['SCRAM_ARCH'],
             'PWD': os.environ['PWD']
         })
@@ -1182,8 +1217,8 @@ def batchReductionFitEFT(self, name='.EFT.Private.Unblinded.Nov16.28redo.Float.c
 
         jobs = 0
         wsp_files = set()
-        cmssw_base = os.getenv('CMSSW_BASE')
-        script_dir = os.path.join(cmssw_base, 'src', 'EFTFit', 'Fitter', 'scripts')
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+        script_dir = os.path.join(CMSSW_BASE, 'src', 'EFTFit', 'Fitter', 'scripts')
 
         #for i, proc in enumerate(range(0, points, split), points // split):
         for i,proc in enumerate(list(range(0,points,split)), points/split):
@@ -1447,6 +1482,7 @@ def ImpactInitialFit(self, workspace='ptz-lj0pt_fullR2_anatest17_noAutostats_wit
         if not wcs: wcs = self.wcs
         user = os.getlogin()
         wcs_start = ','.join(wc+'=0' for wc in self.wcs)
+        CMSSW_BASE = self.__override_CMSSW_BASE()
         for wc in wcs:
             print('Submitting', wc)
             target = 'condor_%s.sh' % wc
@@ -1455,10 +1491,10 @@ def ImpactInitialFit(self, workspace='ptz-lj0pt_fullR2_anatest17_noAutostats_wit
             condorFile.write('ulimit -s unlimited\n')
             condorFile.write('unset PERL5LIB\n')
             condorFile.write('set -e\n')
-            condorFile.write('cd /afs/crc.nd.edu/user/{}/{}/CMSSW_14_1_0_pre4/src\n'.format(user[0], user))
+            condorFile.write('cd {}/src\n'.format(CMSSW_BASE))
             condorFile.write('export SCRAM_ARCH={}\n'.format(os.environ['SCRAM_ARCH']))
             condorFile.write('eval `scramv1 runtime -sh`\n')
-            condorFile.write('cd /afs/crc.nd.edu/user/{}/{}/CMSSW_14_1_0_pre4/src/EFTFit/Fitter/test/{}\n'.format(user[0], user, job_dir))
+            condorFile.write('cd {}/src/EFTFit/Fitter/test/{}\n'.format(CMSSW_BASE, job_dir))
             condorFile.write('\n')
             condorFile.write('if [ $1 -eq 0 ]; then\n')
             condorFile.write(' combineTool.py -M Impacts -n %s%s --doInitialFit --redefineSignalPOIs %s --robustFit 1 --setParameters %s --freezeParameters ctW,ctZ,cpQM,cbW,cpQ3,cptb,cpt,cQl3i,cQlMi,cQei,ctli,ctei,ctlSi,ctlTi,cQq13,cQq83,cQq11,ctq1,cQq81,ctq8,ctt1,cQQ1,cQt8,cQt1,ctp --setParameterRanges ctW=-4,4:ctZ=-5,5:cpt=-40,30:ctp=-35,65:ctli=-10,10:ctlSi=-10,10:cQl3i=-10,10:cptb=-20,20:ctG=-2,2:cpQM=-10,30:ctlTi=-2,2:ctei=-10,10:cQei=-10,10:cQlMi=-10,10:cpQ3=-15,10:cbW=-5,5:cQq13=-1,1:cQq83=-2,2:cQq11=-2,2:ctq1=-2,2:cQq81=-5,5:ctq8=-5,5:ctt1=-5,5:cQQ1=-10,10:cQt8=-20,20:cQt1=-10,10 -m 1 -d %s' % (wc, version, wc, wcs_start, workspace))
@@ -1470,8 +1506,9 @@ def ImpactInitialFit(self, workspace='ptz-lj0pt_fullR2_anatest17_noAutostats_wit
             condorFile.write('fi\n')
             condorFile.close()
 
-            cmssw_base = os.getenv('CMSSW_BASE')
-            test_dir = os.path.join(cmssw_base, 'src', 'EFTFit', 'Fitter', 'test')
+            CMSSW_BASE = self.__override_CMSSW_BASE()
+
+            test_dir = os.path.join(CMSSW_BASE, 'src', 'EFTFit', 'Fitter', 'test')
 
             target = 'condor_%s.sub' % wc
             with open(target, 'w') as condorFile:
@@ -1509,6 +1546,9 @@ def ImpactNuisance(self, workspace='ptz-lj0pt_fullR2_anatest25v01_withAutostats_
         user = os.getlogin()
         ranges = ':'.join([wc+'='+','.join((str(r[0]), str(r[1]))) for wc,r in list(self.wc_ranges_differential.items()) if wc in self.wcs])
         wcs_start = ','.join(wc+'=0' for wc in self.wcs)
+
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
         for wc in wcs:
             print('Submitting', wc)
             if unblind:
@@ -1519,10 +1559,10 @@ def ImpactNuisance(self, workspace='ptz-lj0pt_fullR2_anatest25v01_withAutostats_
             condorFile.write('ulimit -s unlimited\n')
             condorFile.write('unset PERL5LIB\n')
             condorFile.write('set -e\n')
-            condorFile.write('cd /afs/crc.nd.edu/user/{}/{}/CMSSW_14_1_0_pre4/src\n'.format(user[0], user))
+            condorFile.write('cd {}/src\n'.format(CMSSW_BASE))
             condorFile.write('export SCRAM_ARCH={}\n'.format(os.environ['SCRAM_ARCH']))
             condorFile.write('eval `scramv1 runtime -sh`\n')
-            condorFile.write('cd /afs/crc.nd.edu/user/{}/{}/CMSSW_14_1_0_pre4/src/EFTFit/Fitter/test/{}\n'.format(user[0], user, job_dir))
+            condorFile.write('cd {}/src/EFTFit/Fitter/test/{}\n'.format(CMSSW_BASE, job_dir))
             condorFile.write('\n')
             for i,np in enumerate(self.systematics):
                 condorFile.write('if [ $1 -eq {} ]; then\n'.format(i))
@@ -1539,8 +1579,7 @@ def ImpactNuisance(self, workspace='ptz-lj0pt_fullR2_anatest25v01_withAutostats_
             condorFile.write('fi\n')
             condorFile.close()
 
-            cmssw_base = os.getenv('CMSSW_BASE')
-            test_dir = os.path.join(cmssw_base, 'src', 'EFTFit', 'Fitter', 'test')
+            test_dir = os.path.join(CMSSW_BASE, 'src', 'EFTFit', 'Fitter', 'test')
 
             target = 'condor_%s_fit.sub' % wc
             with open(target, 'w') as condorFile:
@@ -1573,6 +1612,9 @@ def ImpactCollect(self, workspace='ptz-lj0pt_fullR2_anatest17_noAutostats_withSy
         if not wcs: wcs = self.wcs
         user = os.getlogin()
         wcs_start = ','.join(wc+'=0' for wc in self.wcs)
+
+        CMSSW_BASE = self.__override_CMSSW_BASE()
+
        for wc in wcs:
             target = 'condor_%s_collect.sh' % wc
             condorFile = open(target,'w')
@@ -1580,10 +1622,10 @@ def ImpactCollect(self, workspace='ptz-lj0pt_fullR2_anatest17_noAutostats_withSy
             condorFile.write('ulimit -s unlimited\n')
             condorFile.write('unset PERL5LIB\n')
             condorFile.write('set -e\n')
-            condorFile.write('cd /afs/crc.nd.edu/user/{}/{}/CMSSW_14_1_0_pre4/src\n'.format(user[0], user))
+            condorFile.write('cd {}/src\n'.format(CMSSW_BASE))
             condorFile.write('export SCRAM_ARCH={}\n'.format(os.environ['SCRAM_ARCH']))
             condorFile.write('eval `scramv1 runtime -sh`\n')
-            condorFile.write('cd /afs/crc.nd.edu/user/{}/{}/CMSSW_14_1_0_pre4/src/EFTFit/Fitter/test/{}\n'.format(user[0], user, job_dir))
+            condorFile.write('cd {}/src/EFTFit/Fitter/test/{}\n'.format(CMSSW_BASE, job_dir))
             condorFile.write('\n')
             condorFile.write('combineTool.py -M Impacts -d %s -o impacts%s%s.json --setParameters %s -m 1 -n %s --redefineSignalPOIs %s' % (workspace, wc, version, wcs_start, wc, wc))
             if unblind: print('Running over ACTUAL DATA!')
@@ -1595,8 +1637,7 @@ def ImpactCollect(self, workspace='ptz-lj0pt_fullR2_anatest17_noAutostats_withSy
             condorFile.write('\nplotImpacts.py -i impacts%s%s.json -o impacts%s%s\n' % (wc, version, wc, version))
             condorFile.close()
 
-            cmssw_base = os.getenv('CMSSW_BASE')
-            test_dir = os.path.join(cmssw_base, 'src', 'EFTFit', 'Fitter', 'test')
+            test_dir = os.path.join(CMSSW_BASE, 'src', 'EFTFit', 'Fitter', 'test')
 
             target = 'condor_%s_collect.sub' % wc
             with open(target, 'w') as condorFile:
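The `__override_CMSSW_BASE` helper introduced above replaces `os.getenv('CMSSW_BASE')` throughout `EFTFitter.py`. For reference, the same path-climbing logic can be exercised standalone; this is a minimal sketch, where the free function name and the example paths are hypothetical and not part of the patch:

```python
import os

def override_cmssw_base(cmssw_base, cwd):
    # Mirror of the patched helper: if CMSSW_BASE points into /afs/ but the
    # process is actually running under /users/, climb up from cwd until the
    # directory name matches the CMSSW_* checkout and use that path instead.
    cmssw_dir = os.path.basename(cmssw_base)
    if cmssw_base.startswith('/afs/') and cwd.startswith('/users/'):
        path = cwd
        while os.path.basename(path) != cmssw_dir and path not in ('', os.path.sep):
            path = os.path.dirname(path)
        if os.path.basename(path) == cmssw_dir:
            cmssw_base = path
    return cmssw_base

# Hypothetical example: stale /afs/ CMSSW_BASE, job running from /users/
print(override_cmssw_base(
    '/afs/crc.nd.edu/user/b/byates2/CMSSW_14_1_0_pre4',
    '/users/byates2/CMSSW_14_1_0_pre4/src/EFTFit/Fitter/test'))
# prints: /users/byates2/CMSSW_14_1_0_pre4
```

The rewrite only happens when the environment points into `/afs/` while the working directory is under `/users/`; in every other case the environment value is returned unchanged.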
diff --git a/Fitter/scripts/text2workspace.sh b/Fitter/scripts/text2workspace.sh
old mode 100644
new mode 100755
index a7787c8..294ae13
--- a/Fitter/scripts/text2workspace.sh
+++ b/Fitter/scripts/text2workspace.sh
@@ -1,33 +1,100 @@
-# This scipt makes the workspace needed for running combine fits, workspace both for interference model (IM) and Dim6top model (AAC). The default is running with IM. Uncomment the AAC command lines to run with AAC model.
+#!/usr/bin/env bash
+
-# Note: Difference in two models
-# IM model - uses scalings.json file and datacards with ONLY sm templates
-# AAC model - uses selectedWCs.txt file and datacards with ALL EFT templates
+# This script makes the workspace needed for running combine fits, for both the interference model (IM) and the Dim6top model (AAC).
+# The default is IM; switch to the AAC model with the -m option (see PrintUsage below).
-# Make sure you have the corret input to run the each model
-
-# extend run time
-ulimit -s unlimited
-
-# workspace naming
+#----------------------------------------
+# Default parameters
+#----------------------------------------
+DIR="."
 WS_NAME="workspace.root"
-
-# files input
 COM_CARD="combinedcard.txt"
 SCAL_DATA="scalings.json"
+MODEL="IM" # IM or AAC
+
+# Will be set later based on the input directory
+SELECTED_WCS=""
+
+#----------------------------------------
+# Usage function
+#----------------------------------------
+PrintUsage() {
+  cat <<EOF
+Usage: $0 [-d <dir>] [-m <model>] [-h]
+
+  -d <dir>    Directory containing ${WS_NAME}, ${COM_CARD} and ${SCAL_DATA}
+              (default: current directory)
+  -m <model>  Physics model: IM or AAC (default: $MODEL)
+  -h          Show this help message and exit
+EOF
+  exit 1
+}
+
-# physics model - interference model
-PHY_MODEL="HiggsAnalysis.CombinedLimit.InterferenceModels:interferenceModel"
+#----------------------------------------
+# Parse options
+#----------------------------------------
+while getopts "d:m:h" opt; do
+  case "$opt" in
+    d) DIR="$OPTARG" ;;
+    m) MODEL="$OPTARG" ;;
+    h) PrintUsage ;;
+    *) PrintUsage ;;
+  esac
+done
+shift $((OPTIND -1))
+
-# physics model option - uncomment for AAC model, need selectedWC.txt file
-#PHY_MODEL="EFTFit.Fitter.AnomalousCouplingEFTNegative:analiticAnomalousCouplingEFTNegative"
-#AAC_OPTION="--X-allow-no-background --for-fits --no-wrappers --X-pack-asympows --optimize-simpdf-constraints=cms --PO selectedWCs=selectedWCs.txt"
+#----------------------------------------
+# Resolve paths
+#----------------------------------------
-
-# run text2workspace using interference model
-RUN_COMMAND="time text2workspace.py $COM_CARD -P $PHY_MODEL --PO scalingData=$SCAL_DATA --PO verbose -o $WS_NAME"
+
+if [[ ! -d "$DIR" ]]; then
+  echo "Error: directory '$DIR' not found." >&2
+  exit 1
+fi
+
+WS_NAME="$(realpath "$DIR/$WS_NAME")"
+COM_CARD="$(realpath "$DIR/$COM_CARD")"
+SCAL_DATA="$(realpath "$DIR/$SCAL_DATA")"
+SELECTED_WCS="$(realpath "$DIR/selectedWCs.txt")"
+
+for file in "$COM_CARD" "$SCAL_DATA"; do
+  if [[ ! -f "$file" ]]; then
+    echo "Error: required file '$file' not found." >&2
+    exit 1
+  fi
+done
+
+if [[ "$MODEL" == "AAC" && ! -f "$SELECTED_WCS" ]]; then
+  echo "Error: required file '$SELECTED_WCS' not found." >&2
+  exit 1
+fi
+
+#----------------------------------------
+# Main script
+#----------------------------------------
+
+# extend stack size
+ulimit -s unlimited
-
-# run text2workspace using AAC model
-#RUN_COMMAND="time text2workspace.py $COM_CARD -P $PHY_MODEL -o $WS_NAME $AAC_OPTION"
+
+# choose physics model
+if [[ "$MODEL" == "AAC" ]]; then
+  PHY_MODEL="EFTFit.Fitter.AnomalousCouplingEFTNegative:analiticAnomalousCouplingEFTNegative"
+  AAC_OPTION="--X-allow-no-background --for-fits --no-wrappers --X-pack-asympows \
+--optimize-simpdf-constraints=cms --PO selectedWCs=${SELECTED_WCS}"
+  RUN_COMMAND="time text2workspace.py \
+    ${COM_CARD} \
+    -P ${PHY_MODEL} \
+    -o ${WS_NAME} \
+    ${AAC_OPTION}"
+else
+  PHY_MODEL="HiggsAnalysis.CombinedLimit.InterferenceModels:interferenceModel"
+  RUN_COMMAND="time text2workspace.py \
+    ${COM_CARD} \
+    -P ${PHY_MODEL} \
+    --PO scalingData=${SCAL_DATA} \
+    --PO verbose \
+    -o ${WS_NAME}"
+fi
+
-printf "\nRunning the following command:\n$RUN_COMMAND\n\n"
+printf "\nRunning the following command:\n%s\n\n" "$RUN_COMMAND"
 $RUN_COMMAND
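Given the getopts block in the rewritten script above, typical invocations would look like the following (the directory path is a placeholder):

```bash
# Interference model (default): expects combinedcard.txt and scalings.json in the given directory
./text2workspace.sh -d /path/to/card/directory

# AAC model: additionally expects selectedWCs.txt in the same directory
./text2workspace.sh -d /path/to/card/directory -m AAC

# Print the usage message and exit
./text2workspace.sh -h
```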
diff --git a/README.md b/README.md
index ffa59be..0cf9d8f 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
 # EFTFit
-This repository holds the custom files needed to run a EFT fit topcoffea datacards.
+This repository holds the custom files needed to run an EFT fit using topcoffea datacards.
 
 ## New fancy install script
 To quickly install this repo, simply run:
@@ -63,11 +63,11 @@ Now we can actually run combine to perform the fits.
 ```
 - Enter `CMSSW_14_1_0_pre4/src/EFTFit/Fitter/test`
 - Copy all .txt and .root files created by `python analysis/topEFT/datacard_maker.py` (in the `histos` directory of your toplevel topcoffea directory)
-- Run `combineCards.py` to merge them all into one txt file. **DO NOT** merge multiple variables for the **same** channel, as this would artifically double the statistics!
+- Run `combineCards.py` to merge them all into one txt file. **DO NOT** merge multiple variables for the **same** channel, as this would artificially double the statistics!
   - E.g. `njets` only: `combineCards.py ttx_multileptons-*{b,p,m}.txt > combinedcard.txt`
   - E.g. `ptbl` for all categories _but_ `3l off-shell Z` (using `HT` instead): `combineCards.py ttx_multileptons-2lss_*ptbl.txt ttx_multileptons-3l_onZ*ptbl.txt ttx_multileptons-3l_*_offZ_*ht.txt ttx_multileptons-4l_*ptbl.txt > combinedcard.txt`
-  - TOP-22-006 selection (old mehtod): `combineCards.py ttx_multileptons-{2,4}*lj0pt.txt ttx_multileptons-3l_{p,m}_offZ*lj0pt.txt ttx_multileptons-3l_onZ_1b_*ptz.txt ttx_multileptons-3l_onZ_2b_{4,5}j*ptz.txt ttx_multileptons-3l_onZ_2b_{2,3}j*lj0pt.txt > combinedcard.txt`
-  - TOP-22-006 selection (new mehtod): The latest tools should produce the correct lj0pt or ptz datacards for the corresponding categoes. Therefore, you can simply run: `combineCards.py ttx_multileptons-*.txt > combinedcard.txt`
+  - TOP-22-006 selection (old method): `combineCards.py ttx_multileptons-{2,4}*lj0pt.txt ttx_multileptons-3l_{p,m}_offZ*lj0pt.txt ttx_multileptons-3l_onZ_1b_*ptz.txt ttx_multileptons-3l_onZ_2b_{4,5}j*ptz.txt ttx_multileptons-3l_onZ_2b_{2,3}j*lj0pt.txt > combinedcard.txt`
+  - TOP-22-006 selection (new method): The latest tools should produce the correct lj0pt or ptz datacards for the corresponding categories. Therefore, you can simply run: `combineCards.py ttx_multileptons-*.txt > combinedcard.txt`
 - NOTE: combine uses a lot of recursive function calls to create the workspace. When running with systematics, this can cause a segmentation fault. You must run `ulimit -s unlimited` once per session to avoid this.
 - Run the following command to generate the workspace file with interference model by default:
 ```
@@ -99,12 +99,12 @@ python3 -i ../scripts/EFTPlotter.py
 plotter.BestScanPlot(basename_float_lst='.081721.njet.Float', basename_freeze_lst='.081821.njet.ptbl.Float', filename='_float_njet_ptbl', titles=['N_{jet} prof.', 'N_{jet}+p_{T}(b+l) prof.'], printFOM=True)
 ```
 
 ## Steps for reproducing the "official" TOP-22-006 workspace:
-1. Combine the cards: Inside of the EFTFit repo, copy the relevant cards (`.txt` files) and templates (`.root` files) for the categories that you want to make a worksapce for into the same directory. For the TOP-22-006 results, this should correspond to the appropriate mix-and-match combination of `ptz` and `lj0pt` that can be obtained with the `datacards_post_processing.py` script (as explained in the "To reproduce the TOP-22-006 histograms and datacards" section of the `topcoffea` readme). Then from within this directory (that contains only the relevant cards/templates _and no extraneous cards/templates_), run `combineCards.py ttx_multileptons-*.txt > combinedcard.txt` to make a combined card.
+1. Combine the cards: Inside of the EFTFit repo, copy the relevant cards (`.txt` files) and templates (`.root` files) for the categories that you want to make a workspace for into the same directory. For the TOP-22-006 results, this should correspond to the appropriate mix-and-match combination of `ptz` and `lj0pt` that can be obtained with the `datacards_post_processing.py` script (as explained in the "To reproduce the TOP-22-006 histograms and datacards" section of the `topcoffea` readme). Then from within this directory (that contains only the relevant cards/templates _and no extraneous cards/templates_), run `combineCards.py ttx_multileptons-*.txt > combinedcard.txt` to make a combined card.
 1. Copy your selected WC file that was made with your cards (called `selectedWCs.txt`) to somewhere that is accessible from where you will be running the `text2workspace` step.
 1. Make the workspace by running the following command. Note that this command can take ~2 hours up to about ~8 hours or more (depending on where it is run).
 ```
-    text2workspace.py combinedcard.txt -o yourworkspacename.root -P EFTFit.Fitter.AnomalousCouplingEFTNegative:analiticAnomalousCouplingEFTNegative --X-allow-no-background --for-fits --no-wrappers --X-pack-asympows --optimize-simpdf-constraints=cms --PO selectedWCs=/path/to/your/selectedWCs.txt
+    text2workspace.py combinedcard.txt -o yourworkspacename.root -P EFTFit.Fitter.AnomalousCouplingEFTNegative:analyticAnomalousCouplingEFTNegative --X-allow-no-background --for-fits --no-wrappers --X-pack-asympows --optimize-simpdf-constraints=cms --PO selectedWCs=/path/to/your/selectedWCs.txt
 ```
@@ -131,7 +131,7 @@ to collect all jobs and create the final pdf plots. A blank `wcs` will run over
 
 # Making postfit
 
-Note: If you are making Asimov data postfit, the best way to approach this is to simply create a seperate Asimov workspace with only Asimov datacards and perform the exact same steps in this section. (For topeft analysis group, turn off `--unblind` option to obtain Asimov datacards)
+Note: If you are making Asimov data postfit, the best way to approach this is to simply create a separate Asimov workspace with only Asimov datacards and perform the exact same steps in this section. (For topeft analysis group, turn off `--unblind` option to obtain Asimov datacards)
 1. Copy your workspace into `/Fitter/test` directory, make sure you activate `cmsenv`
 ### Making the postfit root file
 2. In `Fitter/test` run `MultidimFit` to make postfit for the workspace `wsp.root` with the following command: