diff --git a/.gitignore b/.gitignore index 87178727..8fa80711 100644 --- a/.gitignore +++ b/.gitignore @@ -33,7 +33,41 @@ AutoDict_* *.tmp *.pcm *.o +*.swo +*.swm +*.swn +*Test* +*TEST* # Output dirs in signal and bkg Signal/outdir_* +Signal/card* +Trees2WS/TEST* +Trees2WS/outdir* +Plots/SplusBModel* +Trees2WS/Merge* +Plots/pkl* +Trees2WS/card* +Trees2WS/scrip* + + Background/outdir_* +Background/cards* +Background/bin* +Background/lib* +Background/plot* + + +Combine/runFits* +Combine/runImpact* +Combine/t2w_jobs* +Combine/Models* +Combine/plot* + +Datacard/yields* +Datacard/cards* +Datacard/Models* +Merge + + +Combine/Checks/Bias_nominal/*/ diff --git a/Background/RunBackgroundScripts.py b/Background/RunBackgroundScripts.py index 1985d880..1a9562e8 100644 --- a/Background/RunBackgroundScripts.py +++ b/Background/RunBackgroundScripts.py @@ -44,6 +44,8 @@ def leave(): options['lumi'] = lumiMap[_cfg['year']] options['batch'] = _cfg['batch'] options['queue'] = _cfg['queue'] + options['xvar'] = _cfg['xvar'] + options['plotdir'] = _cfg['plotdir'] if 'plotdir' in _cfg else swd__ # Options from command line options['mode'] = opt.mode @@ -69,7 +71,8 @@ def leave(): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # If cat == auto: extract list of categories from datafile if options['cats'] == 'auto': - options['cats'] = extractListOfCatsFromData(options['dataFile']) + print options['dataFile'] + options['cats'] = extractListOfCatsFromData(options['dataFile']) options['nCats'] = len(options['cats'].split(",")) # Add dummy entries for procs and signalFitWSFile (used in old plotting script) @@ -82,6 +85,7 @@ def leave(): print " --> Categories: %s"%options['cats'] print " --> Extension: %s"%options['ext'] print " --> Category offset: %g"%options['catOffset'] +print " --> Variable to fit: %s"%options['xvar'] print " --> Year: %s ::: Corresponds to intLumi = %s fb^-1"%(options['year'],options['lumi']) print "" print " --> Job information:" diff --git 
a/Background/config_test.py b/Background/config_test.py index c53cd194..1f9b5fa6 100644 --- a/Background/config_test.py +++ b/Background/config_test.py @@ -2,15 +2,17 @@ backgroundScriptCfg = { - # Setup - 'inputWSDir':'/vols/cms/jl2117/hgg/ws/UL/Sept20/merged_data', # location of 'allData.root' file - 'cats':'auto', # auto: automatically inferred from input ws - 'catOffset':0, # add offset to category numbers (useful for categories from different allData.root files) - 'ext':'test', # extension to add to output directory - 'year':'combined', # Use combined when merging all years in category (for plots) + # Setup + 'inputWSDir':'cards/', # location of 'allData.root' file + 'cats':'auto', # auto: automatically inferred from input ws + 'catOffset':0, # add offset to category numbers (useful for categories from different allData.root files) + 'ext':'allData', # extension to add to output directory + 'year':'combined', # Use combined when merging all years in category (for plots) + 'xvar': 'CMS_hgg_mass', # not yet used, should be passed to the C++ macros + 'plotdir': 'plots', - # Job submission options - 'batch':'IC', # [condor,SGE,IC,local] - 'queue':'hep.q' # for condor e.g. microcentury + # Job submission options + 'batch':'condor', # [condor,SGE,IC,Rome,local] + 'queue':'espresso' # for condor e.g. 
espresso } diff --git a/Background/runBackgroundScripts.sh b/Background/runBackgroundScripts.sh index b4f824b3..73c30a3f 100755 --- a/Background/runBackgroundScripts.sh +++ b/Background/runBackgroundScripts.sh @@ -20,6 +20,7 @@ BATCH="" QUEUE="" YEAR="2016" CATOFFSET=0 +PLOTDIR="" usage(){ echo "The script runs background scripts:" @@ -36,13 +37,14 @@ echo "--pseudoDataOnly) " echo "--pseudoDataDat)" echo "--sigFile) " echo "--bkgPlotsOnly)" -echo "--seed) for pseudodata random number gen seed (default $SEED)" +echo "--seed) for pseudodata random number gen seed (default $SEED))" echo "--intLumi) specified in fb^-{1} (default $INTLUMI)) " echo "--year) dataset year (default $YEAR)) " echo "--isData) specified in fb^-{1} (default $DATA)) " echo "--unblind) specified in fb^-{1} (default $UNBLIND)) " echo "--batch) which batch system to use (None (''),HTCONDOR,IC) (default '$BATCH')) " echo "--queue) queue to submit jobs to (specific to batch))" +echo "--pdir) directory where to put the plots)" } @@ -50,7 +52,7 @@ echo "--queue) queue to submit jobs to (specific to batch))" # options may be followed by one colon to indicate they have a required argument -if ! options=$(getopt -u -o hi:p:f: -l help,inputFile:,procs:,flashggCats:,ext:,catOffset:,fTestOnly,pseudoDataOnly,bkgPlotsOnly,pseudoDataDat:,sigFile:,seed:,intLumi:,year:,unblind,isData,batch:,queue: -- "$@") +if ! 
options=$(getopt -u -o hi:p:f: -l help,inputFile:,procs:,flashggCats:,ext:,catOffset:,fTestOnly,pseudoDataOnly,bkgPlotsOnly,pseudoDataDat:,sigFile:,seed:,intLumi:,year:,unblind,isData,batch:,queue:,pdir: -- "$@") then # something went wrong, getopt will put out an error message for us exit 1 @@ -78,6 +80,7 @@ case $1 in --unblind) UNBLIND=1;; --batch) BATCH=$2; shift;; --queue) QUEUE=$2; shift;; +--pdir) PLOTDIR=$2; shift;; (--) shift; break;; (-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; @@ -88,7 +91,11 @@ done OUTDIR="outdir_${EXT}" -echo "[INFO] outdir is $OUTDIR, INTLUMI $INTLUMI" +if [[ $PLOTDIR == "" ]]; then + PLOTDIR=$OUTDIR +fi + +echo "[INFO] outdir is $OUTDIR, plotdir is $PLOTDIR INTLUMI $INTLUMI" if [ $ISDATA == 1 ]; then DATAEXT="-Data" @@ -97,6 +104,7 @@ echo "INTLUMI is $INTLUMI, YEAR is $YEAR" OUTDIR="outdir_${EXT}" mkdir -p $OUTDIR +mkdir -p $PLOTDIR if [ $FTESTONLY == 0 -a $PSEUDODATAONLY == 0 -a $BKGPLOTSONLY == 0 ]; then #IF not particular script specified, run all! 
@@ -147,7 +155,7 @@ if [ $FTESTONLY == 1 ]; then echo "--------------------------------------" echo "Running Background F-Test" -echo "-->Greate background model" +echo "-->Create background model" echo "--------------------------------------" if [ $UNBLIND == 1 ]; then OPT=" --unblind" @@ -159,8 +167,14 @@ if [ $ISDATA == 1 ]; then OPT=" --isData 1" fi +mkdir -p "${PLOTDIR}/bkgfTest${DATAEXT}" +if test -f "/afs/cern.ch/user/g/gpetrucc/php/index.php"; then + cp "/afs/cern.ch/user/g/gpetrucc/php/index.php" "${PLOTDIR}/bkgfTest${DATAEXT}" +elif test -f "/cmshome/dimarcoe/php/index.php"; then + cp "/cmshome/dimarcoe/php/index.php" "${PLOTDIR}/bkgfTest${DATAEXT}" +fi echo " ./bin/fTest -i $FILE --saveMultiPdf $OUTDIR/CMS-HGG_multipdf_$EXT_$CATS.root -D $OUTDIR/bkgfTest$DATAEXT -f $CATS $OPT --year $YEAR --catOffset $CATOFFSET" -./bin/fTest -i $FILE --saveMultiPdf $OUTDIR/CMS-HGG_multipdf_$EXT_$CATS.root -D $OUTDIR/bkgfTest$DATAEXT -f $CATS $OPT --year $YEAR --catOffset $CATOFFSET +./bin/fTest -i $FILE --saveMultiPdf $OUTDIR/CMS-HGG_multipdf_$EXT_$CATS.root -D $OUTDIR/bkgfTest$DATAEXT -P $PLOTDIR/bkgfTest$DATAEXT -f $CATS $OPT --year $YEAR --catOffset $CATOFFSET OPT="" fi diff --git a/Background/run_sequence.sh b/Background/run_sequence.sh new file mode 100644 index 00000000..2678245b --- /dev/null +++ b/Background/run_sequence.sh @@ -0,0 +1,2 @@ +#python scripts/mergeMultiYearsData.py -i cards/cards_current -o cards/cards_current/data_Run2/allData.root +python RunBackgroundScripts.py --inputConfig config_test.py --mode fTestParallel diff --git a/Background/scripts/mergeMultiYearsData.py b/Background/scripts/mergeMultiYearsData.py new file mode 100644 index 00000000..a7978155 --- /dev/null +++ b/Background/scripts/mergeMultiYearsData.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +import sys +import glob +import ROOT +from optparse import OptionParser +from commonTools import * + +def get_options(): + parser = OptionParser() + parser.add_option("-i","--inputdir", 
dest="idir", default="cards/cards_current", help="Input directory") + parser.add_option("-o","--outfile", dest="outfile", default="allData.root", help="Outputfile") + parser.add_option("--years", dest="years", default="2016,2017,2018", help="Merge the datasets of the following years") + return parser.parse_args() +(opt,args) = get_options() + +files = args[:] + + +#Extract all files to be merged +fNames = {} +for year in opt.years.split(","): fNames[year] = glob.glob("%s/data_%s/output_Data_13TeV.root"%(opt.idir,year)) + +cats = extractListOfCats(fNames[opt.years.split(",")[0]]).split(',') + +# Define ouput merged workspace +print " --> Merging output workspaces" +mergedWS = ROOT.RooWorkspace("cms_hgg_13TeV","cms_hgg_13TeV") +mergedWS.imp = getattr(mergedWS,"import") + +# Extract merged datasets +data_merged = {} +data_merged_names = [] +for cat in cats: + data_merged["Data_13TeV_%s" % cat] = ROOT.TFile(fNames[opt.years.split(",")[0]][0]).Get("tagsDumper/cms_hgg_13TeV").data("Data_13TeV_%s" % cat).emptyClone("Data_13TeV_%s" % cat) + data_merged_names.append( data_merged["Data_13TeV_%s" % cat].GetName() ) + +for year, fNames in fNames.iteritems(): + for fName in fNames: + for cat in cats: + d = ROOT.TFile(fName).Get("tagsDumper/cms_hgg_13TeV").data("Data_13TeV_%s" % cat) + print "YEAR = %-6s, CAT = %-30s, n = %d" % (year,cat,d.numEntries()) + for i in range(d.numEntries()): + p = d.get(i) + data_merged["Data_13TeV_%s" % cat].add(p) + +print " --> Writing to: %s"%(opt.outfile) +f = ROOT.TFile(opt.outfile,"RECREATE") +f.mkdir("tagsDumper") +f.cd("tagsDumper") +for _data in data_merged.itervalues(): mergedWS.imp(_data) +mergedWS.Write() +f.Close() diff --git a/Background/src/PdfModelBuilder.cc b/Background/src/PdfModelBuilder.cc index 309cba06..dcaa476b 100644 --- a/Background/src/PdfModelBuilder.cc +++ b/Background/src/PdfModelBuilder.cc @@ -17,6 +17,7 @@ #include "RooConstVar.h" #include "RooFitResult.h" #include "RooRandom.h" +#include #include 
"boost/algorithm/string/split.hpp" #include "boost/algorithm/string/classification.hpp" @@ -92,6 +93,10 @@ RooAbsPdf* PdfModelBuilder::getChebychev(string prefix, int order){ } RooAbsPdf* PdfModelBuilder::getBernstein(string prefix, int order){ + + //if (order >3) {return NULL;} + + RooArgList *coeffList = new RooArgList(); //coeffList->add(RooConst(1.0)); // no need for cnstant in this interface @@ -123,9 +128,9 @@ RooAbsPdf* PdfModelBuilder::getBernstein(string prefix, int order){ } else if (order==6) { RooBernsteinFast<6> *bern = new RooBernsteinFast<6>(prefix.c_str(),prefix.c_str(),*obs_var,*coeffList); return bern; -// } else if (order==7) { -// RooBernsteinFast<7> *bern = new RooBernsteinFast<7>(prefix.c_str(),prefix.c_str(),*obs_var,*coeffList); - // return bern; + } else if (order==7) { + RooBernsteinFast<7> *bern = new RooBernsteinFast<7>(prefix.c_str(),prefix.c_str(),*obs_var,*coeffList); + return bern; } else { return NULL; } diff --git a/Background/test/fTest.cpp b/Background/test/fTest.cpp index acd8a5fa..cb734988 100644 --- a/Background/test/fTest.cpp +++ b/Background/test/fTest.cpp @@ -64,6 +64,7 @@ RooRealVar *intLumi_ = new RooRealVar("IntLumi","hacked int lumi", 1000.); TRandom3 *RandomGen = new TRandom3(); RooAbsPdf* getPdf(PdfModelBuilder &pdfsModel, string type, int order, const char* ext=""){ + if (type=="Bernstein") return pdfsModel.getBernstein(Form("%s_bern%d",ext,order),order); else if (type=="Chebychev") return pdfsModel.getChebychev(Form("%s_cheb%d",ext,order),order); @@ -608,6 +609,7 @@ int main(int argc, char* argv[]){ int catOffset; string datfile; string outDir; + string plotDir; string outfilename; bool is2011=false; bool verbose=false; @@ -624,7 +626,8 @@ int main(int argc, char* argv[]){ ("ncats,c", po::value(&ncats)->default_value(5), "Number of categories") ("singleCat", po::value(&singleCategory)->default_value(-1), "Run A single Category") ("datfile,d", po::value(&datfile)->default_value("dat/fTest.dat"), "Right results to 
datfile for BiasStudy") - ("outDir,D", po::value(&outDir)->default_value("plots/fTest"), "Out directory for plots") + ("outDir,D", po::value(&outDir)->default_value("plots/fTest"), "Out directory for results") + ("plotDir,P", po::value(&plotDir)->default_value("plots/fTest"), "Out directory for plots") ("saveMultiPdf", po::value(&outfilename), "Save a MultiPdf model with the appropriate pdfs") ("runFtestCheckWithToys", "When running the F-test, use toys to calculate pvals (and make plots) ") ("is2011", "Run 2011 config") @@ -676,6 +679,7 @@ int main(int argc, char* argv[]){ } system(Form("mkdir -p %s",outDir.c_str())); + system(Form("mkdir -p %s",plotDir.c_str())); TFile *inFile = TFile::Open(fileName.c_str()); RooWorkspace *inWS; if(isFlashgg_){ @@ -845,14 +849,14 @@ int main(int argc, char* argv[]){ if (chi2<0. && order>1) chi2=0.; if (prev_pdf!=NULL){ prob = getProbabilityFtest(chi2,order-prev_order,prev_pdf,bkgPdf,mass,data - ,Form("%s/Ftest_from_%s%d_cat%d.pdf",outDir.c_str(),funcType->c_str(),order,(cat+catOffset))); + ,Form("%s/Ftest_from_%s%d_cat%d.pdf",plotDir.c_str(),funcType->c_str(),order,(cat+catOffset))); std::cout << "[INFO] F-test Prob(chi2>chi2(data)) == " << prob << std::endl; } else { prob = 0; } double gofProb=0; // otherwise we get it later ... - if (!saveMultiPdf) plot(mass,bkgPdf,data,Form("%s/%s%d_cat%d.pdf",outDir.c_str(),funcType->c_str(),order,(cat+catOffset)),flashggCats_,fitStatus,&gofProb); + if (!saveMultiPdf) plot(mass,bkgPdf,data,Form("%s/%s%d_cat%d.pdf",plotDir.c_str(),funcType->c_str(),order,(cat+catOffset)),flashggCats_,fitStatus,&gofProb); cout << "[INFO]\t " << *funcType << " " << order << " " << prevNll << " " << thisNll << " " << chi2 << " " << prob << endl; //fprintf(resFile,"%15s && %d && %10.2f && %10.2f && %10.2f \\\\\n",funcType->c_str(),order,thisNll,chi2,prob); prevNll=thisNll; @@ -908,7 +912,7 @@ int main(int argc, char* argv[]){ // Calculate goodness of fit for the thing to be included (will use toys for lowstats)! 
double gofProb =0; - plot(mass,bkgPdf,data,Form("%s/%s%d_cat%d.pdf",outDir.c_str(),funcType->c_str(),order,(cat+catOffset)),flashggCats_,fitStatus,&gofProb); + plot(mass,bkgPdf,data,Form("%s/%s%d_cat%d.pdf",plotDir.c_str(),funcType->c_str(),order,(cat+catOffset)),flashggCats_,fitStatus,&gofProb); if ((prob < upperEnvThreshold) ) { // Looser requirements for the envelope @@ -944,7 +948,7 @@ int main(int argc, char* argv[]){ choices_envelope_vec.push_back(choices_envelope); pdfs_vec.push_back(pdfs); - plot(mass,pdfs,data,Form("%s/truths_cat%d",outDir.c_str(),(cat+catOffset)),flashggCats_,cat); + plot(mass,pdfs,data,Form("%s/truths_cat%d",plotDir.c_str(),(cat+catOffset)),flashggCats_,cat); if (saveMultiPdf){ @@ -967,6 +971,7 @@ int main(int argc, char* argv[]){ //double check the best pdf! int bestFitPdfIndex = getBestFitFunction(pdf,data,&catIndex,!verbose); catIndex.setIndex(bestFitPdfIndex); + std::cout << "// ------------------------------------------------------------------------- //" <GetName() << ", in Category " << cat << " with a total of " << catIndex.numTypes() << " pdfs"<< std::endl; storedPdfs.Print(); @@ -983,7 +988,7 @@ int main(int argc, char* argv[]){ outputws->import(catIndex); outputws->import(dataBinned); outputws->import(*data); - plot(mass,pdf,&catIndex,data,Form("%s/multipdf_%s",outDir.c_str(),catname.c_str()),flashggCats_,cat,bestFitPdfIndex); + plot(mass,pdf,&catIndex,data,Form("%s/multipdf_%s",plotDir.c_str(),catname.c_str()),flashggCats_,cat,bestFitPdfIndex); } diff --git a/Background/tools/submissionTools.py b/Background/tools/submissionTools.py index 655ed79d..95c641bc 100644 --- a/Background/tools/submissionTools.py +++ b/Background/tools/submissionTools.py @@ -59,7 +59,7 @@ def writeSubFiles(_opts): c = _opts['cats'].split(",")[cidx] co = _opts['catOffset']+cidx _f.write("if [ $1 -eq %g ]; then\n"%cidx) - _cmd = "%s/runBackgroundScripts.sh -i %s -p %s -f %s --ext %s --catOffset %g --intLumi %s --year %s --batch %s --queue %s --sigFile %s 
--isData --fTest"%(bwd__,_opts['dataFile'],_opts['procs'],c,_opts['ext'],co,_opts['lumi'],_opts['year'],_opts['batch'],_opts['queue'],_opts['signalFitWSFile']) + _cmd = "%s/runBackgroundScripts.sh -i %s -p %s -f %s --ext %s --catOffset %g --intLumi %s --year %s --batch %s --queue %s --sigFile %s --isData --fTest --pdir %s"%(bwd__,_opts['dataFile'],_opts['procs'],c,_opts['ext'],co,_opts['lumi'],_opts['year'],_opts['batch'],_opts['queue'],_opts['signalFitWSFile'],_opts['plotdir']) _f.write(" %s\n"%_cmd) _f.write("fi\n") @@ -73,7 +73,7 @@ def writeSubFiles(_opts): _fsub.close() # SGE... - if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "local" ): + if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "Rome")|(_opts['batch'] == "local" ): _executable = "sub_%s_%s"%(_opts['mode'],_opts['ext']) # Write details depending on mode @@ -102,8 +102,9 @@ def submitFiles(_opts): print " --> Finished submitting files" # SGE - elif _opts['batch'] in ['IC','SGE']: + elif _opts['batch'] in ['IC','SGE','Rome']: _executable = "sub_%s_%s"%(_opts['mode'],_opts['ext']) + _subcmd = 'bsub' if _opts['batch']=='Rome' else 'qsub' # Extract job opts jobOptsStr = _opts['jobOpts'] @@ -113,7 +114,7 @@ def submitFiles(_opts): for cidx in range(_opts['nCats']): c = _opts['cats'].split(",")[cidx] _subfile = "%s/%s_%s"%(_jobdir,_executable,c) - cmdLine = "qsub -q hep.q %s -o %s.log -e %s.err %s.sh"%(jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) print " --> Finished submitting files" diff --git a/Combine/Checks/Bias_nominal/BiasFits b/Combine/Checks/Bias_nominal/BiasFits new file mode 120000 index 00000000..0badf49d --- /dev/null +++ b/Combine/Checks/Bias_nominal/BiasFits @@ -0,0 +1 @@ +/eos/home-f/fderiggi/AC/Bias_study/fit \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/BiasNominalToys 
b/Combine/Checks/Bias_nominal/BiasNominalToys new file mode 120000 index 00000000..513169fe --- /dev/null +++ b/Combine/Checks/Bias_nominal/BiasNominalToys @@ -0,0 +1 @@ +/eos/home-f/fderiggi/AC/Bias_study/BiaNominalToys \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/BiasToys b/Combine/Checks/Bias_nominal/BiasToys new file mode 120000 index 00000000..c6efd3f5 --- /dev/null +++ b/Combine/Checks/Bias_nominal/BiasToys @@ -0,0 +1 @@ +/eos/home-f/fderiggi/AC/Bias_study/toy \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/Bias_study.py b/Combine/Checks/Bias_nominal/Bias_study.py new file mode 100644 index 00000000..00ad6b1a --- /dev/null +++ b/Combine/Checks/Bias_nominal/Bias_study.py @@ -0,0 +1,179 @@ +# Script for running background fitting jobs for flashggFinalFit + +from optparse import OptionParser +from collections import OrderedDict as od +from tools import * +import numpy as np +import os +import concurrent.futures +from biasUtils import * +import os +from optparse import OptionParser +from submissionTools import writeCondorSub + + +# Import tools + +from commonTools import * +from commonObjects import * + +import ROOT as r +r.gROOT.SetBatch(True) +r.gStyle.SetOptStat(2211) + +def get_options(): + parser = OptionParser() + parser.add_option('--step', dest='step', default='Fit', help="Toy or Fit") + parser.add_option('--nToys', dest='nToys', default='100', help="Create a number of Toys") + parser.add_option('--ext', dest='ext', default='24_06_04', help="Estensione che vuoi per i jobs") + parser.add_option('--poi', dest='poi', default='muf', help="parameter of interest") + parser.add_option('--expectSignal', dest='expectSignal', default=0, help="expectSignal") + parser.add_option('--printOnly', dest='printOnly', default=False, action="store_true", help="Dry run: print submission files only") + return parser.parse_args() +(opt,args) = get_options() + +print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ RUNNING TOYS 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+def leave():
+    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ RUNNING TOYS (END) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+    sys.exit(1)
+
+
+
+toy_jobDir = "NominalToys_%s"%(opt.ext)
+toy_outputDir = "/eos/home-f/fderiggi/AC/BiasStudy/Output"+toy_jobDir
+
+fit_jobDir = "NominalFits_%s_%s"%(opt.ext, opt.poi)
+fit_outputDir = "/eos/home-f/fderiggi/AC/BiasStudy/Output_NominalFits_%s_%s"%(opt.ext, opt.poi )
+
+
+
+
+if opt.step == "Toy":
+
+    cmdLine = "mkdir -p %s"%(toy_jobDir)
+    run(cmdLine)
+
+    cmdLine = "mkdir -p %s"%(toy_outputDir )
+    run(cmdLine)
+    _executable = "sub_%s"%(opt.ext)
+
+
+
+    _subcmd = 'bsub'
+
+
+    _f = open("%s/Toys.txt"%(toy_jobDir),"w")
+    for n in range(int(opt.nToys)):
+        _cmd = "combine -m 125.3800 -d ../../Datacard_ALT_0M.root -M GenerateOnly -s -1 --saveToys -t 1 -n split%s --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1.,fa3_ggH=0. ;mv higgsCombine*split%s* %s/biasStudy_split%s_toys.root"%(n,n,toy_outputDir,n)
+        _f.write("%s\n"%_cmd)
+    _f.close()
+
+    writeSubFiles('OutputBias'+opt.step+'_Jobs',"%s/Toys.txt"%(toy_jobDir), batch = 'condor')
+
+    print " --> Finished submitting files"
+
+elif opt.step == "Fit":
+
+    cmdLine = "mkdir -p %s"%(fit_jobDir)
+    run(cmdLine)
+    _executable = "sub_%s"%(opt.ext)
+    cmdLine = "mkdir -p %s"%(fit_outputDir)
+    run(cmdLine)
+
+    FToy = os.listdir(toy_outputDir)
+
+    _f = open("%s/Fit_%s.txt"%(fit_jobDir,opt.poi),"w")
+
+
+
+    for i,f in enumerate(FToy):
+        # if not (i > 100 and i < 300) : continue
+        _cmd = "combine -m 125.3800 -d ../../Datacard_ALT_0M.root -M MultiDimFit -P %s --algo singles --floatOtherPOIs 1 --redefineSignalPOIs muV,muf,fa3_ggH,CMS_zz4l_fai1 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1.,fa3_ggH=0. --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.5 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo Minuit2,0:1. --saveInactivePOI 1 --saveWorkspace --cminDefaultMinimizerStrategy 0 --X-rtd MINIMIZER_freezeDisassociatedParams --X-rtd MINIMIZER_multiMin_hideConstants --X-rtd MINIMIZER_multiMin_maskConstraints --X-rtd MINIMIZER_multiMin_maskChannels=2 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:fa3_ggH=-0.5,0.5:CMS_zz4l_fai1=-0.001,0.001 -t 1 -n _%ssplit%s_ --toysFile=%s/%s; mv higgsCombine*_%ssplit%s_* %s/biasStudy_split%s_fits.root"%(opt.poi,opt.poi,i,toy_outputDir,f,opt.poi,i,fit_outputDir,i)
+        _f.write("%s\n"%_cmd)
+
+
+
+    _f.close()
+    writeSubFiles('OutputBias'+opt.step+'_Jobs_%s'%(opt.poi),"%s/Fit_%s.txt"%(fit_jobDir,opt.poi), batch = 'condor')
+
+
+elif opt.step == "Plot":
+
+    #_outputDir_Fit =['Output_NominalFits_24_06_04/biasStudy_split455_fits.root']
+
+
+
+    label_dict = {"muf":r'\frac{2(\mu_{f}-1)}{\sigma^{+} + \sigma^{-}} ' ,
+                  "muV":r'\frac{2(\mu_{V}-1)}{\sigma^{+} + \sigma^{-}} ' ,
+                  "CMS_zz4l_fai1":r'\frac{2f_{a3}}{\sigma^{+} + \sigma^{-}} '}
+
+    label_color = {"CMS_zz4l_fai1":r.kMagenta-9,
+                   "muf":r.kAzure+8,
+                   "muV":r.kOrange+6}
+
+    pullHist = r.TH1F(' ', ' ', 20, -5., 5.)
+    pullHist.GetXaxis().SetTitle(label_dict[opt.poi])
+    pullHist.GetYaxis().SetTitle('Entries')
+    pullHist.GetXaxis().SetTitleOffset(1.)
+
+
+
+    print(fit_outputDir)
+
+    mean = []
+    for i,FitFile in enumerate(os.listdir(fit_outputDir)):
+
+
+        tfile = r.TFile(fit_outputDir +'/'+FitFile)
+        #tfile = r.TFile(FitFile )
+
+        tree = tfile.Get('limit')
+
+        tree.GetEntry(0)
+        if not getattr(tree,'quantileExpected')==-1:
+            #raiseFailError(i,True)
+            continue
+        bf = getattr(tree, opt.poi)
+
+        tree.GetEntry(1)
+        if not abs(getattr(tree,'quantileExpected')--0.32)<0.001:
+            raiseFailError(i,True)
+            continue
+        lo = getattr(tree, opt.poi)
+
+        tree.GetEntry(2)
+        if not abs(getattr(tree,'quantileExpected')-0.32)<0.001:
+            raiseFailError(i,True)
+            continue
+
+        hi = getattr(tree, opt.poi)
+        diff = bf - float(opt.expectSignal)
+        unc = 0.5 * (hi-lo)
+        if unc > 0.:
+            if abs(diff/unc)< 0.05 : print(diff/unc, fit_outputDir +'/'+FitFile)
+            pullHist.Fill(diff/unc)
+
+
+            mean.append(diff/unc)
+        if unc == 0.: print("")
+        #print(unc, _outputDir_Fit +'/'+FitFile)
+
+
+    pullHist.SetFillColor(label_color[opt.poi])
+    pullHist.SetLineColor(label_color[opt.poi])
+    pullHist.SetLineWidth(2)
+    if mean: print("mean calcolata senza gaussiana = %s"%(sum(mean)/len(mean)))
+    canv = r.TCanvas()
+    canv.SetBottomMargin(0.15);
+    pullHist.Draw()
+
+    r.gStyle.SetOptFit(11111)
+    #pullHist.Fit('gaus')
+    canv.SaveAs('BiasStudy_%s.pdf'%(opt.poi))
+    canv.SaveAs('BiasStudy_%s.png'%(opt.poi))
+
+
+
+
+
\ No newline at end of file
diff --git a/Combine/Checks/Bias_nominal/NominalFits_24_06_04_muV b/Combine/Checks/Bias_nominal/NominalFits_24_06_04_muV
new file mode 120000
index 00000000..ef56c1ac
--- /dev/null
+++ b/Combine/Checks/Bias_nominal/NominalFits_24_06_04_muV
@@ -0,0 +1 @@
+/eos/home-f/fderiggi/AC/BiasStudy/NominalFits_24_06_04_muV
\ No newline at end of file
diff --git a/Combine/Checks/Bias_nominal/NominalFits_24_06_04_muf b/Combine/Checks/Bias_nominal/NominalFits_24_06_04_muf
new file mode 120000
index 00000000..5afb7663
--- /dev/null
+++ b/Combine/Checks/Bias_nominal/NominalFits_24_06_04_muf
@@ -0,0 +1 @@
+/eos/home-f/fderiggi/AC/BiasStudy/NominalFits_24_06_04_muf \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/OutputNominalToys_24_06_04 b/Combine/Checks/Bias_nominal/OutputNominalToys_24_06_04 new file mode 120000 index 00000000..9e67b481 --- /dev/null +++ b/Combine/Checks/Bias_nominal/OutputNominalToys_24_06_04 @@ -0,0 +1 @@ +/eos/home-f/fderiggi/AC/BiasStudy/OutputNominalToys_24_06_04 \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_CMS_zz4l_fai1 b/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_CMS_zz4l_fai1 new file mode 120000 index 00000000..a1aff97b --- /dev/null +++ b/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_CMS_zz4l_fai1 @@ -0,0 +1 @@ +/eos/home-f/fderiggi/AC/BiasStudy/Output_NominalFits_24_06_04_CMS_zz4l_fai1 \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_muV b/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_muV new file mode 120000 index 00000000..e9391bfa --- /dev/null +++ b/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_muV @@ -0,0 +1 @@ +/eos/home-f/fderiggi/AC/BiasStudy/Output_NominalFits_24_06_04_muV \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_muf b/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_muf new file mode 120000 index 00000000..e6d9ee88 --- /dev/null +++ b/Combine/Checks/Bias_nominal/Output_NominalFits_24_06_04_muf @@ -0,0 +1 @@ +/eos/home-f/fderiggi/AC/BiasStudy/Output_NominalFits_24_06_04_muf \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/README.md b/Combine/Checks/Bias_nominal/README.md deleted file mode 100644 index 37dd7c70..00000000 --- a/Combine/Checks/Bias_nominal/README.md +++ /dev/null @@ -1,57 +0,0 @@ -# Impacts - -Impacts documentation to come here. - -# Bias studies - -Here we provide a script to perform a simple bias study. 
- -## Inputs - -The only pre-requisite is a workspace with a single category, -and therefore a single multipdf (the object that controls the envelope method) -and a single pdf index corresponding to the choice of functional form. - -To create this, it's simple to use the existing combineCards functionality, for example: -``` -combineCards.py Datacard.txt --ic cat_name > Datacard_cat_name.txt -``` - -This creates a .txt datacard with only categories matching the reg exp `cat_name` included. -Be careful: you will probably have to manually delete some pdfindex lines at the bottom; -the script does not know that these correspond to the analysis categories, -and therefore will leave them all in (you only want the one corresponding to the category you are studying). - -Once that is done, you can run your usual `text2workspace` command to generate the `-d, --datacard` input for this script. - -## Usage - -The script is split into three different stages: - * `-t, --toys`: throw and save a total of `-n,--nToys` toys for each of the candidate functions included in the envelope - * `-f, --fits`: fit each of those toys and extract the uncertainty - * `-p, --plots`: plot the pull distribution of the resulting fits - -You can then inspect the output plots and hope to see an approximately gaussian shape with zero mean and unit width. -Normally, provided that the absolute value of the mean is less than 0.14, this is considered satisfactory. - -The three steps can be run in one go, but it's probably safer to run them one-by-one. 
-Here is an example: - -``` -./RunBiasStudy.py -d Datacard_mu_ggH_cat0.root -t -./RunBiasStudy.py -d Datacard_mu_ggH_cat0.root -f -c "--cminDefaultMinimizerStrategy 0 --X-rtd MINIMIZER_freezeDisassociatedParams --X-rtd MINIMIZER_multiMin_hideConstants --X-rtd MINIMIZER_multiMin_maskConstraints --X-rtd MINIMIZER_multiMin_maskChannels=2 --freezeParameters MH" -./RunBiasStudy.py -d Datacard_mu_ggH_cat0.root -p --gaussianFit -``` -The options for the second step are passed to combine; these are recommended to get the fit to converge. -The additional option on the plotting is fairly self-explanatory; it adds a gaussian fit to the output plot. - -## More options - -There are various things one can tweak for these studies. -Here is a list of the common options: - * `-n,--nToys`: the default number of toys is 1000 per function, but can be lowered or raised. - * `-e,--expectSignal`: the injected signal strength is 1 by default, but zero can also be checked, or higher values for searches. - * `-s,--seed`: the default value of -1 finds a random seed; you can fix this for reproducility if you prefer. - * `--poi`: if your parameter of interest is called something other than `r`, say so here. - * `--split`: default number of toys to be thrown or fits to be performed in one go. Set to 500 but may need to be lowered for memory reasons if you have a more complicated fit. - * `--selectFunction`: you can specify a string here to only select certain functions for these studies (e.g. `bern` to match all Bernstein polynomials, `exp1` to match just the first-order exponential). 
diff --git a/Combine/Checks/Bias_nominal/RunBiasStudy.py b/Combine/Checks/Bias_nominal/RunBiasStudy.py deleted file mode 100755 index 86ffbcd2..00000000 --- a/Combine/Checks/Bias_nominal/RunBiasStudy.py +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/env python - -from biasUtils import * - -from optparse import OptionParser -parser = OptionParser() -parser.add_option("-d","--datacard",default="Datacard.root") -parser.add_option("-w","--workspace",default="w") -parser.add_option("-t","--toys",action="store_true", default=False) -parser.add_option("-n","--nToys",default=1000,type="int") -parser.add_option("-f","--fits",action="store_true", default=False) -parser.add_option("-p","--plots",action="store_true", default=False) -parser.add_option("-e","--expectSignal",default=1.,type="float") -parser.add_option("-m","--mH",default=125.,type="float") -parser.add_option("-c","--combineOptions",default="") -parser.add_option("-s","--seed",default=-1,type="int") -parser.add_option("--dryRun",action="store_true", default=False) -parser.add_option("--poi",default="r") -parser.add_option("--split",default=500,type="int") -parser.add_option("--selectFunction",default=None) -parser.add_option("--gaussianFit",action="store_true", default=False) -(opts,args) = parser.parse_args() -print -if opts.nToys>opts.split and not opts.nToys%opts.split==0: raise RuntimeError('The number of toys %g needs to be smaller than or divisible by the split number %g'%(opts.nToys, opts.split)) - -import ROOT as r -r.gROOT.SetBatch(True) -r.gStyle.SetOptStat(2211) - -ws = r.TFile(opts.datacard).Get(opts.workspace) - -pdfs = rooArgSetToList(ws.allPdfs()) -multipdfName = None -for pdf in pdfs: - if pdf.InheritsFrom("RooMultiPdf"): - if multipdfName is not None: raiseMultiError() - multipdfName = pdf.GetName() - print 'Conduct bias study for multipdf called %s'%multipdfName -multipdf = ws.pdf(multipdfName) -print - -varlist = rooArgSetToList(ws.allCats()) -indexName = None -for var in varlist: - if 
var.GetName().startswith('pdfindex'): - if indexName is not None: raiseMultiError() - indexName = var.GetName() - print 'Found index called %s'%indexName -print - -from collections import OrderedDict as od -indexNameMap = od() -for ipdf in range(multipdf.getNumPdfs()): - if opts.selectFunction is not None: - if not multipdf.getPdf(ipdf).GetName().count(opts.selectFunction): continue - indexNameMap[ipdf] = multipdf.getPdf(ipdf).GetName() - -if opts.toys: - if not path.isdir('BiasToysn'): system('mkdir -p BiasToys') - toyCmdBase = 'combine -m %.4f -d %s -M GenerateOnly --expectSignal %.4f -s %g --saveToys %s '%(opts.mH, opts.datacard, opts.expectSignal, opts.seed, opts.combineOptions) - for ipdf,pdfName in indexNameMap.iteritems(): - name = shortName(pdfName) - if opts.nToys > opts.split: - for isplit in range(opts.nToys//opts.split): - toyCmd = toyCmdBase + ' -t %g -n _%s_split%g --setParameters %s=%g --freezeParameters %s'%(opts.split, name, isplit, indexName, ipdf, indexName) - run(toyCmd, dry=opts.dryRun) - system('mv higgsCombine_%s* %s'%(name, toyName(name,split=isplit))) - else: - toyCmd = toyCmdBase + ' -t %g -n _%s --setParameters %s=%g --freezeParameters %s'%(opts.nToys, name, indexName, ipdf, indexName) - run(toyCmd, dry=opts.dryRun) - system('mv higgsCombine_%s* %s'%(name, toyName(name))) -print - -if opts.fits: - if not path.isdir('BiasFits'): system('mkdir -p BiasFits') - fitCmdBase = 'combine -m %.4f -d %s -M MultiDimFit -P %s --algo singles %s '%(opts.mH, opts.datacard, opts.poi, opts.combineOptions) - for ipdf,pdfName in indexNameMap.iteritems(): - name = shortName(pdfName) - if opts.nToys > opts.split: - for isplit in range(opts.nToys//opts.split): - fitCmd = fitCmdBase + ' -t %g -n _%s_split%g --toysFile=%s'%(opts.split, name, isplit, toyName(name,split=isplit)) - run(fitCmd, dry=opts.dryRun) - system('mv higgsCombine_%s* %s'%(name, fitName(name,split=isplit))) - run('hadd %s BiasFits/*%s*split*.root'%(fitName(name),name), dry=opts.dryRun) - else: 
- fitCmd = fitCmdBase + ' -t %g -n _%s --toysFile=%s'%(opts.nToys, name, toyName(name)) - run(fitCmd, dry=opts.dryRun) - system('mv higgsCombine_%s* %s'%(name, fitName(name))) - -if opts.plots: - if not path.isdir('BiasPlots'): system('mkdir -p BiasPlots') - for ipdf,pdfName in indexNameMap.iteritems(): - name = shortName(pdfName) - tfile = r.TFile(fitName(name)) - tree = tfile.Get('limit') - pullHist = r.TH1F('pullsForTruth_%s'%name, 'Pull distribution using the envelope to fit %s'%name, 80, -4., 4.) - pullHist.GetXaxis().SetTitle('Pull') - pullHist.GetYaxis().SetTitle('Entries') - for itoy in range(opts.nToys): - tree.GetEntry(3*itoy) - if not getattr(tree,'quantileExpected')==-1: - raiseFailError(itoy,True) - continue - bf = getattr(tree, 'r') - tree.GetEntry(3*itoy+1) - if not abs(getattr(tree,'quantileExpected')--0.32)<0.001: - raiseFailError(itoy,True) - continue - lo = getattr(tree, 'r') - tree.GetEntry(3*itoy+2) - if not abs(getattr(tree,'quantileExpected')-0.32)<0.001: - raiseFailError(itoy,True) - continue - hi = getattr(tree, 'r') - diff = bf - opts.expectSignal - unc = 0.5 * (hi-lo) - if unc > 0.: - pullHist.Fill(diff/unc) - canv = r.TCanvas() - pullHist.Draw() - if opts.gaussianFit: - r.gStyle.SetOptFit(111) - pullHist.Fit('gaus') - canv.SaveAs('%s.pdf'%plotName(name)) - canv.SaveAs('%s.png'%plotName(name)) diff --git a/Combine/Checks/Bias_nominal/biasUtils.py b/Combine/Checks/Bias_nominal/biasUtils.py index 6cf638c2..9ce3821f 100644 --- a/Combine/Checks/Bias_nominal/biasUtils.py +++ b/Combine/Checks/Bias_nominal/biasUtils.py @@ -1,4 +1,7 @@ #!/usr/bin/env python +import os +from submissionTools import writeCondorSub + def rooArgSetToList(argset): ## taken from Andrea Marini's great repo here: https://github.com/amarini/rfwsutils/blob/master/wsutils.py#L300-L313 """creates a python list with the contents of argset (which should be a RooArgSet)""" @@ -33,16 +36,84 @@ def toyName(name, split=None): retval = retval.replace(name,'%s_split%g'%(name,split)) 
return retval +def toyName2(name, split=None): + retval = 'biasStudy_%s_toys.root'%name + if split is not None: + split = int(split) + retval = retval.replace(name,'%s_split%g'%(name,split)) + return retval + def fitName(name, split=None): retval = 'BiasFits/biasStudy_%s_fits.root'%name if split is not None: split = int(split) retval = retval.replace(name,'%s_split%g'%(name,split)) return retval +def fitName2(name, split=None): + retval = 'biasStudy_%s_fits.root'%name + if split is not None: + split = int(split) + retval = retval.replace(name,'%s_split%g'%(name,split)) + return retval def plotName(name): return 'BiasPlots/biasStudy_%s_pulls'%name def run(cmd, dry=False): print cmd - if not dry: system(cmd) + if not dry: os.system(cmd) + + +def writePreamble(_file,_otherBase=None): + twd__ = os.getcwd() + _file.write("#!/bin/bash\n") + _file.write("ulimit -s unlimited\n") + #_file.write("set -e\n") + if _otherBase is not None: _file.write("cd %s\n"%_otherBase) + else: _file.write("cd %s/src\n"%os.environ['CMSSW_BASE']) + _file.write("export SCRAM_ARCH=%s\n"%os.environ['SCRAM_ARCH']) + _file.write("source /cvmfs/cms.cern.ch/cmsset_default.sh\n") + _file.write("cmsenv\n") + _file.write("cd %s\n"%twd__) + _file.write("export PYTHONPATH=$PYTHONPATH:/afs/cern.ch/user/f/fderiggi/CMSSW_10_2_13/src/flashggFinalFit/tools:/afs/cern.ch/user/f/fderiggi/CMSSW_10_2_13/src/flashggFinalFit/Trees2WS/tools\n\n") + +def writeSubFiles(ext,file, batch = 'condor'): + twd__ = os.getcwd() + + # print("mkdir -p %s/outdir_%s/jobs"%(twd__,_opts.ext)) + # Make directory to store sub files + os.system("mkdir -p %s/outdir_OutputBias_Jobs/outdir_%s"%(twd__,ext)) + os.system("mkdir -p %s/outdir_OutputBias_Jobs/outdir_%s/jobs"%(twd__,ext)) + + _jobdir = "%s/outdir_OutputBias_Jobs/outdir_%s/jobs"%(twd__,ext) + # Remove current job files +# if len(glob.glob("%s/*"%_jobdir)): os.system("rm %s/*"%_jobdir) + + # CONDOR + if batch == "condor": + _executable = "condor_%s"%(ext) + _f = 
open("%s/%s.sh"%(_jobdir,_executable),"w") # single .sh script split into separate jobs + writePreamble(_f) + + # Write details depending on mode + # Extract list of files + # Run separate command per file + + with open(file, 'r') as f: + # Leggi il file riga per riga + i = 0 + for _cmd in f: + if _cmd == '': continue + _f.write("if [ $1 -eq %g ]; then\n"%i) + _f.write(" %s\n"%_cmd) + _f.write("fi\n") + i = i+1 + + _f.close() + os.system("chmod 775 %s/%s.sh"%(_jobdir,_executable)) + #SUB file + _fsub = open("%s/%s.sub"%(_jobdir,_executable),"w") + writeCondorSub(_fsub,_executable,"workday",i,'') + cmdLine = "cd %s; condor_submit %s.sub; cd %s"%(_jobdir,_executable,twd__) + print(cmdLine) + #run(cmdLine) \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/plots b/Combine/Checks/Bias_nominal/plots new file mode 120000 index 00000000..c851a27e --- /dev/null +++ b/Combine/Checks/Bias_nominal/plots @@ -0,0 +1 @@ +/eos/home-f/fderiggi/www/AC/BiasStudy_JulyProduction_ALT_0M \ No newline at end of file diff --git a/Combine/Checks/Bias_nominal/tools.py b/Combine/Checks/Bias_nominal/tools.py new file mode 100644 index 00000000..0197a3d2 --- /dev/null +++ b/Combine/Checks/Bias_nominal/tools.py @@ -0,0 +1,24 @@ +import os, sys +# Paths and directory +cmsswbase__ = os.environ['CMSSW_BASE'] +cwd__ = os.environ['CMSSW_BASE']+"/src/flashggFinalFit" +wd__ = "%s/Combine/Checks/Bias_nominal"%cwd__ + + +def run(cmd): + print "%s\n\n"%cmd + os.system(cmd) + + +def writePreamble(_file): + _file.write("#!/bin/bash\n") + _file.write("ulimit -s unlimited\n") + #_file.write("set -e\n") + _file.write("cd %s/src\n"%os.environ['CMSSW_BASE']) + _file.write("export SCRAM_ARCH=%s\n"%os.environ['SCRAM_ARCH']) + _file.write("source /cvmfs/cms.cern.ch/cmsset_default.sh\n") + _file.write("cd %s\n"%wd__) + _file.write("cmsenv\n") + _file.write("MY.SingularityImage = \"/cvmfs/unpacked.cern.ch/gitlab-registry.cern.ch/cms-cat/cmssw-lxplus/cmssw-el7-lxplus:latest/\"\n") + + 
_file.write("export PYTHONPATH=$PYTHONPATH:%s/tools:%s/tools\n\n"%(cwd__,wd__)) \ No newline at end of file diff --git a/Combine/CollectFits.py b/Combine/CollectFits.py index 5d89bc28..3e3855c1 100644 --- a/Combine/CollectFits.py +++ b/Combine/CollectFits.py @@ -40,7 +40,13 @@ def run(cmd): if _fit.split(":")[2] == "all": _fitpois = pois else: _fitpois = _fit.split(":")[2].split(",") _name = "%s_%s"%(_fit.split(":")[0],_fit.split(":")[1]) - if opt.doObserved: _name += "_obs" + if opt.doObserved: + _name += "_obs" + mainlabel = "Observed" + else: + mainlabel = "Expected" + # add this to distinguish different fits with same POI + _name += "_"+opt.ext if( _fit.split(":")[0] == "bestfit" ): for poi in _fitpois: @@ -56,9 +62,23 @@ def run(cmd): elif( _fit.split(":")[0] == "profile1D")|( _fit.split(":")[0] == "scan1D" ): for poi in _fitpois: + if poi in ["r_ggH","r_VBF","r_top","r_VH"]: + translate_json = "pois_mu.json" + elif poi=='CMS_zz4l_fai1': + + if 'ALT_0M' in opt.mode: translate_json = "pois_fa3.json" + elif 'ALT_0PH' in opt.mode: translate_json = "pois_fa2.json" + elif 'ALT_L1Zg' in opt.mode: translate_json = "pois_flambda1zgamma.json" + elif 'ALT_L1' in opt.mode: translate_json = "pois_flambda1.json" + else: + print "Warning: unknown poi. 
Use r as default" + translate_json = "pois_mu.json" + haddcmd = "cd runFits%s_%s; hadd -f %s_%s.root higgsCombine_%s_%s.POINTS.*.*.root; cd .."%(opt.ext,opt.mode,_name,poi,_name,poi) + print(haddcmd ) + plotcmd = "cd runFits%s_%s; plot1DScan.py %s_%s.root --y-cut 30 --y-max 30 -o ../plots/%s_%s%s --POI %s --main-label %s --translate %s/src/flashggFinalFit/Plots/%s; cd .."%(opt.ext,opt.mode,_name,poi,_name,poi,opt.ext,poi,mainlabel,os.environ['CMSSW_BASE'],translate_json) + print "plotcmd = ",plotcmd run(haddcmd) - plotcmd = "cd runFits%s_%s; plot1DScan.py %s_%s.root --y-cut 20 --y-max 20 -o Plots/%s_%s%s --POI %s; cd .."%(opt.ext,opt.mode,_name,poi,_name,poi,opt.ext,poi) run(plotcmd) elif( _fit.split(":")[0] == "scan2D")|( _fit.split(":")[0] == "profile2D" ): diff --git a/Combine/Datacard_ALT_0M.root b/Combine/Datacard_ALT_0M.root new file mode 120000 index 00000000..0c618c59 --- /dev/null +++ b/Combine/Datacard_ALT_0M.root @@ -0,0 +1 @@ +/eos/cms/store/group/phys_higgs/cmshgg/fderiggi/Datacards_DecemberProduction/Datacard_ALT_0M.root \ No newline at end of file diff --git a/Combine/Plot1DCustom.py b/Combine/Plot1DCustom.py new file mode 100644 index 00000000..29e7750e --- /dev/null +++ b/Combine/Plot1DCustom.py @@ -0,0 +1,104 @@ +import ROOT +import numpy as np +from optparse import OptionParser + +def get_options(): + parser = OptionParser() + parser.add_option('--inputFile', dest='inputFile', default='runFitsALT_0M_ALT_0M/profile1D_syst_ALT_0M_CMS_zz4l_fai1.root', help='Input WS directory') + parser.add_option('--output', dest='output', default='plot/TEST', help='') + return parser.parse_args() +(opt,args) = get_options() + +def analyze_and_interpolate(): + # Apri il file ROOT + file = ROOT.TFile.Open(opt.inputFile) + if not file or file.IsZombie(): + print("Errore nell'apertura del file!") + return + + # Accedi al tree + tree = file.Get("limit") + if not tree: + print("Tree 'tree' non trovato!") + return + + # Leggi i valori dal tree + x_vals = [] + y_vals = [] 
+ i = 0 + for entry in tree: + if i % 2 == 0 : + i=i+1 + continue + x_vals.append(entry.CMS_zz4l_fai1) + + y_vals.append(2 * entry.deltaNLL) + i=i+1 + + x_vals = np.array(x_vals) + y_vals = np.array(y_vals) + + + sorted_indices = np.argsort(x_vals) + + x_vals= x_vals[sorted_indices] + y_vals = y_vals[sorted_indices] + + # Crea un grafico + canvas = ROOT.TCanvas("canvas", "Canvas", 800, 600) + graph = ROOT.TGraph(len(x_vals), x_vals, y_vals) + graph.SetTitle("Profile Likelihood;CMS_zz4l_fai1;2*deltaNLL") + + + graph.SetMarkerStyle(20) + graph.SetMarkerColor(ROOT.kBlue) + graph.Draw("AP") + + # Interpolazione per trovare il valore di x quando y=1 + interpolated_points_1 = [] + interpolated_points_4 = [] + for i in range(len(x_vals) - 1): + x1, y1 = x_vals[i], y_vals[i] + x2, y2 = x_vals[i + 1], y_vals[i + 1] + m = (y2-y1)/(x2-x1) + q = (y2-y1)/(x2-x1)* x2-y2 + + # Interpolazione lineare + if (y1 - 1) * (y2 - 1) < 0: # La retta passa per y=1 + x_interp = x1 + (1 - y1) * (x2 - x1) / (y2 - y1) + interpolated_points_1.append(x_interp) + line = ROOT.TLine(x1, y1, x2, y2) + line.SetLineColor(ROOT.kRed) + line.SetLineStyle(2) + line.Draw("same") + + if (y1 - 4) * (y2 - 4) < 0: # La retta passa per y=1 + x_interp = x1 + (4 - y1) * (x2 - x1) / (y2 - y1) + interpolated_points_4.append(x_interp) + line = ROOT.TLine(x1, y1, x2, y2) + line.SetLineColor(ROOT.kRed) + line.SetLineStyle(2) + line.Draw("same") + + + # Disegna una linea orizzontale a y=1 + hline = ROOT.TLine(min(x_vals), 1, max(x_vals), 1) + hline.SetLineColor(ROOT.kGreen) + hline.SetLineStyle(2) + hline.Draw("same") + cl_68 = "@ 68 CL = [" + "%.4g" % interpolated_points_1[0] + ", " + "%.4g" % interpolated_points_1[1] + "]" + cl_95 = "@ 95 CL = [" + "%.4g" % interpolated_points_4[0] + ", " + "%.4g" % interpolated_points_4[1] + "]" + + text = ROOT.TLatex() + text.SetTextSize(0.03) # Dimensione del testo + text.SetTextColor(ROOT.kBlack) # Colore del testo + text.SetNDC() # Imposta le coordinate come Normalized Device 
Coordinates (opzionale) + text.DrawLatex(0.5, 0.85, cl_68) + text.DrawLatex(0.5, 0.80, cl_95) + + # Mostra la canvas + canvas.SaveAs(opt.output+".png") + file.Close() + +# Esegui la funzione +analyze_and_interpolate() diff --git a/Combine/PlotScans.py b/Combine/PlotScans.py new file mode 100644 index 00000000..84fc9875 --- /dev/null +++ b/Combine/PlotScans.py @@ -0,0 +1,87 @@ +# Script to collect fit outputs + +import os, sys +import re +from optparse import OptionParser +import glob +import json + +print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HGG PLOT SCANS RUN II ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " + +def get_options(): + parser = OptionParser() + parser.add_option('--inputJson', dest='inputJson', default='inputs.json', help="Input json file to define fits") + parser.add_option('--mode', dest='mode', default='mu_inclusive', help="Type of fit") + parser.add_option('--outdir', dest='outdir', default='', help="name of the output directory in plots/") + parser.add_option('--ext', dest='ext', default='', help="Running over Datacard with extension") + parser.add_option('--doObserved', dest='doObserved', action="store_true", default=False, help="Fit to data") + return parser.parse_args() +(opt,args) = get_options() + +def leave(): + print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HGG COLLECT FITS RUN II (END) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " + sys.exit(1) + +def run(cmd): + #print "%s\n"%cmd + os.system(cmd) + + +# Read json file +with open( opt.inputJson ) as jsonfile: inputs = json.load(jsonfile)[opt.mode] +# Extract info +pois = inputs['pois'].split(",") +fits = inputs['fits'].split("+") + +# Create plots directory in mode +pdir="plots/%s/%s"%(opt.outdir,opt.mode) +if not os.path.isdir(pdir): + os.system("mkdir -p %s"%pdir) + if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php "+pdir) +# Loop over fits: plot +for fidx in range(len(fits)): + _fit = fits[fidx] + if _fit.split(":")[2] == "all": _fitpois = pois + else: _fitpois = 
_fit.split(":")[2].split(",") + _name = "%s_%s"%(_fit.split(":")[0],_fit.split(":")[1]) + if opt.doObserved: + _name += "_obs" + mainlabel = "Observed" + else: + mainlabel = "Expected" + + # add this to distinguish different fits with same POI + _name += "_"+opt.ext + + if( _fit.split(":")[0] == "bestfit" ): + for poi in _fitpois: + mvcmd = "mv higgsCombine_%s_%s.MultiDimFit.mH125.root %s/%s_%s.root"%(_name,poi,pdir,_name,poi) + print " --> Storing best fit: %s/%s_%s.root"%(pdir,_name,poi) + run(mvcmd) + + elif( _fit.split(":")[0] == "fixed" ): + for poi in _fitpois: + mvcmd = "mv higgsCombine_%s_%s.MultiDimFit.mH125.root %s/%s.root"%(_name,poi,pdir,_name) + print " --> Storing fixed point: %s/%s.root"%(pdir,_name) + run(mvcmd) + + elif( _fit.split(":")[0] == "profile1D")|( _fit.split(":")[0] == "scan1D" ): + for poi in _fitpois: + resfile = "higgsCombine_%s_%s.MultiDimFit.mH125.root"%(_name,poi) + if os.path.isfile(resfile): + print " --> Storing ",_fit.split(":")[0],": %s/higgsCombine_%s_%s.root"%(pdir,_name,poi) + run("mv %s %s/higgsCombine_%s_%s.root"%(resfile,pdir,_name,poi)) + if poi in ["r_ggH","r_VBF","r_top","r_VH"]: + translate_json = "pois_mu.json" + elif poi=='CMS_zz4l_fai1': + if 'ALT_0M' in opt.ext: translate_json = "pois_fa3.json" + elif 'ALT_0PH' in opt.ext: translate_json = "pois_fa2.json" + elif 'ALT_L1Zg' in opt.ext: translate_json = "pois_flambda1zgamma.json" + elif 'ALT_L1' in opt.ext: translate_json = "pois_flambda1.json" + else: + print "Warning: unknown poi. 
Use r as default" + translate_json = "pois_mu.json" + plotcmd = "cd %s; plot1DScan.py higgsCombine_%s_%s.root --y-cut 30 --y-max 30 -o %s_%s --POI %s --main-label %s --translate %s/src/flashggFinalFit/Plots/%s; cd .."%(pdir,_name,poi,_name,poi,poi,mainlabel,os.environ['CMSSW_BASE'],translate_json) + print (plotcmd) + run(plotcmd) + diff --git a/Combine/RunFits.py b/Combine/RunFits.py index c85a0d8b..a6ad8d25 100644 --- a/Combine/RunFits.py +++ b/Combine/RunFits.py @@ -19,7 +19,7 @@ def get_options(): parser.add_option('--doObserved', dest='doObserved', action="store_true", default=False, help="Fit to data") parser.add_option('--snapshotWSFile', dest='snapshotWSFile', default='', help="Full path to snapshot WS file (use when running observed statonly as nuisances are froze at postfit values)") parser.add_option('--commonOpts', dest='commonOpts', default="--cminDefaultMinimizerStrategy 0 --X-rtd MINIMIZER_freezeDisassociatedParams --X-rtd MINIMIZER_multiMin_hideConstants --X-rtd MINIMIZER_multiMin_maskConstraints --X-rtd MINIMIZER_multiMin_maskChannels=2", help="Common combine options for running fits") - parser.add_option('--batch', dest='batch', default='condor', help='Batch: [crab,condor/SGE/IC]') + parser.add_option('--batch', dest='batch', default='condor', help='Batch: [crab,condor/SGE/IC/lxbatch]') parser.add_option('--queue', dest='queue', default='workday', help='Queue e.g. 
for condor=workday, for IC=hep.q') parser.add_option('--subOpts', dest='subOpts', default="", help="Submission options") parser.add_option('--doCustomCrab', dest='doCustomCrab', default=False, action="store_true", help="Load crab options from custom_crab.py file") @@ -32,9 +32,11 @@ def leave(): print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HGG SUBMIT FITS RUN II (END) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " exit(1) -def run(cmd): - print "%s\n\n"%cmd - os.system(cmd) +def run(cmd,opt): + if opt.dryRun: + print "%s\n\n"%cmd + else: + os.system(cmd) def getPdfIndicesFromJson(pdfjson): pdfStr = "--setParameters " @@ -45,7 +47,7 @@ def getPdfIndicesFromJson(pdfjson): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Options: # Expected/Observed -exp_opts = '' if opt.doObserved else '--expectSignal 1 -t -1' +exp_opts = '' if opt.doObserved else '-t -1' # Common opts for combine jobs common_opts = opt.commonOpts @@ -63,11 +65,13 @@ def getPdfIndicesFromJson(pdfjson): if opt.subOpts != "": sub_opts += "\n%s"%opt.subOpts sub_opts += "\'" job_opts = "--job-mode condor %s"%sub_opts -elif( opt.batch == 'SGE' )|( opt.batch == 'IC' ): +elif( opt.batch == 'SGE' )|( opt.batch == 'IC' )|( opt.batch == 'lxbatch' ): sub_opts = "--sub-opts=\'-q %s"%opt.queue if opt.subOpts != "": sub_opts += " %s"%opt.subOpts sub_opts += "\'" - job_opts = "--job-mode SGE %s"%sub_opts + job_opts = "--job-mode %s %s"%(opt.batch,sub_opts) +elif opt.batch == "local": + print "--> Will print the commands to run combine without combineTool interactively\n\n" else: print " --> [ERROR] Batch mode (%s) not supported. 
Leaving"%opt.batch leave() @@ -85,6 +89,7 @@ def getPdfIndicesFromJson(pdfjson): points = inputs['points'].split("+") fit_opts = inputs['fit_opts'].split("+") + # Loop over fits and set running for fidx in range(len(fits)): _fit = fits[fidx] @@ -96,7 +101,7 @@ def getPdfIndicesFromJson(pdfjson): # If ALL in fit_opts: replace by list of constrained nuisances in workspace if "ALL" in _fit_opts: - fd = ROOT.TFile("Datacard%s_%s.root"%(opt.ext,opt.mode)) + fd = ROOT.TFile("Datacard_%s.root"%(opt.ext)) ws = fd.Get("w") nuisances = ws.obj("ModelConfig").GetNuisanceParameters().contentsString() _fit_opts = re.sub("ALL",nuisances,_fit_opts) @@ -113,11 +118,16 @@ def getPdfIndicesFromJson(pdfjson): pdf_opts = getPdfIndicesFromJson("pdfindex%s_observed.json"%opt.ext) if opt.setPdfIndices else '' else: pdf_opts = getPdfIndicesFromJson("pdfindex%s.json"%opt.ext) if opt.setPdfIndices else '' + # add this to distinguish different fits with same POI + _name += "_"+opt.ext + # File to load workspace if opt.snapshotWSFile != '': d_opts = '-d %s --snapshotName MultiDimFit'%opt.snapshotWSFile else: #d_opts = '-d ../Datacard%s_%s.root'%(opt.ext,opt.mode) - d_opts = '-d %s/src/flashggFinalFit/Combine/Datacard%s_%s.root'%(os.environ['CMSSW_BASE'],opt.ext,opt.mode) + if "-d" in _fit_opts: d_opts = '' + else: d_opts = '-d %s/src/flashggFinalFit/Combine/Datacard_%s.root'%(os.environ['CMSSW_BASE'],opt.ext) + d_opts = '-d %s/src/flashggFinalFit/Combine/Datacard_%s.root'%(os.environ['CMSSW_BASE'],opt.ext) # If setParameters already in _fit_opts then add to fit opts and set pdfOpts = '' if( "setParameters" in _fit_opts )&( pdf_opts != '' ): @@ -131,61 +141,95 @@ def getPdfIndicesFromJson(pdfjson): if _fit.split(":")[0] == "bestfit": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" for poi in _fitpois: - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M 
MultiDimFit -m 125 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s %s %s %s"%(d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts,job_opts) + run(fitcmd,opt) # For singles point if _fit.split(":")[0] == "singles": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" for poi in _fitpois: - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s --algo singles %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s --algo singles %s %s %s"%(d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s --algo singles %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts,job_opts) + run(fitcmd,opt) # For fixed point if _fit.split(":")[0] == "fixed": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" for poi in _fitpois: - fitcmd = "cd runFits%s_%s; 
source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125 %s --floatOtherPOIs 1 %s -n _%s_%s --algo fixed %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s --algo fixed %s %s %s"%(d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s --algo fixed %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,_fit_opts,pdf_opts,common_opts,job_opts) + run(fitcmd,opt) # For asymptotic limit if _fit.split(":")[0] == "AsymptoticLimit": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" for poi in _fitpois: - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M AsymptoticLimits -m 125 %s %s -n _%s_%s --redefineSignalPOI %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M AsymptoticLimits -m 125.38 %s %s -n _%s_%s --redefineSignalPOI %s %s %s %s"%(d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M AsymptoticLimits -m 125.38 %s %s -n _%s_%s --redefineSignalPOI %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_fit_opts,pdf_opts,common_opts,job_opts) + run(fitcmd,opt) # For 1D scan when profiling other pois elif _fit.split(":")[0] == "profile1D": + if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += 
" --freezeParameters allConstrainedNuisances" for poi in _fitpois: - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s --algo grid --points %s --alignEdges 1 %s %s %s"%(d_opts,exp_opts,_name,poi,poi,_points.split(":")[0],_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125.38 %s --floatOtherPOIs 1 %s -n _%s_%s -P %s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) + run(fitcmd,opt) # For 1D scan when fixing other pois elif _fit.split(":")[0] == "scan1D": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" for poi in _fitpois: - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125 %s --floatOtherPOIs 0 %s -n _%s_%s -P %s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s --floatOtherPOIs 0 %s -n _%s_%s -P %s --algo grid --points %s --alignEdges 1 %s %s 
%s"%(d_opts,exp_opts,_name,poi,poi,_points.split(":")[0],_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125.38 %s --floatOtherPOIs 0 %s -n _%s_%s -P %s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,poi,d_opts,exp_opts,_name,poi,poi,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) + print(fitcmd) + run(fitcmd,opt) # For 2D scan: fix other pois to 0 elif _fit.split(":")[0] == "profile2D": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" _poisStr = "%s_vs_%s"%(_fitpois[0],_fitpois[1]) - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125 %s -P %s -P %s --floatOtherPOIs 1 %s -n _%s_%s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,_poisStr,d_opts,_fitpois[0],_fitpois[1],exp_opts,_name,_poisStr,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s -P %s -P %s --floatOtherPOIs 1 %s -n _%s_%s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s"%(d_opts,_fitpois[0],_fitpois[1],exp_opts,_name,_poisStr,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125.38 %s -P %s -P %s --floatOtherPOIs 1 %s -n _%s_%s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,_poisStr,d_opts,_fitpois[0],_fitpois[1],exp_opts,_name,_poisStr,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) + run(fitcmd,opt) # For 2D 
scan: fix other pois to 0 elif _fit.split(":")[0] == "scan2D": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" _poisStr = "%s_vs_%s"%(_fitpois[0],_fitpois[1]) - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125 %s -P %s -P %s --floatOtherPOIs 0 %s -n _%s_%s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,_poisStr,d_opts,_fitpois[0],_fitpois[1],exp_opts,_name,_poisStr,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) - run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s -P %s -P %s --floatOtherPOIs 0 %s -n _%s_%s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s"%(d_opts,_fitpois[0],_fitpois[1],exp_opts,_name,_poisStr,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_%s -M MultiDimFit -m 125.38 %s -P %s -P %s --floatOtherPOIs 0 %s -n _%s_%s --algo grid --points %s --alignEdges 1 --split-points %s %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,_poisStr,d_opts,_fitpois[0],_fitpois[1],exp_opts,_name,_poisStr,_points.split(":")[0],_points.split(":")[1],_fit_opts,pdf_opts,common_opts,job_opts) + + run(fitcmd,opt) # Robust Hesse elif _fit.split(":")[0] == "robustHesse": if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" _poi = _fitpois[0] - fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s -M MultiDimFit -m 125 %s -P %s --floatOtherPOIs 1 %s -n _%s --robustHesse 1 --robustHesseSave 1 --saveFitResult %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,d_opts,_poi,exp_opts,_name,_fit_opts,pdf_opts,common_opts,job_opts) - 
run(fitcmd) + if opt.batch == 'local': + fitcmd = "combine -M MultiDimFit -m 125.38 %s -P %s --floatOtherPOIs 1 %s -n _%s --robustHesse 1 --robustHesseSave 1 --saveFitResult %s %s %s"%(d_opts,_poi,exp_opts,_name,_fit_opts,pdf_opts,common_opts) + else: + fitcmd = "cd runFits%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s -M MultiDimFit -m 125.38 %s -P %s --floatOtherPOIs 1 %s -n _%s --robustHesse 1 --robustHesseSave 1 --saveFitResult %s %s %s %s; cd .."%(opt.ext,opt.mode,_name,d_opts,_poi,exp_opts,_name,_fit_opts,pdf_opts,common_opts,job_opts) + run(fitcmd,opt) + + + + diff --git a/Combine/RunImpacts.py b/Combine/RunImpacts.py new file mode 100644 index 00000000..9b5c1dec --- /dev/null +++ b/Combine/RunImpacts.py @@ -0,0 +1,139 @@ +# Script to submit fit jobs +import ROOT +import os, sys +import re +from optparse import OptionParser +import glob +import json + +print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HGG SUBMIT IMPACTS RUN II ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " + +# subOpts for SGE e.g.: -l h_rt=3:0:0 -l h_vmem=24G -pe hep.pe 2 + +def get_options(): + parser = OptionParser() + parser.add_option('--inputJson', dest='inputJson', default='inputs.json', help="Input json file to define fits") + parser.add_option('--mode', dest='mode', default='mu_inclusive', help="Type of fit") + parser.add_option('--ext', dest='ext', default='', help="Running over Datacard with extension") + parser.add_option('--setPdfIndices', dest='setPdfIndices', action="store_true", default=False, help="Set pdf indixes from pdfindex.json") + parser.add_option('--doObserved', dest='doObserved', action="store_true", default=False, help="Fit to data") + parser.add_option('--doFits', dest='doFits', action="store_true", default=False, help="run one scan per nuisance parameter. 
Needs initialFit to have been run earlier.") + parser.add_option('--commonOpts', dest='commonOpts', default="--cminDefaultMinimizerStrategy 0 --X-rtd MINIMIZER_freezeDisassociatedParams --X-rtd MINIMIZER_multiMin_hideConstants --X-rtd MINIMIZER_multiMin_maskConstraints --X-rtd MINIMIZER_multiMin_maskChannels=2", help="Common combine options for running fits") + parser.add_option('--batch', dest='batch', default='condor', help='Batch: [crab,condor/SGE/IC/lxbatch]') + parser.add_option('--queue', dest='queue', default='espresso', help='Queue e.g. for condor=workday, for IC=hep.q') + parser.add_option('--subOpts', dest='subOpts', default="", help="Submission options") + parser.add_option('--dryRun', dest='dryRun', action="store_true", default=False, help="Only create submission files") + return parser.parse_args() +(opt,args) = get_options() + +def leave(): + print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HGG SUBMIT IMPACTS RUN II (END) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " + exit(1) + +def run(cmd,opt): + if opt.dryRun: + print "%s\n\n"%cmd + else: + os.system(cmd) + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Options: +# Expected/Observed +exp_opts = '' if opt.doObserved else '-t -1' + +# Common opts for combine jobs +common_opts = opt.commonOpts + +# Extract path to WS file + + +# Options for job submission +if opt.batch == 'condor': + sub_opts = "--sub-opts=\'+JobFlavour = \"%s\""%opt.queue + if opt.subOpts != "": sub_opts += "\n%s"%opt.subOpts + sub_opts += "\'" + job_opts = "--job-mode condor %s"%sub_opts +elif( opt.batch == 'SGE' )|( opt.batch == 'IC' )|( opt.batch == 'lxbatch' ): + sub_opts = "--sub-opts=\'-q %s"%opt.queue + if opt.subOpts != "": sub_opts += " %s"%opt.subOpts + sub_opts += "\'" + job_opts = "--job-mode %s %s"%(opt.batch,sub_opts) +elif opt.batch == "local": + print "--> Will print the commands to run combine without combineTool interactively\n\n" +else: + print " --> [ERROR] Batch mode (%s) not supported. 
Leaving"%opt.batch + leave() +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# Make folder for running fits if does not exist +if not os.path.isdir("runImpacts%s_%s"%(opt.ext,opt.mode)): os.system("mkdir runImpacts%s_%s"%(opt.ext,opt.mode)) + +# Read json file +with open( opt.inputJson ) as jsonfile: inputs = json.load(jsonfile)[opt.mode] + +# Extract info +pois = inputs['pois'].split(",") +fits = inputs['fits'].split("+") +points = inputs['points'].split("+") +fit_opts = inputs['fit_opts'].split("+") + +# Loop over fits and set running +for fidx in range(len(fits)): + _fit = fits[fidx] + if _fit.split(":")[2] == "all": _fitpois = pois + else: _fitpois = _fit.split(":")[2].split(",") + + _points = points[fidx] + # robustFit is needed, so it is enforced below. Do not repeat twice in case it is given. + _fit_opts = fit_opts[fidx].replace("--robustFit=1","").replace("--robustFit 1","") + # in case of doFits, combineTool adds floatOtherPOIs 1, so do not repeat twice in case it is given + if opt.doFits: + _fit_opts = _fit_opts.replace("--floatOtherPOIs=1","").replace("--floatOtherPOIs 1","") + _fit_opts = _fit_opts.replace("--saveInactivePOI=1","").replace("--saveInactivePOI 1","") + _fit_opts = _fit_opts.replace("--saveWorkspace","") + + # If ALL in fit_opts: replace by list of constrained nuisances in workspace + if "ALL" in _fit_opts: + fd = ROOT.TFile("Datacard_%s.root"%(opt.ext)) + ws = fd.Get("w") + nuisances = ws.obj("ModelConfig").GetNuisanceParameters().contentsString() + _fit_opts = re.sub("ALL",nuisances,_fit_opts) + ws.Delete() + fd.Close() + + # Dry run + if opt.dryRun: _fit_opts += " --dry-run" + _name = "%s_%s"%(_fit.split(":")[0],_fit.split(":")[1]) + + # Setting PDF indices + if opt.doObserved: + _name += "_obs" + pdf_opts = getPdfIndicesFromJson("pdfindex%s_observed.json"%opt.ext) if opt.setPdfIndices else '' + else: pdf_opts = getPdfIndicesFromJson("pdfindex%s.json"%opt.ext) if opt.setPdfIndices else '' + + # add 
this to distinguish different fits with same POI + _name += "_"+opt.ext + + d_opts = '-d %s/src/flashggFinalFit/Combine/Datacard_%s.root'%(os.environ['CMSSW_BASE'],opt.ext) + + # If setParameters already in _fit_opts then add to fit opts and set pdfOpts = '' + if( "setParameters" in _fit_opts )&( pdf_opts != '' ): + pdfstr = re.sub("--setParameters ","",pdf_opts) + _fit_opts = re.sub("--setParameters ","--setParameters %s,"%pdfstr,_fit_opts) + pdf_opts = '' + + # Running different types of fits... + + # For best fit point + if _fit.split(":")[0] == "bestfit": + if( "statonly" in _fit.split(":")[1] )&( "freezeParameters" not in _fit_opts ): _fit_opts += " --freezeParameters allConstrainedNuisances" + impactcmd1 = "cd runImpacts%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_initialFit -M Impacts -m 125.38 %s %s --doInitialFit --robustFit 1 -n _%s_initialFit %s %s %s"%(opt.ext,opt.mode,_name,d_opts,exp_opts,_name,_fit_opts,pdf_opts,common_opts) + impactcmd2 = "cd runImpacts%s_%s; source /cvmfs/cms.cern.ch/crab3/crab.sh; combineTool.py --task-name %s_doFits -M Impacts -m 125.38 %s %s --doFits -n _%s_initialFit %s %s %s"%(opt.ext,opt.mode,_name,d_opts,exp_opts,_name,_fit_opts,pdf_opts,common_opts) + if opt.batch != 'local': + impactcmd1 += " %s"%job_opts + impactcmd2 += " %s"%job_opts + if not opt.doFits: + run(impactcmd1,opt) + else: + print(impactcmd2) + run(impactcmd2,opt) diff --git a/Combine/RunText2Workspace.py b/Combine/RunText2Workspace.py index b8782e16..8fa16ac0 100644 --- a/Combine/RunText2Workspace.py +++ b/Combine/RunText2Workspace.py @@ -8,8 +8,8 @@ def get_options(): parser = OptionParser() parser.add_option('--mode', dest='mode', default='mu_inclusive', help="Physics Model (specified in models.py)") parser.add_option('--ext',dest='ext', default="", help='In case running over datacard with extension') - parser.add_option('--common_opts',dest='common_opts', default="-m 125 higgsMassRange=122,128", help='Common options') - 
parser.add_option('--batch', dest='batch', default='SGE', help="Batch system [SGE,IC,condor]") + parser.add_option('--common_opts',dest='common_opts', default="-m 125.38 higgsMassRange=122,128", help='Common options') + parser.add_option('--batch', dest='batch', default='condor', help="Batch system [SGE,IC,condor]") parser.add_option('--queue', dest='queue', default='workday', help="Condor queue") parser.add_option('--ncpus', dest='ncpus', default=4, type='int', help="Number of cpus") parser.add_option('--dryRun', dest='dryRun', action="store_true", default=False, help="Only create submission files") @@ -29,35 +29,39 @@ def run(cmd): leave() print " --> Running text2workspace for model: %s"%opt.mode -print " --> Input: Datacard%s.txt --> Output: Datacard%s_%s.root"%(opt.ext,opt.ext,opt.mode) +print " --> Input: Datacard_%s.txt --> Output: Datacard_%s.root"%(opt.ext,opt.ext) if not os.path.isdir("./t2w_jobs"): os.system("mkdir ./t2w_jobs") # Open submission file to write to -fsub = open("./t2w_jobs/t2w_%s%s.sh"%(opt.mode,opt.ext),"w") + +fsub = open("./t2w_jobs/t2w_%s.sh"%(opt.ext),"w") fsub.write("#!/bin/bash\n\n") +fsub.write("source /cvmfs/cms.cern.ch/cmsset_default.sh \n") + fsub.write("cd %s\n\n"%os.environ['PWD']) fsub.write("eval `scramv1 runtime -sh`\n\n") -fsub.write("text2workspace.py Datacard%s.txt -o Datacard%s_%s.root %s %s"%(opt.ext,opt.ext,opt.mode,opt.common_opts,models[opt.mode])) +fsub.write("text2workspace.py Datacard_%s.txt -o Datacard_%s.root %s %s\n"%(opt.ext,opt.ext,opt.common_opts,models[opt.mode])) fsub.close() # Change permission for file -os.system("chmod 775 ./t2w_jobs/t2w_%s%s.sh"%(opt.mode,opt.ext)) +os.system("chmod 775 ./t2w_jobs/t2w_%s.sh"%(opt.ext)) # If using condor then also write submission file if opt.batch == 'condor': - f_cdr = open("./t2w_jobs/t2w_%s%s.sub"%(opt.mode,opt.ext),"w") - f_cdr.write("executable = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s%s.sh\n"%(os.environ['CMSSW_BASE'],opt.mode,opt.ext)) - 
f_cdr.write("output = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s%s.sh.out\n"%(os.environ['CMSSW_BASE'],opt.mode,opt.ext)) - f_cdr.write("error = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s%s.sh.err\n"%(os.environ['CMSSW_BASE'],opt.mode,opt.ext)) - f_cdr.write("log = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s%s.sh.log\n"%(os.environ['CMSSW_BASE'],opt.mode,opt.ext)) + f_cdr = open("./t2w_jobs/t2w_%s.sub"%(opt.ext),"w") + f_cdr.write("executable = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s.sh\n"%(os.environ['CMSSW_BASE'],opt.ext)) + f_cdr.write("output = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s.sh.out\n"%(os.environ['CMSSW_BASE'],opt.ext)) + f_cdr.write("error = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s.sh.err\n"%(os.environ['CMSSW_BASE'],opt.ext)) + f_cdr.write("log = %s/src/flashggFinalFit/Combine/t2w_jobs/t2w_%s.sh.log\n"%(os.environ['CMSSW_BASE'],opt.ext)) f_cdr.write("+JobFlavour = \"%s\"\n"%opt.queue) + f_cdr.write('MY.SingularityImage = "/cvmfs/unpacked.cern.ch/gitlab-registry.cern.ch/cms-cat/cmssw-lxplus/cmssw-el7-lxplus:latest/"\n\n') f_cdr.write("RequestCpus = %g\n"%opt.ncpus) f_cdr.write("queue\n") f_cdr.close() # Submit -if opt.batch == "condor": subcmd = "condor_submit ./t2w_jobs/t2w_%s%s.sub"%(opt.mode,opt.ext) -elif opt.batch == 'local': subcmd = "bash ./t2w_jobs/t2w_%s%s.sh"%(opt.mode,opt.ext) -else: subcmd = "qsub -q hep.q -l h_rt=6:0:0 -l h_vmem=24G ./t2w_jobs/t2w_%s%s.sh"%(opt.mode,opt.ext) +if opt.batch == "condor": subcmd = "condor_submit ./t2w_jobs/t2w_%s.sub"%(opt.ext) +elif opt.batch == 'local': subcmd = "bash ./t2w_jobs/t2w_%s.sh"%(opt.ext) +else: subcmd = "bsub -q cmsan -o ./t2w_jobs/t2w_%s.log -e ./t2w_jobs/t2w_%s.err ./t2w_jobs/t2w_%s.sh"%(opt.ext,opt.ext,opt.ext) if opt.dryRun: print "[DRY RUN] %s"%subcmd else: run(subcmd) diff --git a/Combine/extractYields.py b/Combine/extractYields.py index b00d2448..5870cb82 100644 --- a/Combine/extractYields.py +++ b/Combine/extractYields.py @@ -19,7 +19,8 @@ def rooiter(x): 
ret = iter.Next() def procToProcS0(p): - if "ggH" in p: return "ggh" + if "ALT" in p: return p.replace("qqH","qqh") + elif "ggH" in p: return "ggh" elif "qqH" in p: return "qqh" elif "WH_had" in p: return "wh" elif "ZH_had" in p: return "zh" @@ -32,6 +33,7 @@ def procToProcS0(p): elif "tHq" in p: return "thq" elif "tHW" in p: return "thw" elif "bbH" in p: return "bbh" + elif "vH" in p: return "vh" else: print " --> [ERROR] proc s0 not realised for process %s. Leaving"%p sys.exit(1) diff --git a/Combine/inputs.json b/Combine/inputs.json index 8cee5a08..7531ecb4 100644 --- a/Combine/inputs.json +++ b/Combine/inputs.json @@ -1,15 +1,37 @@ + { - "mu_inclusive":{ - "pois":"r", - "fits":"profile1D:syst:r", - "points":"20:1", - "fit_opts":"--setParameterRanges r=0,2 --saveSpecifiedNuis all" - }, - - "mu":{ - "pois":"r_ggH,r_VBF,r_VH,r_top", - "fits":"profile1D:syst:all", - "points":"20:1", - "fit_opts":"--saveSpecifiedNuis all --saveInactivePOI 1 --cminApproxPreFitTolerance=10" + "xsec":{ + "pois":"r_ggH,r_VH,r_VBF,r_top", + "fits":"profile1D:syst:all+bestfit:syst:all", + "points":"21:1+:", + "fit_opts":" --redefineSignalPOIs r_top,r_VBF,r_VH,r_ggH --setParameters r_ggH=1,r_top=1,r_VH=1,r_VBF=1 --setParameterRanges r_ggH=0.0,2.0:r_VBF=0.0,2.0:r_VH=0.0,3.0:r_top=0.0,4.0 --X-rtd FITTER_NEW_CROSSING_ALGO --X-rtd FITTER_BOUND --saveSpecifiedNuis all --saveInactivePOI 1 + --redefineSignalPOIs r_top,r_VBF,r_VH,r_ggH --setParameters r_ggH=1,r_top=1,r_VH=1,r_VBF=1 --setParameterRanges r_ggH=0.0,2.0:r_VBF=0.0,2.0:r_VH=0.0,3.0:r_top=0.0,4.0 --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" --saveInactivePOI 1 --saveWorkspace" + }, + "ALT_0M":{ + "pois":"CMS_zz4l_fai1", + "fits":"profile1D:syst:all+bestfit:syst:all", + "points":"41:1+:", + "fit_opts":" --redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --saveSpecifiedNuis all --saveInactivePOI 1 
--setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.001,0.001 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" + --redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.0008,0.0008 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.5 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" --saveInactivePOI 1 --saveWorkspace" + + }, + + + + "ALT_0PH":{ + "pois":"CMS_zz4l_fai1", + "fits":"profile1D:syst:all+bestfit:syst:all", + "points":"61:1+:", + "fit_opts":" --redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --saveSpecifiedNuis all --saveInactivePOI 1 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.005,0.005 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" + --redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.005,0.005 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" --saveInactivePOI 1 --saveWorkspace" + }, + "ALT_L1":{ + "pois":"CMS_zz4l_fai1", + "fits":"profile1D:syst:all+bestfit:syst:all", + "points":"61:1+:", + "fit_opts":"--redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --saveSpecifiedNuis all --saveInactivePOI 1 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.0005,0.0005 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. 
--robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" + --redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.0005,0.0005 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" --saveInactivePOI 1 --saveWorkspace" + }, + "ALT_L1Zg":{ + "pois":"CMS_zz4l_fai1", + "fits":"profile1D:syst:all+bestfit:syst:all", + "points":"61:1+:", + "fit_opts":"--redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --saveSpecifiedNuis all --saveInactivePOI 1 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.005,0.005 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. --robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" + --redefineSignalPOIs muV,muf,CMS_zz4l_fai1 --setParameterRanges muV=0.0,4.0:muf=0.0,10.0:CMS_zz4l_fai1=-0.001,0.001 --setParameters muV=1.,CMS_zz4l_fai1=0.,muf=1. 
--robustFit=1 --setRobustFitAlgo=Minuit2,Migrad --X-rtd FITTER_NEW_CROSSING_ALGO --setRobustFitTolerance=0.1 --X-rtd FITTER_NEVER_GIVE_UP --X-rtd FITTER_BOUND --cminFallbackAlgo \"Minuit2,0:1.\" --saveInactivePOI 1 --saveWorkspace" } } diff --git a/Combine/models.py b/Combine/models.py index 19dd050c..5c8b1d37 100644 --- a/Combine/models.py +++ b/Combine/models.py @@ -1,244 +1,271 @@ models = { - "mu_inclusive":"", + "mu_inclusive":"", + + "mu":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ + --PO \"map=.*/ggH.*hgg:r_ggH[1,0,3]\" \ + --PO \"map=.*/qqH.*hgg:r_VBF[1,0,3]\" \ + --PO \"map=.*/ttH.*hgg:r_top[1,-1,2]\" \ + --PO \"map=.*/ZH.*hgg:r_VH[1,-1,2]\" \ + --PO \"map=.*/WH.*hgg:r_VH[1,-1,2]\"", - "mu":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ ---PO \"map=.*/ggH.*:r_ggH[1,0,2]\" \ ---PO \"map=.*/bbH.*:r_ggH[1,0,2]\" \ ---PO \"map=.*/qqH.*:r_VBF[1,0,3]\" \ ---PO \"map=.*/WH_had.*:r_VH[1,0,3]\" \ ---PO \"map=.*/ZH_had.*:r_VH[1,0,3]\" \ ---PO \"map=.*/ggZH_had.*:r_VH[1,0,3]\" \ ---PO \"map=.*/WH_lep.*:r_VH[1,0,3]\" \ ---PO \"map=.*/ZH_lep.*:r_VH[1,0,3]\" \ ---PO \"map=.*/ggZH_ll.*:r_VH[1,0,3]\" \ ---PO \"map=.*/ggZH_nunu.*:r_VH[1,0,3]\" \ ---PO \"map=.*/ttH.*:r_top[1,0,3]\" \ ---PO \"map=.*/tHq.*:r_top[1,0,3]\" \ ---PO \"map=.*/tHW.*:r_top[1,0,3]\"", + "cp":"-P HiggsAnalysis.CombinedLimit.HiggsJPC:twoHypothesisHiggs \ + --PO=muFloating", - "stage0":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ ---PO \"map=.*/ggH.*:r_ggH[1,0,2]\" \ ---PO \"map=.*/bbH.*:r_ggH[1,0,2]\" \ ---PO \"map=.*/qqH.*:r_qqH[1,0,3]\" \ ---PO \"map=.*/WH_had.*:r_qqH[1,0,3]\" \ ---PO \"map=.*/ZH_had.*:r_qqH[1,0,3]\" \ ---PO \"map=.*/ggZH_had.*:r_ggH[1,0,2]\" \ ---PO \"map=.*/WH_lep.*:r_WH_lep[1,0,5]\" \ ---PO \"map=.*/ZH_lep.*:r_ZH_lep[1,0,5]\" \ ---PO \"map=.*/ggZH_ll.*:r_ZH_lep[1,0,5]\" \ ---PO \"map=.*/ggZH_nunu.*:r_ZH_lep[1,0,5]\" \ ---PO \"map=.*/ttH.*:r_ttH[1,0,3]\" \ ---PO \"map=.*/tHq.*:r_tH[1,0,15]\" \ ---PO 
\"map=.*/tHW.*:r_tH[1,0,15]\"", + "ALT_0M":"-P HiggsAnalysis.CombinedLimit.FA3_Interference_JHU_rw_MengsMuV:FA3_Interference_JHU_rw_MengsMuV \ + --PO altSignal=ALT_0M", - "stage1p2_maximal":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ ---PO \"map=.*/ggH_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=RECO_0J_PTH_0_10_Tag.*/bbH.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=.*/ggH_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=.*/ggZH_had_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=RECO_0J_PTH_GT10_Tag.*/bbH.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=.*/ggH_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_0_60_Tag.*/bbH.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=.*/ggH_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_60_120_Tag.*/bbH.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=.*/ggH_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_120_200_Tag.*/bbH.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_0_60_Tag.*/bbH.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_60_120_Tag.*/bbH.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_120_200_Tag.*/bbH.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_.*:r_ggH_BSM[1,0,4]\" \ ---PO \"map=.*/ggZH_had_PTH_.*:r_ggH_BSM[1,0,4]\" \ ---PO \"map=RECO_PTH.*/bbH.*:r_ggH_BSM[1,0,4]\" \ ---PO 
\"map=.*/ggH_GE2J_MJJ_350_700_.*.*:r_ggH_VBFlike[1,0,6]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_.*.*:r_ggH_VBFlike[1,0,6]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_GT700_.*.*:r_ggH_VBFlike[1,0,6]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_.*.*:r_ggH_VBFlike[1,0,6]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ ---PO \"map=.*/qqH_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/WH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/ZH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/WH_lep.*hgg:r_WH_lep[1,0,6]\" \ ---PO \"map=.*/ZH_lep.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ggZH_ll.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ggZH_nunu.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ttH.*hgg:r_ttH[1,0,3]\" \ ---PO \"map=.*/tHq.*hgg:r_tH[1,0,15]\" \ ---PO \"map=.*/tHW.*hgg:r_tH[1,0,15]\"", + "ALT_0PH":"-P HiggsAnalysis.CombinedLimit.FA2_Interference_JHU_rw_MengsMuV:FA2_Interference_JHU_rw_MengsMuV \ + --PO altSignal=ALT_0PH", - "stage1p2_minimal":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ ---PO \"map=.*/ggH_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=RECO_0J_PTH_0_10_Tag.*/bbH.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=.*/ggH_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=.*/ggZH_had_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=RECO_0J_PTH_GT10_Tag.*/bbH.*:r_ggH_0J_high[1,0,2]\" \ 
---PO \"map=.*/ggH_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_0_60_Tag.*/bbH.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=.*/ggH_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_60_120_Tag.*/bbH.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=.*/ggH_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_120_200_Tag.*/bbH.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_0_60_Tag.*/bbH.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_60_120_Tag.*/bbH.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_120_200_Tag.*/bbH.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ ---PO \"map=RECO_PTH_200_300_Tag.*/bbH.*:r_ggH_BSM_low[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_300_450.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_PTH_300_450.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=RECO_PTH_300_450_Tag.*/bbH.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_450_650.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_PTH_450_650.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=RECO_PTH_450_650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_GT650.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_PTH_GT650.*:r_ggH_BSM_high[1,0,4]\" \ ---PO \"map=RECO_PTH_GT650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,4]\" \ ---PO 
\"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/qqH_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO 
\"map=.*/WH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/ZH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/WH_lep_PTV_0_75.*hgg:r_WH_lep_low[1,0,6]\" \ ---PO \"map=.*/WH_lep_PTV_75_150.*hgg:r_WH_lep_high[1,0,6]\" \ ---PO \"map=.*/WH_lep_PTV_150_250.*hgg:r_WH_lep_high[1,0,6]\" \ ---PO \"map=.*/WH_lep_PTV_GT250.*hgg:r_WH_lep_high[1,0,6]\" \ ---PO \"map=.*/ZH_lep.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ggZH_ll.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ggZH_nunu.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ttH_PTH_0_60.*hgg:r_ttH_low[1,0,5]\" \ ---PO \"map=.*/ttH_PTH_60_120.*hgg:r_ttH_medlow[1,0,3]\" \ ---PO \"map=.*/ttH_PTH_120_200.*hgg:r_ttH_medhigh[1,0,4]\" \ ---PO \"map=.*/ttH_PTH_200_300.*hgg:r_ttH_high[1,0,5]\" \ ---PO \"map=.*/ttH_PTH_GT300.*hgg:r_ttH_high[1,0,5]\" \ ---PO \"map=.*/tHq.*hgg:r_tH[1,0,15]\" \ ---PO \"map=.*/tHW.*hgg:r_tH[1,0,15]\"", + "ALT_L1":"-P HiggsAnalysis.CombinedLimit.FL1_Interference_JHU_rw_MengsMuV:FL1_Interference_JHU_rw_MengsMuV \ + --PO \"altSignal=ALT_L1\"", - "stage1p2_extended":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ ---PO \"map=.*/ggH_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=RECO_0J_PTH_0_10_Tag.*/bbH.*:r_ggH_0J_low[1,0,4]\" \ ---PO \"map=.*/ggH_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=.*/ggZH_had_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=RECO_0J_PTH_GT10_Tag.*/bbH.*:r_ggH_0J_high[1,0,2]\" \ ---PO \"map=.*/ggH_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_0_60_Tag.*/bbH.*:r_ggH_1J_low[1,0,4]\" \ ---PO \"map=.*/ggH_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ ---PO 
\"map=RECO_1J_PTH_60_120_Tag.*/bbH.*:r_ggH_1J_med[1,0,4]\" \ ---PO \"map=.*/ggH_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=RECO_1J_PTH_120_200_Tag.*/bbH.*:r_ggH_1J_high[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_0_60_Tag.*/bbH.*:r_ggH_2J_low[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_60_120_Tag.*/bbH.*:r_ggH_2J_med[1,0,4]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=RECO_GE2J_PTH_120_200_Tag.*/bbH.*:r_ggH_2J_high[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ ---PO \"map=.*/ggZH_had_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ ---PO \"map=RECO_PTH_200_300_Tag.*/bbH.*:r_ggH_BSM_low[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_300_450.*:r_ggH_BSM_med[1,0,4]\" \ ---PO \"map=.*/ggZH_had_PTH_300_450.*:r_ggH_BSM_med[1,0,4]\" \ ---PO \"map=RECO_PTH_300_450_Tag.*/bbH.*:r_ggH_BSM_med[1,0,4]\" \ ---PO \"map=.*/ggH_PTH_450_650.*:r_ggH_BSM_high[1,0,6]\" \ ---PO \"map=.*/ggZH_had_PTH_450_650.*:r_ggH_BSM_high[1,0,6]\" \ ---PO \"map=RECO_PTH_450_650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,6]\" \ ---PO \"map=.*/ggH_PTH_GT650.*:r_ggH_BSM_high[1,0,6]\" \ ---PO \"map=.*/ggZH_had_PTH_GT650.*:r_ggH_BSM_high[1,0,6]\" \ ---PO \"map=RECO_PTH_GT650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,6]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO 
\"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ ---PO \"map=.*/qqH_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/WH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/ZH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ ---PO \"map=.*/qqH_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/WH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ ---PO \"map=.*/ZH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" 
\ ---PO \"map=.*/WH_lep_PTV_0_75.*hgg:r_WH_lep_low[1,0,6]\" \ ---PO \"map=.*/WH_lep_PTV_75_150.*hgg:r_WH_lep_high[1,0,6]\" \ ---PO \"map=.*/WH_lep_PTV_150_250.*hgg:r_WH_lep_high[1,0,6]\" \ ---PO \"map=.*/WH_lep_PTV_GT250.*hgg:r_WH_lep_high[1,0,6]\" \ ---PO \"map=.*/ZH_lep.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ggZH_ll.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ggZH_nunu.*hgg:r_ZH_lep[1,0,6]\" \ ---PO \"map=.*/ttH_PTH_0_60.*hgg:r_ttH_low[1,0,5]\" \ ---PO \"map=.*/ttH_PTH_60_120.*hgg:r_ttH_medlow[1,0,3]\" \ ---PO \"map=.*/ttH_PTH_120_200.*hgg:r_ttH_medhigh[1,0,4]\" \ ---PO \"map=.*/ttH_PTH_200_300.*hgg:r_ttH_high[1,0,5]\" \ ---PO \"map=.*/ttH_PTH_GT300.*hgg:r_ttH_high[1,0,5]\" \ ---PO \"map=.*/tHq.*hgg:r_tH[1,0,15]\" \ ---PO \"map=.*/tHW.*hgg:r_tH[1,0,15]\"", + "ALT_L1Zg":"-P HiggsAnalysis.CombinedLimit.FL1Zg_Interference_JHU_rw_MengsMuV:FL1Zg_Interference_JHU_rw_MengsMuV \ + --PO altSignal=ALT_L1Zg", + "xsec":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ + --PO \"map=.*/ggH.*:r_ggH[1,0,2]\" \ + --PO \"map=.*/bbH.*:r_ggH[1,0,2]\" \ + --PO \"map=.*/qqH.*:r_VBF[1,0,3]\" \ + --PO \"map=.*/zh.*:r_VH[1,0,3]\" \ + --PO \"map=.*/wh_plus.*:r_VH[1,0,3]\" \ + --PO \"map=.*/wh_minus.*:r_VH[1,0,3]\" \ + --PO \"map=.*/ZH_had.*:r_VH[1,0,3]\" \ + --PO \"map=.*/ggZH_had.*:r_VH[1,0,3]\" \ + --PO \"map=.*/WPLUSH_lep.*:r_VH[1,0,3]\" \ + --PO \"map=.*/WMINUSH_lep.*:r_VH[1,0,3]\" \ + --PO \"map=.*/ZH_lep.*:r_VH[1,0,3]\" \ + --PO \"map=.*/ZH.*:r_VH[1,0,3]\" \ + --PO \"map=.*/WH.*:r_VH[1,0,3]\" \ + --PO \"map=.*/ggZH_ll.*:r_VH[1,0,3]\" \ + --PO \"map=.*/ggZH_nunu.*:r_VH[1,0,3]\" \ + --PO \"map=.*/ttH.*:r_top[1,0,3]\" \ + --PO \"map=.*/tHq.*:r_top[1,0,3]\" \ + --PO \"map=.*/tHW.*:r_top[1,0,3]\"", + + "stage0":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ + --PO \"map=.*/ggH.*:r_ggH[1,0,2]\" \ + --PO \"map=.*/bbH.*:r_ggH[1,0,2]\" \ + --PO \"map=.*/qqH.*:r_qqH[1,0,3]\" \ + --PO \"map=.*/WH_had.*:r_qqH[1,0,3]\" \ + --PO \"map=.*/ZH_had.*:r_qqH[1,0,3]\" \ + 
--PO \"map=.*/ggZH_had.*:r_ggH[1,0,2]\" \ + --PO \"map=.*/WH_lep.*:r_WH_lep[1,0,5]\" \ + --PO \"map=.*/ZH_lep.*:r_ZH_lep[1,0,5]\" \ + --PO \"map=.*/ggZH_ll.*:r_ZH_lep[1,0,5]\" \ + --PO \"map=.*/ggZH_nunu.*:r_ZH_lep[1,0,5]\" \ + --PO \"map=.*/ttH.*:r_ttH[1,0,3]\" \ + --PO \"map=.*/tHq.*:r_tH[1,0,15]\" \ + --PO \"map=.*/tHW.*:r_tH[1,0,15]\"", + + "stage1p2_maximal":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ + --PO \"map=.*/ggH_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=RECO_0J_PTH_0_10_Tag.*/bbH.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=.*/ggH_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=.*/ggZH_had_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=RECO_0J_PTH_GT10_Tag.*/bbH.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=.*/ggH_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_0_60_Tag.*/bbH.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=.*/ggH_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_60_120_Tag.*/bbH.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=.*/ggH_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_120_200_Tag.*/bbH.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_0_60_Tag.*/bbH.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_60_120_Tag.*/bbH.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ + --PO 
\"map=RECO_GE2J_PTH_120_200_Tag.*/bbH.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_.*:r_ggH_BSM[1,0,4]\" \ + --PO \"map=.*/ggZH_had_PTH_.*:r_ggH_BSM[1,0,4]\" \ + --PO \"map=RECO_PTH.*/bbH.*:r_ggH_BSM[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_350_700_.*.*:r_ggH_VBFlike[1,0,6]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_.*.*:r_ggH_VBFlike[1,0,6]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_GT700_.*.*:r_ggH_VBFlike[1,0,6]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_.*.*:r_ggH_VBFlike[1,0,6]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_.*:r_qqH_VBFlike[1,0,3]\" \ + --PO \"map=.*/qqH_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/WH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/ZH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/WH_lep.*hgg:r_WH_lep[1,0,6]\" \ + --PO \"map=.*/ZH_lep.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ggZH_ll.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ggZH_nunu.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ttH.*hgg:r_ttH[1,0,3]\" \ + --PO \"map=.*/tHq.*hgg:r_tH[1,0,15]\" \ + --PO \"map=.*/tHW.*hgg:r_tH[1,0,15]\"", + + "stage1p2_minimal":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ + --PO \"map=.*/ggH_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=RECO_0J_PTH_0_10_Tag.*/bbH.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=.*/ggH_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ + --PO 
\"map=.*/ggZH_had_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=RECO_0J_PTH_GT10_Tag.*/bbH.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=.*/ggH_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_0_60_Tag.*/bbH.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=.*/ggH_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_60_120_Tag.*/bbH.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=.*/ggH_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_120_200_Tag.*/bbH.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_0_60_Tag.*/bbH.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_60_120_Tag.*/bbH.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_120_200_Tag.*/bbH.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ + --PO \"map=RECO_PTH_200_300_Tag.*/bbH.*:r_ggH_BSM_low[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_300_450.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_PTH_300_450.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=RECO_PTH_300_450_Tag.*/bbH.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_450_650.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_PTH_450_650.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=RECO_PTH_450_650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_GT650.*:r_ggH_BSM_high[1,0,4]\" \ + 
--PO \"map=.*/ggZH_had_PTH_GT650.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=RECO_PTH_GT650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO 
\"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/qqH_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/WH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/ZH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_0_75.*hgg:r_WH_lep_low[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_75_150.*hgg:r_WH_lep_high[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_150_250.*hgg:r_WH_lep_high[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_GT250.*hgg:r_WH_lep_high[1,0,6]\" \ + --PO \"map=.*/ZH_lep.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ggZH_ll.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ggZH_nunu.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ttH_PTH_0_60.*hgg:r_ttH_low[1,0,5]\" \ + --PO \"map=.*/ttH_PTH_60_120.*hgg:r_ttH_medlow[1,0,3]\" \ + --PO \"map=.*/ttH_PTH_120_200.*hgg:r_ttH_medhigh[1,0,4]\" \ + --PO \"map=.*/ttH_PTH_200_300.*hgg:r_ttH_high[1,0,5]\" \ + --PO \"map=.*/ttH_PTH_GT300.*hgg:r_ttH_high[1,0,5]\" \ + --PO \"map=.*/tHq.*hgg:r_tH[1,0,15]\" \ + --PO \"map=.*/tHW.*hgg:r_tH[1,0,15]\"", + + "stage1p2_extended":"-P HiggsAnalysis.CombinedLimit.PhysicsModel:multiSignalModel \ + --PO \"map=.*/ggH_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_0J_PTH_0_10.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=RECO_0J_PTH_0_10_Tag.*/bbH.*:r_ggH_0J_low[1,0,4]\" \ + --PO \"map=.*/ggH_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=.*/ggZH_had_0J_PTH_GT10.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=RECO_0J_PTH_GT10_Tag.*/bbH.*:r_ggH_0J_high[1,0,2]\" \ + --PO \"map=.*/ggH_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_0_60.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_0_60_Tag.*/bbH.*:r_ggH_1J_low[1,0,4]\" \ + --PO \"map=.*/ggH_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ + --PO 
\"map=.*/ggZH_had_1J_PTH_60_120.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_60_120_Tag.*/bbH.*:r_ggH_1J_med[1,0,4]\" \ + --PO \"map=.*/ggH_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_1J_PTH_120_200.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=RECO_1J_PTH_120_200_Tag.*/bbH.*:r_ggH_1J_high[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_0_60.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_0_60_Tag.*/bbH.*:r_ggH_2J_low[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_60_120.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_60_120_Tag.*/bbH.*:r_ggH_2J_med[1,0,4]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_0_350_PTH_120_200.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=RECO_GE2J_PTH_120_200_Tag.*/bbH.*:r_ggH_2J_high[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ + --PO \"map=.*/ggZH_had_PTH_200_300.*:r_ggH_BSM_low[1,0,4]\" \ + --PO \"map=RECO_PTH_200_300_Tag.*/bbH.*:r_ggH_BSM_low[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_300_450.*:r_ggH_BSM_med[1,0,4]\" \ + --PO \"map=.*/ggZH_had_PTH_300_450.*:r_ggH_BSM_med[1,0,4]\" \ + --PO \"map=RECO_PTH_300_450_Tag.*/bbH.*:r_ggH_BSM_med[1,0,4]\" \ + --PO \"map=.*/ggH_PTH_450_650.*:r_ggH_BSM_high[1,0,6]\" \ + --PO \"map=.*/ggZH_had_PTH_450_650.*:r_ggH_BSM_high[1,0,6]\" \ + --PO \"map=RECO_PTH_450_650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,6]\" \ + --PO \"map=.*/ggH_PTH_GT650.*:r_ggH_BSM_high[1,0,6]\" \ + --PO \"map=.*/ggZH_had_PTH_GT650.*:r_ggH_BSM_high[1,0,6]\" \ + --PO \"map=RECO_PTH_GT650_Tag.*/bbH.*:r_ggH_BSM_high[1,0,6]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO 
\"map=.*/ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25.*:r_qqH_low_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25.*:r_qqH_low_mjj_high_pthjj[1,0,7]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25.*:r_qqH_high_mjj_low_pthjj[1,0,6]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25.*:r_qqH_high_mjj_high_pthjj[1,0,5]\" \ + --PO \"map=.*/qqH_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/WH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/ZH_had_GE2J_.*_PTH_GT200.*:r_qqH_BSM[1,0,4]\" \ + --PO \"map=.*/qqH_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO 
\"map=.*/WH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/ZH_had_GE2J_MJJ_60_120.*:r_qqH_VHhad[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_0_75.*hgg:r_WH_lep_low[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_75_150.*hgg:r_WH_lep_high[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_150_250.*hgg:r_WH_lep_high[1,0,6]\" \ + --PO \"map=.*/WH_lep_PTV_GT250.*hgg:r_WH_lep_high[1,0,6]\" \ + --PO \"map=.*/ZH_lep.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ggZH_ll.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ggZH_nunu.*hgg:r_ZH_lep[1,0,6]\" \ + --PO \"map=.*/ttH_PTH_0_60.*hgg:r_ttH_low[1,0,5]\" \ + --PO \"map=.*/ttH_PTH_60_120.*hgg:r_ttH_medlow[1,0,3]\" \ + --PO \"map=.*/ttH_PTH_120_200.*hgg:r_ttH_medhigh[1,0,4]\" \ + --PO \"map=.*/ttH_PTH_200_300.*hgg:r_ttH_high[1,0,5]\" \ + --PO \"map=.*/ttH_PTH_GT300.*hgg:r_ttH_high[1,0,5]\" \ + --PO \"map=.*/tHq.*hgg:r_tH[1,0,15]\" \ + --PO \"map=.*/tHW.*hgg:r_tH[1,0,15]\"", + "kappas_resolved":"-P HiggsAnalysis.CombinedLimit.LHCHCGModels:K1 --PO BRU=0", diff --git a/Combine/run_sequence.sh b/Combine/run_sequence.sh new file mode 100755 index 00000000..e11ac83a --- /dev/null +++ b/Combine/run_sequence.sh @@ -0,0 +1,112 @@ +outdate=`date +%F` + +STEP=0 +usage(){ + echo "Script to run fits and plots of fit output. dryRun option is for the fitting only, that can be run in batch." + echo "options:" + + echo "-h|--help) " + echo "-s|--step) " + echo "-d|--dryRun) " +} +# options may be followed by one colon to indicate they have a required argument +if ! 
options=$(getopt -u -o s:hd -l help,step:,dryRun -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +DROPT="" +if [[ $DR ]]; then + DROPT=" --dryRun " +fi + +fits=("xsec" "ALT_L1" "ALT_L1Zg" "ALT_0PH" "ALT_0M") + +fits=( "ALT_0M" ) + +if [[ $STEP == "t2w" ]]; then + for fit in ${fits[*]} + do + python RunText2Workspace.py --ext $fit --mode $fit --batch condor + done +elif [[ $STEP == "fit" ]]; then + for obs in " " + # " --doObserved " + do + for fit in ${fits[*]} + do + python RunFits.py --inputJson inputs.json --ext $fit --mode $fit ${DROPT} $obs + done + done +elif [[ $STEP == "collect" ]]; then + for obs in " " + # " --doObserved " + do + for fit in ${fits[*]} + do + python CollectFits.py --inputJson inputs.json --ext $fit --mode $fit $obs + done + done +elif [[ $STEP == "plot" ]]; then + for obs in " " + #" --doObserved " + do + for fit in ${fits[*]} + do + + python PlotScans.py --inputJson inputs.json --mode $fit --ext $fit --outdir $outdate-fits $obs + done + done +elif [[ $STEP == "impacts-initial" ]]; then + for fit in ${fits[*]} + do + python RunImpacts.py --inputJson inputs.json --ext $fit --mode $fit --queue workday ${DROPT} + done +elif [[ $STEP == "impacts-scans" ]]; then + for fit in ${fits[*]} + do + python RunImpacts.py --inputJson inputs.json --ext $fit --mode $fit --doFits --queue workday ${DROPT} + done +elif [[ $STEP == "impacts-collect" ]]; then + for fit in ${fits[*]} + do + #cd runImpacts${fit}_${fit} + echo "Making JSON file for fit $fit It might take time, depending on the number of parameters..." 
+ if [[ $fit == "xsec" ]]; then + pois=("r_ggH" "r_VBF" "r_VH" "r_top") + translate="pois_mu.json" + else + pois=("CMS_zz4l_fai1") + translate="pois_${fit}.json" + fi + for poi in ${pois[*]} + do + cd runImpacts${fit}_${fit} + #combineTool.py -M Impacts -n _bestfit_syst_${fit}_initialFit -d ../Datacard_${fit}.root -i impacts_${fit}.json -m 125.38 -o impacts_${poi}.json + echo " combineTool.py -M Impacts -n _bestfit_syst_${fit}_initialFit -d ../Datacard_${fit}.root -i impacts_${fit}.json -m 125.38 -o impacts_${poi}" + echo " ===> Producing impact plots for the *** main-only *** systematics for fit: === $fit === and POI: == $poi === " + cd - +# combineTool.py -M Impacts -n _bestfit_syst_${fit}_initialFit -d ../Datacard_${fit}.root -i impacts_${fit}.json -m 125.38 -o impacts_${poi}.json + plotImpacts.py -i runImpacts${fit}_${fit}/impacts_${poi}.json -o plot_impact/impacts_${poi}_${fit}_allpages --POI ${poi} --translate "../Plots/${translate}" + #plotImpacts.py -i impacts_${poi}.json -o ../plot_impact/impacts_${poi}_${fit}_allpages --POI ${poi} --translate "../../Plots/${translate}" + echo "plotImpacts.py -i impacts_${poi}.json -o ../plot_impact/impacts_${poi}_${fit}_all_pages --POI ${poi} --translate "../../Plots/${translate}" --max-pages "1 + done + done +else + echo "Step $STEP is not one among t2w,fit,plot. Exiting." +fi + diff --git a/Combine/run_sequence_breakdown_proc.sh b/Combine/run_sequence_breakdown_proc.sh new file mode 100755 index 00000000..165b5172 --- /dev/null +++ b/Combine/run_sequence_breakdown_proc.sh @@ -0,0 +1,122 @@ +outdate=`date +%F` + +STEP=0 +usage(){ + echo "Script to run fits and plots of fit output. dryRun option is for the fitting only, that can be run in batch." + echo "options:" + + echo "-h|--help) " + echo "-s|--step) " + echo "-d|--dryRun) " +} +# options may be followed by one colon to indicate they have a required argument +if ! 
options=$(getopt -u -o s:hd -l help,step:,dryRun -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +DROPT="" +if [[ $DR ]]; then + DROPT=" --dryRun " +fi + +#fits=("ALT_L1" "ALT_L1Zg" "ALT_0PH" ) +ext2=("GGH" "TTH" "VHHAD" "VBF" "VHMET" "VHLEP" ) +ext2=( "TTH" "VHMET" "NoSyst") +fits=("ALT_0M" ) + + + + + + + +if [[ $STEP == "txt" ]];then + for fit in ${fits[*]} + do + combineCards.py Datacard_${fit}.txt --ic=".*J_.*|.*RECO_PTH.*|.*VBFLIKEGGH.*" -s > Datacard_${fit}_GGH.txt + awk '!/pdfindex/ || /.*J_.*|.*RECO_PTH.*|.*VBFLIKEGGH.*/' Datacard_${fit}_GGH.txt > test.txt && mv test.txt Datacard_${fit}_GGH.txt + + combineCards.py Datacard_${fit}.txt --ic=".*TTH.*" -s > Datacard_${fit}_TTH.txt + awk '!/pdfindex/ || /.*TTH.*/' Datacard_${fit}_TTH.txt > test.txt && mv test.txt Datacard_${fit}_TTH.txt + + combineCards.py Datacard_${fit}.txt --ic=".*VHHAD.*" -s > Datacard_${fit}_VHHAD.txt + awk '!/pdfindex/ || /.*VHHAD.*/' Datacard_${fit}_VHHAD.txt > test.txt && mv test.txt Datacard_${fit}_VHHAD.txt + + combineCards.py Datacard_${fit}.txt --ic=".*VBFTOPO_ACVBF.*|.*VBFTOPO_ACGGH.*" -s > Datacard_${fit}_VBF.txt + awk '!/pdfindex/ || /.*VBFTOPO_ACVBF.*|.*VBFTOPO_ACGGH.*/' Datacard_${fit}_VBF.txt > test.txt && mv test.txt Datacard_${fit}_VBF.txt + + combineCards.py Datacard_${fit}.txt --ic=".*VH_MET.*" -s > Datacard_${fit}_VHMET.txt + awk '!/pdfindex/ || /.*VH_MET.*/' Datacard_${fit}_VHMET.txt > test.txt && mv test.txt Datacard_${fit}_VHMET.txt + + combineCards.py Datacard_${fit}.txt --ic=".*_LEP_Tag.*" -s > Datacard_${fit}_VHLEP.txt + awk '!/pdfindex/ || /.*_LEP_Tag.*/' Datacard_${fit}_VHLEP.txt > test.txt && mv test.txt Datacard_${fit}_VHLEP.txt + + 
done +elif [[ $STEP == "t2w" ]]; then + for fit in ${fits[*]} + do + for ext in ${ext2[*]} + do + echo python RunText2Workspace.py --ext ${fit}_${ext} --mode ${fit} + python RunText2Workspace.py --ext ${fit}_${ext} --mode ${fit} + done + done + +elif [[ $STEP == "fit" ]]; then + +for fit in ${fits[*]} + do + for obs in " " + # " --doObserved " + do + for ext in ${ext2[*]} + do + echo python RunFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode $fit ${DROPT} $obs + python RunFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode ${fit} ${DROPT} $obs + done + done + done +elif [[ $STEP == "collect" ]]; then + for obs in " " + # " --doObserved " + do + for fit in ${fits[*]} + do + for ext in ${ext2[*]} + do + python CollectFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode $fit $obs + done + done + done +elif [[ $STEP == "plot" ]]; then + for obs in " " + #" --doObserved " + do + for fit in ${fits[*]} + do + string="runFits${fit}_TTH_${fit}/profile1D_syst_${fit}_TTH_CMS_zz4l_fai1.root:TTH:2 runFits${fit}_VBF_${fit}/profile1D_syst_${fit}_VBF_CMS_zz4l_fai1.root:VBF:3 runFits${fit}_VHHAD_${fit}/profile1D_syst_${fit}_VHHAD_CMS_zz4l_fai1.root:VHHAD:4 runFits${fit}_VHMET_${fit}/profile1D_syst_${fit}_VHMET_CMS_zz4l_fai1.root:VH-MET:9 runFits${fit}_VHLEP_${fit}/profile1D_syst_${fit}_VHLEP_CMS_zz4l_fai1.root:VH-LEP:46" + plot1DScan.py runFits${fit}_GGH_${fit}/profile1D_syst_${fit}_GGH_CMS_zz4l_fai1.root --y-cut 4 --y-max 4 -o plots/Breakdown_${fit} --POI CMS_zz4l_fai1 --main-label GGH --translate ../Plots/pois_fa3.json --others $string + +# string="runFits${fit}_NoSyst_${fit}/profile1D_syst_${fit}_NoSyst_CMS_zz4l_fai1.root:NoSyst:2 " + # plot1DScan.py runFits${fit}_${fit}/profile1D_syst_${fit}_CMS_zz4l_fai1.root --y-cut 15 --y-max 15 -o plots/Breakdown_Syst_${fit} --POI CMS_zz4l_fai1 --main-label Syst --translate ../Plots/pois_fa3.json --others $string + done + done +else + echo "Step $STEP is not one among t2w,fit,plot. Exiting." 
+fi + diff --git a/Combine/run_sequence_breakdown_sum_proc.sh b/Combine/run_sequence_breakdown_sum_proc.sh new file mode 100755 index 00000000..918319ff --- /dev/null +++ b/Combine/run_sequence_breakdown_sum_proc.sh @@ -0,0 +1,108 @@ +outdate=`date +%F` + +STEP=0 +usage(){ + echo "Script to run fits and plots of fit output. dryRun option is for the fitting only, that can be run in batch." + echo "options:" + + echo "-h|--help) " + echo "-s|--step) " + echo "-d|--dryRun) " +} +# options may be followed by one colon to indicate they have a required argument +if ! options=$(getopt -u -o s:hd -l help,step:,dryRun -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +DROPT="" +if [[ $DR ]]; then + DROPT=" --dryRun " +fi + +fits=("ALT_0M") +ext2=("GGH" "G_T" "G_T_VH" "G_T_VH_VBF" "G_T_VH_VBF_MET" "NoSyst" ) +ext2=("NoSyst" ) +ext2=("G_T" "G_T_VH_VBF_MET" "NoSyst" ) + + +if [[ $STEP == "txt" ]];then + for fit in ${fits[*]} + do + + combineCards.py Datacard_${fit}_GGH.txt Datacard_ALT_0M_TTH.txt > Datacard_${fit}_G_T.txt + + combineCards.py Datacard_${fit}_G_T.txt Datacard_ALT_0M_VHHAD.txt > Datacard_${fit}_G_T_VH.txt + + combineCards.py Datacard_${fit}_G_T_VH.txt Datacard_ALT_0M_VBF.txt > Datacard_${fit}_G_T_VH_VBF.txt + + combineCards.py Datacard_${fit}_G_T_VH_VBF.txt Datacard_ALT_0M_VHMET.txt > Datacard_${fit}_G_T_VH_VBF_MET.txt + + done +elif [[ $STEP == "t2w" ]]; then + for fit in ${fits[*]} + do + for ext in "G_T" "G_T_VH" "G_T_VH_VBF" "G_T_VH_VBF_MET" + do + echo python RunText2Workspace.py --ext ${fit}_${ext} --mode ${fit} + python RunText2Workspace.py --ext ${fit}_${ext} --mode ${fit} + done + done + +elif [[ $STEP == "fit" ]]; then + +for 
fit in ${fits[*]} + do + for obs in " " + # " --doObserved " + do + for ext in ${ext2[*]} + do + echo python RunFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode $fit ${DROPT} $obs + python RunFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode ${fit} ${DROPT} $obs + done + done + done +elif [[ $STEP == "collect" ]]; then + for obs in " " + # " --doObserved " + do + for fit in ${fits[*]} + do + for ext in ${ext2[*]} + do + python CollectFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode $fit $obs + done + done + done + + +elif [[ $STEP == "plot" ]]; then + for obs in " " + #" --doObserved " + do + for fit in ${fits[*]} + do + string="runFits${fit}_G_T_${fit}/profile1D_syst_${fit}_G_T_CMS_zz4l_fai1.root:TTH:2 runFits${fit}_G_T_VH_VBF_${fit}/profile1D_syst_${fit}_G_T_VH_VBF_CMS_zz4l_fai1.root:VBF:3 runFits${fit}_G_T_VH_${fit}/profile1D_syst_${fit}_G_T_VH_CMS_zz4l_fai1.root:VHHAD:4 runFits${fit}_G_T_VH_VBF_MET_${fit}/profile1D_syst_${fit}_G_T_VH_VBF_MET_CMS_zz4l_fai1.root:VH-MET:9 runFits${fit}_NoSyst_${fit}/profile1D_syst_${fit}_NoSyst_CMS_zz4l_fai1.root:VH-LEP:46" + + plot1DScan.py runFits${fit}_GGH_${fit}/profile1D_syst_${fit}_GGH_CMS_zz4l_fai1.root --y-cut 7 --y-max 7 -o plots/Breakdown_sum_${fit} --POI CMS_zz4l_fai1 --main-label GGH --translate ../Plots/pois_fa3.json --others $string + done + done +else + echo "Step $STEP is not one among t2w,fit,plot. Exiting." 
+fi + diff --git a/Datacard/Read_pkl.py b/Datacard/Read_pkl.py new file mode 100644 index 00000000..8479d3fa --- /dev/null +++ b/Datacard/Read_pkl.py @@ -0,0 +1,85 @@ +import pandas as pd +import pickle +import glob +import numpy as np +from optparse import OptionParser +from collections import OrderedDict as od +import os, sys + + +def add_spaces(string): + while len(string) < 14: + string += ' ' + return string + + + + +def get_options(): + parser = OptionParser() + # Input details + parser.add_option('--cat', dest='cat', default='all', help="Category ") + parser.add_option('--dir', dest='dir', default='yields_2024-09-01_ALT_0M', help="Directory from wich read pkl") + parser.add_option('--doFile', dest='doFile', default='Yes', help="Do you want to have the output file? Yes or No") + return parser.parse_args() +(opt,args) = get_options() + +cat = opt.cat +models = ['ALT_0M'] +for m in models: + file_path = 'YIELD'+opt.dir[6:]+'_'+cat+'_'+m+'.txt' + if os.path.exists(file_path): os.remove(file_path ) + + if opt.cat == 'all' : pkl_files = glob.glob(opt.dir+'/*.pkl') + else : pkl_files = glob.glob(opt.dir+'/'+cat+'.pkl') + + + pkl_files = sorted(pkl_files) + header = 'year cat proc yield lumi nominal_yield \n' + result = [] + for pkl_file in pkl_files: + + + result_dict = {} + file_name = os.path.basename(pkl_file) + data = [] + with open(pkl_file) as f: + data.append(pickle.load(f)) + + dataFrame = pd.DataFrame() + + + + lumiMap = {'2016':36.33, '2016preVFP': 19.51, '2016postVFP': 16.80, '2017':41.48, '2018':59.83, 'combined':137.62, 'merged':137.62} + + for d in data: + #print d + df = pd.DataFrame() + + df=df.append(d["year"] ) + df=df.append(d["cat"]) + df=df.append(d["proc_s0"]) + df=df.append(d["nominal_yield"] ) + + + + dataFrame = pd.concat([dataFrame, df.T], ignore_index=True) + + + + dataFrame["lumi"] = dataFrame["year"].map(lumiMap) + dataFrame["nominal_yield"] = pd.to_numeric(dataFrame["nominal_yield"], errors='coerce') + dataFrame["nominal_yield_lumi"] = 
dataFrame["lumi"] * dataFrame["nominal_yield"].astype(float) + dataFrame = dataFrame.drop_duplicates() + #print(dataFrame) + if opt.doFile == 'Yes': + with open(file_path, 'a') as file: + if header != '' : + file.write(header) + file.write(dataFrame.to_string(header=False, index=False)) + file.write('\n') + header = '' + + + + diff --git a/Datacard/RunYields.py b/Datacard/RunYields.py index 8454f2ad..fad28045 100644 --- a/Datacard/RunYields.py +++ b/Datacard/RunYields.py @@ -32,8 +32,8 @@ def get_options(): parser.add_option('--doSystematics', dest='doSystematics', default=False, action="store_true", help="Include systematics calculations and add to datacard") parser.add_option('--ignore-warnings', dest='ignore_warnings', default=False, action="store_true", help="Skip errors for missing systematics. Instead output warning message") # For submission - parser.add_option('--batch', dest='batch', default='IC', help='Batch') - parser.add_option('--queue', dest='queue', default='microcentury', help='Queue: should not take long (microcentury will do)') + parser.add_option('--batch', dest='batch', default='condor', help='Batch') + parser.add_option('--queue', dest='queue', default='espresso', help='Queue: should not take long (microcentury will do)') parser.add_option('--jobOpts', dest='jobOpts', default='', help="Additional options to add to job submission. For Condor separate individual options with a colon (specify all within quotes e.g. 
\"option_xyz = abc+option_123 = 456\")") parser.add_option('--printOnly', dest='printOnly', default=False, action="store_true", help="Dry run: print submission files only") return parser.parse_args() @@ -71,6 +71,7 @@ def leave(): # If auto: extract cats from first input workspace dir inputWSDir0 = options['inputWSDirMap'].split(",")[0].split("=")[1] WSFileNames = extractWSFileNames(inputWSDir0) +print(inputWSDir0, WSFileNames) if options['cats'] == "auto": options['cats'] = extractListOfCats(WSFileNames) if( opt.doNOTAG )&( 'NOTAG' not in options['cats'] ): diff --git a/Datacard/cleanDatacard.py b/Datacard/cleanDatacard.py index f0edc18b..d57552e2 100644 --- a/Datacard/cleanDatacard.py +++ b/Datacard/cleanDatacard.py @@ -14,6 +14,7 @@ if not opts.outfilename: opts.outfilename = opts.datacard.replace('.txt','_cleaned.txt') + opts.factor = float(opts.factor) factorLo = 1./opts.factor factorHi = opts.factor diff --git a/Datacard/makeDatacard.py b/Datacard/makeDatacard.py index 55148ce9..4e955972 100644 --- a/Datacard/makeDatacard.py +++ b/Datacard/makeDatacard.py @@ -21,6 +21,7 @@ def get_options(): parser.add_option('--doTrueYield', dest='doTrueYield', default=False, action="store_true", help="For pruning: use true number of expected events for proc x cat i.e. Product(XS,BR,eff*acc,lumi). Use only if NOTAG dataset has been included. If false then will use nominal_yield (i.e. 
sumEntries)") parser.add_option('--mass', dest='mass', default='125', help="MH mass: required for doTrueYield") parser.add_option('--analysis', dest='analysis', default='STXS', help="Analysis extension: required for doTrueYield (see ./tools/XSBR.py for example)") + parser.add_option('--pruneCat', dest='pruneCat', default=None, help="Prune category, can specify multiple times") # For yield/systematics: parser.add_option('--skipCOWCorr', dest='skipCOWCorr', default=False, action="store_true", help="Skip centralObjectWeight correction for events in acceptance") parser.add_option('--doSystematics', dest='doSystematics', default=False, action="store_true", help="Include systematics calculations and add to datacard") @@ -44,11 +45,18 @@ def leave(): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Concatenate dataframes print " --> Loading per category dataframes into single dataframe" +skipCats = [] +if opt.pruneCat: skipCats = opt.pruneCat.split(",") extStr = "_%s"%opt.ext if opt.ext != '' else '' +print "./yields%s/*.pkl"%extStr pkl_files = glob.glob("./yields%s/*.pkl"%extStr) + pkl_files.sort() # Categories in alphabetical order data = pd.DataFrame() for f_pkl_name in pkl_files: + if any([skipCat in f_pkl_name for skipCat in skipCats]): + print "\t===> Pruning category: ",f_pkl_name + continue with open(f_pkl_name,"rb") as f_pkl: df = pickle.load(f_pkl) data = pd.concat([data,df], ignore_index=True, axis=0, sort=False) @@ -104,6 +112,7 @@ def leave(): data['prune'] = 0 if opt.doTrueYield: print " --> Using the true yield of process for pruning: N = Product(XS,BR,eff*acc,lumi)" + mask = (data['type']=='sig') # Extract XS*BR using tools.XSBR @@ -132,6 +141,7 @@ def leave(): else: print " --> Using nominal yield of process (sumEntries) for pruning" + mask = (data['type']=='sig') # Extract per category yields @@ -141,10 +151,14 @@ def leave(): # Set prune = 1 if < threshold of total cat yield mask = 
(data['nominal_yield'] Extracting yields: (%s,%s) [%.1f%%]"%(r['proc'],r['cat'],100*(float(ir)/totalSignalRows)) + print " --> Extracting yields: (%s,%s) [%.3f/%.3f = %.1f%%]"%(r['proc'],r['cat'],float(ir),totalSignalRows,100*(float(ir+1)/totalSignalRows)) # Open input WS file and extract workspace f_in = ROOT.TFile(r.inputWSFile) @@ -218,6 +223,7 @@ def get_options(): y, y_COWCorr = 0, 0 sumw2 = 0 for i in range(0,rdata_nominal.numEntries()): + p = rdata_nominal.get(i) w = rdata_nominal.weight() y += w @@ -232,6 +238,7 @@ def get_options(): data.at[ir,'nominal_yield'] = y data.at[ir,'sumw2'] = sumw2 if not opt.skipCOWCorr: data.at[ir,'nominal_yield_COWCorr'] = y_COWCorr + print "\t\t ==> nominal_yield = %f " % (y*_rate/1000.) # Systematics: loop over systematics and use function to extract yield variations if opt.doSystematics: diff --git a/Datacard/run_sequence.sh b/Datacard/run_sequence.sh new file mode 100755 index 00000000..e5e7da0d --- /dev/null +++ b/Datacard/run_sequence.sh @@ -0,0 +1,113 @@ +ext='2024-12-01' + + +STEP=0 +usage(){ + echo "Script to run yields and datacard making. Yields need to be done before running datacards" + echo "options:" + + echo "-h|--help) " + echo "-s|--step) " + echo "-d|--dryRun) " +} +# options may be followed by one colon to indicate they have a required argument +if ! 
options=$(getopt -u -o s:hd -l help,step:,dryRun -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +DROPT="" +if [[ $DR ]]; then + DROPT=" --printOnly " +fi + +smprocs=("GG2H" "VBF" "TTH" "WMINUSH2HQQ" "WPLUSH2HQQ" "QQ2HLL") +smprocs_csv=$(IFS=, ; echo "${smprocs[*]}") +echo $smprocs_csv +if [[ $STEP == "yields" ]]; then + # for mu-simple: exclude ALT processes + echo $smprocs_csv + + +# python RunYields.py --cats "auto" --inputWSDirMap 2016preVFP=cards/signal_2016preVFP,2016postVFP=cards/signal_2016postVFP,2017=cards/signal_2017,2018=cards/signal_2018 --procs $smprocs_csv --mergeYears --doSystematics --skipZeroes --ext ${ext}_xsec ${DROPT} + # for the single fai fits: include one ALT sample at a time +# for altproc in "ALT_L1" "ALT_L1Zg" "ALT_0PH" "ALT_0M" + for altproc in "ALT_0M" + + + # to get the interference correctly need the SM (fa1=0), the pure BSM (fai=1) and the mixed one (fai=0.5) + do + + # for bookkeeping mistake, for VBF the files are called ALT_xxx for VBF and ALTxx for VH,TTH + altproc_nonvbf=`echo ${altproc} | sed 's|_||g'` + + + vbfsamples="VBF,VBF_${altproc}" + + if [[ $altproc == "ALT_0PH" ]]; then + zhsamples="QQ2HLL,ZH_${altproc_nonvbf},ZH_${altproc_nonvbf}f05ph0" + elif [[ $altproc == "ALT_0M" ]]; then + zhsamples="QQ2HLL,ZH_${altproc_nonvbf}" + elif [[ $altproc == "ALT_L1" ]]; then + zhsamples="QQ2HLL,ZH_ALT0L1,ZH_ALT0L1f05ph0" + else + zhsamples="QQ2HLL,ZH_ALT0L1Zg,ZH_ALT0L1Zgf05ph0" + fi + + + if [[ $altproc == "ALT_0PH" ]]; then + whsamples="WMINUSH2HQQ,WPLUSH2HQQ,WH_ALT0PH,WH_ALT0PHf05ph0" + elif [[ $altproc == "ALT_0M" ]]; then + whsamples="WMINUSH2HQQ,WPLUSH2HQQ,wh_ALT_0M" + elif [[ $altproc == "ALT_L1" ]]; 
then + whsamples="WMINUSH2HQQ,WPLUSH2HQQ,WH_ALT0L1f05ph0,wh_ALT_L1" + else + whsamples="WMINUSH2HQQ,WPLUSH2HQQ" + fi + + + + + + python RunYields.py --cats "auto" --inputWSDirMap 2016preVFP=cards/signal_2016preVFP,2016postVFP=cards/signal_2016postVFP,2017=cards/signal_2017,2018=cards/signal_2018 --procs "GG2H,TTH,$vbfsamples,$whsamples,$zhsamples" --mergeYears --doSystematics --skipZeroes --ext ${ext}_${altproc} ${DROPT} + done + + #python RunYields.py --cats "auto" --procs "auto" --inputWSDirMap 2016preVFP=cards/signal_2016preVFP,2016postVFP=cards/signal_2016postVFP,2017=cards/signal_2017,2018=cards/signal_2018 --mergeYears --doSystematics --skipZeroes --ext ${ext}_all --batch condor --queue longlunch ${DROPT} +elif [[ $STEP == "datacards" ]]; then + + for fit in "ALT_0M" + do + echo "making datacards for all years together for type of fit: $fit" + python makeDatacard.py --years 2016preVFP,2016postVFP,2017,2018 --ext ${ext}_${fit} --prune --doSystematics --output "Datacard_${fit}" --pruneCat RECO_VBFLIKEGGH_Tag1,RECO_VBFLIKEGGH_Tag0 + python cleanDatacard.py --datacard "Datacard_${fit}.txt" --factor 2 --removeDoubleSided + mv "Datacard_${fit}_cleaned.txt" "Datacard_${fit}.txt" + + done + +elif [[ $STEP == "links" ]]; then + cd Models + rm signal background + echo "linking Models/signal to ../../Signal/outdir_packaged" + ln -s ../../Signal/outdir_packaged signal + echo "linking Models/background to ../../Background/outdir_2024-02-14" + ln -s ../../Background/outdir_2024-02-14 background + cd - +else + echo "Step $STEP is not one among yields,datacards,links. Exiting." +fi + + diff --git a/Datacard/systematics.py b/Datacard/systematics.py index 61c2b2f0..8de0f9b2 100644 --- a/Datacard/systematics.py +++ b/Datacard/systematics.py @@ -19,29 +19,30 @@ # norm = inorm/inc # Specify as list in dict: e.g. 
'tiers'=['inc','inorm','norm','ishape','shape'] + theory_systematics = [ # Normalisation uncertainties: enter interpretations {'name':'BR_hgg','title':'BR_hgg','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':"0.98/1.021"}, # New scheme for ggH stage 1.2 - {'name':'THU_ggH_stxs_Yield','title':'THU_ggH_stxs_Yield','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_Res','title':'THU_ggH_stxs_Res','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_Mig01','title':'THU_ggH_stxs_Mig01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_Mig12','title':'THU_ggH_stxs_Mig12','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_Boosted','title':'THU_ggH_stxs_Boosted','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_PTH200','title':'THU_ggH_stxs_PTH200','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_PTH300','title':'THU_ggH_stxs_PTH300','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_PTH450','title':'THU_ggH_stxs_PTH450','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_PTH650','title':'THU_ggH_stxs_PTH650','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_0J_PTH10','title':'THU_ggH_stxs_0J_PTH10','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - 
{'name':'THU_ggH_stxs_1J_PTH60','title':'THU_ggH_stxs_1J_PTH60','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_1J_PTH120','title':'THU_ggH_stxs_1J_PTH120','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_GE2J_PTH60','title':'THU_ggH_stxs_GE2J_PTH60','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_GE2J_PTH120','title':'THU_ggH_stxs_GE2J_PTH120','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_GE2J_MJJ350','title':'THU_ggH_stxs_GE2J_MJJ350','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_GE2J_MJJ700','title':'THU_ggH_stxs_GE2J_MJJ700','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_GE2J_LOWMJJ_PTHJJ25','title':'THU_ggH_stxs_GE2J_LOWMJJ_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - {'name':'THU_ggH_stxs_GE2J_HIGHMJJ_PTHJJ25','title':'THU_ggH_stxs_GE2J_HIGHMJJ_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, - # WG1 scheme for ggH (includes inclusive N3LO unc so dont have this + QCDscale_ggH) + {'name':'THU_ggH_stxs_Yield','title':'THU_ggH_stxs_Yield','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_Res','title':'THU_ggH_stxs_Res','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + 
{'name':'THU_ggH_stxs_Mig01','title':'THU_ggH_stxs_Mig01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_Mig12','title':'THU_ggH_stxs_Mig12','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_Boosted','title':'THU_ggH_stxs_Boosted','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_PTH200','title':'THU_ggH_stxs_PTH200','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_PTH300','title':'THU_ggH_stxs_PTH300','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_PTH450','title':'THU_ggH_stxs_PTH450','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_PTH650','title':'THU_ggH_stxs_PTH650','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_0J_PTH10','title':'THU_ggH_stxs_0J_PTH10','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_1J_PTH60','title':'THU_ggH_stxs_1J_PTH60','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_1J_PTH120','title':'THU_ggH_stxs_1J_PTH120','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_GE2J_PTH60','title':'THU_ggH_stxs_GE2J_PTH60','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + 
{'name':'THU_ggH_stxs_GE2J_PTH120','title':'THU_ggH_stxs_GE2J_PTH120','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_GE2J_MJJ350','title':'THU_ggH_stxs_GE2J_MJJ350','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_GE2J_MJJ700','title':'THU_ggH_stxs_GE2J_MJJ700','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_GE2J_LOWMJJ_PTHJJ25','title':'THU_ggH_stxs_GE2J_LOWMJJ_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + {'name':'THU_ggH_stxs_GE2J_HIGHMJJ_PTHJJ25','title':'THU_ggH_stxs_GE2J_HIGHMJJ_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggh_stxs.json'}, + # # WG1 scheme for ggH (includes inclusive N3LO unc so dont have this + QCDscale_ggH) {'name':'THU_ggH_Mu','title':'THU_ggH_Mu','type':'factory','prior':'lnN','correlateAcrossYears':1,'tiers':['inorm']}, {'name':'THU_ggH_Res','title':'THU_ggH_Res','type':'factory','prior':'lnN','correlateAcrossYears':1,'tiers':['inorm']}, {'name':'THU_ggH_Mig01','title':'THU_ggH_Mig01','type':'factory','prior':'lnN','correlateAcrossYears':1,'tiers':['inorm']}, @@ -61,19 +62,19 @@ {'name':'THU_qqH_MJJ700','title':'THU_qqH_MJJ700','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs.json'}, {'name':'THU_qqH_MJJ1000','title':'THU_qqH_MJJ1000','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs.json'}, {'name':'THU_qqH_MJJ1500','title':'THU_qqH_MJJ1500','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs.json'}, - 
{'name':'THU_qqH_PTHJJ25','title':'THU_qqH_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs.json'}, {'name':'THU_qqH_JET01','title':'THU_qqH_JET01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs.json'}, + {'name':'THU_qqH_PTHJJ25','title':'THU_qqH_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs.json'}, #{'name':'QCDscale_qqH','title':'QCDscale_qqH','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh.json'}, {'name':'THU_WH_inc','title':'THU_WH_inc','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, - {'name':'THU_WH_mig75','title':'THU_WH_mig75','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, - {'name':'THU_WH_mig150','title':'THU_WH_mig150','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, - {'name':'THU_WH_mig250','title':'THU_WH_mig250','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, - {'name':'THU_WH_mig01','title':'THU_WH_mig01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, + #{'name':'THU_WH_mig75','title':'THU_WH_mig75','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, + #{'name':'THU_WH_mig150','title':'THU_WH_mig150','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, + #{'name':'THU_WH_mig250','title':'THU_WH_mig250','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, + 
#{'name':'THU_WH_mig01','title':'THU_WH_mig01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_whlep_stxs.json'}, {'name':'THU_ZH_inc','title':'THU_ZH_inc','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, - {'name':'THU_ZH_mig75','title':'THU_ZH_mig75','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, - {'name':'THU_ZH_mig150','title':'THU_ZH_mig150','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, - {'name':'THU_ZH_mig250','title':'THU_ZH_mig250','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, - {'name':'THU_ZH_mig01','title':'THU_ZH_mig01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, + #{'name':'THU_ZH_mig75','title':'THU_ZH_mig75','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, + #{'name':'THU_ZH_mig150','title':'THU_ZH_mig150','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, + #{'name':'THU_ZH_mig250','title':'THU_ZH_mig250','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, + #{'name':'THU_ZH_mig01','title':'THU_ZH_mig01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_zhlep_stxs.json'}, {'name':'THU_ggZH_inc','title':'THU_ggZH_inc','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggzhlep_stxs.json'}, {'name':'THU_ggZH_mig75','title':'THU_ggZH_mig75','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggzhlep_stxs.json'}, 
{'name':'THU_ggZH_mig150','title':'THU_ggZH_mig150','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggzhlep_stxs.json'}, @@ -81,13 +82,14 @@ {'name':'THU_ggZH_mig01','title':'THU_ggZH_mig01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggzhlep_stxs.json'}, #{'name':'QCDscale_VH','title':'QCDscale_VH','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_vh.json'}, # Note: VH had components accounted for in THU_qqH_*, set to 1 in json {'name':'QCDscale_ggZH','title':'QCDscale_ggZH','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_ggzh.json'}, # Note: ggZH lep components are accounted for in THU_ggZH i.e. this only covers the ggZH had component - + ################################### {'name':'THU_ttH_Yield','title':'THU_ttH_Yield','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_tth_stxs.json'}, {'name':'THU_ttH_mig60','title':'THU_ttH_mig60','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_tth_stxs.json'}, {'name':'THU_ttH_mig120','title':'THU_ttH_mig120','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_tth_stxs.json'}, {'name':'THU_ttH_mig200','title':'THU_ttH_mig200','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_tth_stxs.json'}, {'name':'THU_ttH_mig300','title':'THU_ttH_mig300','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_tth_stxs.json'}, - #{'name':'QCDscale_ttH','title':'QCDscale_ttH','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_tth.json'}, + ############################################### + #{'name':'QCDscale_ttH','title':'QCDscale_ttH','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_tth.json'}, 
{'name':'QCDscale_tHq','title':'QCDscale_tHq','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_thq.json'}, {'name':'QCDscale_tHW','title':'QCDscale_tHW','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_thw.json'}, {'name':'QCDscale_bbH','title':'QCDscale_bbH','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_bbh.json'}, @@ -121,19 +123,23 @@ {'name':'alphaSWeight_1','title':'CMS_hgg_alphaSWeight_1','type':'factory','prior':'lnN','correlateAcrossYears':1,'tiers':['shape']}, # Theory uncertainties for constrained to SM bins - {'name':'THU_qqH_Yield_qqH_cnstr','title':'STXS_constrain_THU_qqH_Yield','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_PTH200_qqH_cnstr','title':'STXS_constrain_THU_qqH_PTH200','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_MJJ60_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ60','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_MJJ120_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ120','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_MJJ350_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ350','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_MJJ700_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ700','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - 
{'name':'THU_qqH_MJJ1000_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ1000','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_MJJ1500_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ1500','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_PTHJJ25_qqH_cnstr','title':'STXS_constrain_THU_qqH_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'THU_qqH_JET01_qqH_cnstr','title':'STXS_constrain_THU_qqH_JET01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'pdf_Higgs_qqH_cnstr','title':'STXS_constrain_pdf_Higgs','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, - {'name':'alphaS_qqH_cnstr','title':'STXS_constrain_alphaS','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'} + {'name':'THU_qqH_Yield_qqH_cnstr','title':'STXS_constrain_THU_qqH_Yield','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_PTH200_qqH_cnstr','title':'STXS_constrain_THU_qqH_PTH200','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_MJJ60_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ60','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_MJJ120_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ120','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + 
{'name':'THU_qqH_MJJ350_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ350','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_MJJ700_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ700','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_MJJ1000_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ1000','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_MJJ1500_qqH_cnstr','title':'STXS_constrain_THU_qqH_MJJ1500','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_PTHJJ25_qqH_cnstr','title':'STXS_constrain_THU_qqH_PTHJJ25','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'THU_qqH_JET01_qqH_cnstr','title':'STXS_constrain_THU_qqH_JET01','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'pdf_Higgs_qqH_cnstr','title':'STXS_constrain_pdf_Higgs','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'}, + {'name':'alphaS_qqH_cnstr','title':'STXS_constrain_alphaS','type':'constant','prior':'lnN','correlateAcrossYears':1,'value':'theory_uncertainties/thu_qqh_stxs_constrain.json'} + ] + + + # PDF weight for i in range(1,60): theory_systematics.append( {'name':'pdfWeight_%g'%i, 'title':'CMS_hgg_pdfWeight_%g'%i, 'type':'factory','prior':'lnN','correlateAcrossYears':1,'tiers':['shape']} ) @@ -146,9 +152,9 @@ experimental_systematics = [ # Updated luminosity partial-correlation scheme: 13/5/21 (recommended simplified nuisances) - 
{'name':'lumi_13TeV_Uncorrelated','title':'lumi_13TeV_Uncorrelated','type':'constant','prior':'lnN','correlateAcrossYears':0,'value':{'2016':'1.010','2017':'1.020','2018':'1.015'}}, - {'name':'lumi_13TeV_Correlated','title':'lumi_13TeV_Correlated','type':'constant','prior':'lnN','correlateAcrossYears':-1,'value':{'2016':'1.006','2017':'1.009','2018':'1.020'}}, - {'name':'lumi_13TeV_Correlated_1718','title':'lumi_13TeV_Correlated_1718','type':'constant','prior':'lnN','correlateAcrossYears':-1,'value':{'2016':'-','2017':'1.006','2018':'1.002'}}, + {'name':'lumi_13TeV_Uncorrelated','title':'lumi_13TeV_Uncorrelated','type':'constant','prior':'lnN','correlateAcrossYears':0,'value':{'2016preVFP':'1.010','2016postVFP':'1.010','2017':'1.020','2018':'1.015'}}, + {'name':'lumi_13TeV_Correlated','title':'lumi_13TeV_Correlated','type':'constant','prior':'lnN','correlateAcrossYears':-1,'value':{'2016preVFP':'1.006','2016postVFP':'1.006','2017':'1.009','2018':'1.020'}}, + {'name':'lumi_13TeV_Correlated_1718','title':'lumi_13TeV_Correlated_1718','type':'constant','prior':'lnN','correlateAcrossYears':-1,'value':{'2016preVFP':'-','2016postVFP':'-','2017':'1.006','2018':'1.002'}}, {'name':'LooseMvaSF','title':'CMS_hgg_LooseMvaSF','type':'factory','prior':'lnN','correlateAcrossYears':0}, {'name':'PreselSF','title':'CMS_hgg_PreselSF','type':'factory','prior':'lnN','correlateAcrossYears':0}, {'name':'electronVetoSF','title':'CMS_hgg_electronVetoSF','type':'factory','prior':'lnN','correlateAcrossYears':0}, @@ -164,26 +170,26 @@ {'name':'MvaShift','title':'CMS_hgg_phoIdMva','type':'factory','prior':'lnN','correlateAcrossYears':0}, {'name':'PUJIDShift','title':'CMS_hgg_PUJIDShift','type':'factory','prior':'lnN','correlateAcrossYears':0}, # New partial correlation scheme for JECs (do not use in addition to nominal 'JEC') - {'name':'JECAbsolute','title':'CMS_scale_j_Absolute','type':'factory','prior':'lnN','correlateAcrossYears':1}, - 
{'name':'JECFlavorQCD','title':'CMS_scale_j_FlavorQCD','type':'factory','prior':'lnN','correlateAcrossYears':1}, - {'name':'JECBBEC1','title':'CMS_scale_j_BBEC1','type':'factory','prior':'lnN','correlateAcrossYears':1}, - {'name':'JECHF','title':'CMS_scale_j_HF','type':'factory','prior':'lnN','correlateAcrossYears':1}, - {'name':'JECEC2','title':'CMS_scale_j_EC2','type':'factory','prior':'lnN','correlateAcrossYears':1}, - {'name':'JECRelativeBal','title':'CMS_scale_j_RelativeBal','type':'factory','prior':'lnN','correlateAcrossYears':1}, - {'name':'JECAbsoluteYEAR','title':'CMS_scale_j_Absolute_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'JECBBEC1YEAR','title':'CMS_scale_j_BBEC1_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'JECHFYEAR','title':'CMS_scale_j_HF_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'JECEC2YEAR','title':'CMS_scale_j_EC2_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'JECRelativeSampleYEAR','title':'CMS_scale_j_RelativeSample_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, +# {'name':'JECAbsolute','title':'CMS_scale_j_Absolute','type':'factory','prior':'lnN','correlateAcrossYears':1}, + # {'name':'JECFlavorQCD','title':'CMS_scale_j_FlavorQCD','type':'factory','prior':'lnN','correlateAcrossYears':1}, + # {'name':'JECBBEC1','title':'CMS_scale_j_BBEC1','type':'factory','prior':'lnN','correlateAcrossYears':1}, + # {'name':'JECHF','title':'CMS_scale_j_HF','type':'factory','prior':'lnN','correlateAcrossYears':1}, + # {'name':'JECEC2','title':'CMS_scale_j_EC2','type':'factory','prior':'lnN','correlateAcrossYears':1}, + # {'name':'JECRelativeBal','title':'CMS_scale_j_RelativeBal','type':'factory','prior':'lnN','correlateAcrossYears':1}, + # {'name':'JECAbsoluteYEAR','title':'CMS_scale_j_Absolute_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # 
{'name':'JECBBEC1YEAR','title':'CMS_scale_j_BBEC1_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # {'name':'JECHFYEAR','title':'CMS_scale_j_HF_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # {'name':'JECEC2YEAR','title':'CMS_scale_j_EC2_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # {'name':'JECRelativeSampleYEAR','title':'CMS_scale_j_RelativeSample_y','type':'factory','prior':'lnN','correlateAcrossYears':0}, - #{'name':'JEC','title':'CMS_scale_j','type':'factory','prior':'lnN','correlateAcrossYears':0}, + {'name':'JEC','title':'CMS_scale_j','type':'factory','prior':'lnN','correlateAcrossYears':0}, {'name':'JER','title':'CMS_res_j','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'metJecUncertainty','title':'CMS_hgg_MET_scale_j','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'metJerUncertainty','title':'CMS_hgg_MET_res_j','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'metPhoUncertainty','title':'CMS_hgg_MET_PhotonScale','type':'factory','prior':'lnN','correlateAcrossYears':0}, - {'name':'metUncUncertainty','title':'CMS_hgg_MET_Unclustered','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # {'name':'metJecUncertainty','title':'CMS_hgg_MET_scale_j','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # {'name':'metJerUncertainty','title':'CMS_hgg_MET_res_j','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # {'name':'metPhoUncertainty','title':'CMS_hgg_MET_PhotonScale','type':'factory','prior':'lnN','correlateAcrossYears':0}, + # {'name':'metUncUncertainty','title':'CMS_hgg_MET_Unclustered','type':'factory','prior':'lnN','correlateAcrossYears':0}, # HEM issue systematic - {'name':'JetHEM','title':'CMS_hgg_JetHEM','type':'factory','prior':'lnN','correlateAcrossYears':0} + {'name':'JetHEM','title':'CMS_hgg_JetHEM','type':'factory','prior':'lnN','correlateAcrossYears':0} ] # 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -194,7 +200,7 @@ signal_shape_systematics = [ {'name':'deltafracright','title':'deltafracright','type':'signal_shape','mode':'other','mean':'0.0','sigma':'0.02'}, {'name':'NonLinearity','title':'NonLinearity','type':'signal_shape','mode':'scalesGlobal','mean':'0.0','sigma':'0.002'}, - {'name':'Geant4','title':'Geant4','type':'signal_shape','mode':'scalesGlobal','mean':'0.0','sigma':'0.0005'}, +# {'name':'Geant4','title':'Geant4','type':'signal_shape','mode':'scalesGlobal','mean':'0.0','sigma':'0.0005'}, {'name':'HighR9EB','title':'HighR9EB','type':'signal_shape','mode':'scales','mean':'0.0','sigma':'1.0'}, {'name':'HighR9EE','title':'HighR9EE','type':'signal_shape','mode':'scales','mean':'0.0','sigma':'1.0'}, {'name':'LowR9EB','title':'LowR9EB','type':'signal_shape','mode':'scales','mean':'0.0','sigma':'1.0'}, diff --git a/Datacard/theory_uncertainties/thu_ggh.json b/Datacard/theory_uncertainties/thu_ggh.json index 5eaabc13..559f7721 100644 --- a/Datacard/theory_uncertainties/thu_ggh.json +++ b/Datacard/theory_uncertainties/thu_ggh.json @@ -1,87 +1,7 @@ -{ - "ggH_FWDH":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_0J_PTH_0_10":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_0J_PTH_GT10":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_1J_PTH_0_60":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_1J_PTH_60_120":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_1J_PTH_120_200":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_GE2J_MJJ_0_350_PTH_0_60":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_GE2J_MJJ_0_350_PTH_60_120":{ - 
"QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_GE2J_MJJ_0_350_PTH_120_200":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_PTH_200_300":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_PTH_300_450":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_PTH_450_650":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - }, - "ggH_PTH_GT650":{ - "QCDscale_ggH":[1.047,0.931], - "pdf_Higgs_ggH":1.019, - "alphaS_ggH":1.026 - } +{ + "ggH":{ + "QCDscale_ggH":[1.047,0.931], + "pdf_Higgs_ggH":1.019, + "alphaS_ggH":1.026 +} } diff --git a/Datacard/theory_uncertainties/thu_qqh.json b/Datacard/theory_uncertainties/thu_qqh.json index 7afefb76..a871c96f 100644 --- a/Datacard/theory_uncertainties/thu_qqh.json +++ b/Datacard/theory_uncertainties/thu_qqh.json @@ -1,57 +1,56 @@ { - "qqH_FWDH":{ + "qqH":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_0J":{ + + "qqH_ALT_0PH":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_1J":{ + + "qqH_ALT_0PHf05":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_GE2J_MJJ_0_60":{ + + "qqH_ALT_0M'":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_GE2J_MJJ_60_120":{ + + "qqH_ALT_0Mf05":{ 
"QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_GE2J_MJJ_120_350":{ + + "qqH_ALT_L1":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25":{ + + "qqH_ALT_L1f05'":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25":{ + + "qqH_ALT_L1Zg'":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 }, - "qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25":{ - "QCDscale_qqH":[1.004,0.997], - "pdf_Higgs_qqH":1.021, - "alphaS_qqH":1.005 - }, - "qqH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25":{ - "QCDscale_qqH":[1.004,0.997], - "pdf_Higgs_qqH":1.021, - "alphaS_qqH":1.005 - }, - "qqH_GE2J_MJJ_GT350_PTH_GT200":{ + + "qqH_ALT_L1Zgf05":{ "QCDscale_qqH":[1.004,0.997], "pdf_Higgs_qqH":1.021, "alphaS_qqH":1.005 } + } diff --git a/Datacard/theory_uncertainties/thu_tth.json b/Datacard/theory_uncertainties/thu_tth.json index 45b4d612..89e69eaf 100644 --- a/Datacard/theory_uncertainties/thu_tth.json +++ b/Datacard/theory_uncertainties/thu_tth.json @@ -1,30 +1,6 @@ { - "ttH_FWDH":{ - "QCDscale_ttH":[1.058,0.908], - "pdf_Higgs_ttH":1.030, - "alphaS_ttH":1.020 - }, - "ttH_PTH_0_60":{ - "QCDscale_ttH":[1.058,0.908], - "pdf_Higgs_ttH":1.030, - "alphaS_ttH":1.020 - }, - "ttH_PTH_60_120":{ - "QCDscale_ttH":[1.058,0.908], - "pdf_Higgs_ttH":1.030, - "alphaS_ttH":1.020 - }, - "ttH_PTH_120_200":{ - "QCDscale_ttH":[1.058,0.908], - "pdf_Higgs_ttH":1.030, - "alphaS_ttH":1.020 - }, - "ttH_PTH_200_300":{ - "QCDscale_ttH":[1.058,0.908], - "pdf_Higgs_ttH":1.030, - "alphaS_ttH":1.020 - }, - "ttH_PTH_GT300":{ + + "ttH":{ "QCDscale_ttH":[1.058,0.908], "pdf_Higgs_ttH":1.030, "alphaS_ttH":1.020 diff --git a/Datacard/theory_uncertainties/thu_tth_stxs.json b/Datacard/theory_uncertainties/thu_tth_stxs.json index 0621f403..8e0c2003 100644 --- a/Datacard/theory_uncertainties/thu_tth_stxs.json +++ 
b/Datacard/theory_uncertainties/thu_tth_stxs.json @@ -1,4 +1,4 @@ -{ +{ "ttH_FWDH":{ "THU_ttH_Yield":[1.058,0.908], "THU_ttH_mig60":1.000, diff --git a/Datacard/theory_uncertainties/thu_vh.json b/Datacard/theory_uncertainties/thu_vh.json index 2d42f44e..2b1f1344 100644 --- a/Datacard/theory_uncertainties/thu_vh.json +++ b/Datacard/theory_uncertainties/thu_vh.json @@ -1,172 +1,81 @@ -{ - "WH_lep_FWDH":{ - "QCDscale_VH":[1.005,0.993], - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_lep_PTV_0_75":{ - "QCDscale_VH":[1.005,0.993], - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_lep_PTV_75_150":{ - "QCDscale_VH":[1.005,0.993], - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_lep_PTV_150_250_0J":{ - "QCDscale_VH":[1.005,0.993], - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_lep_PTV_150_250_GE1J":{ - "QCDscale_VH":[1.005,0.993], - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_lep_PTV_GT250":{ - "QCDscale_VH":[1.005,0.993], - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "ZH_lep_FWDH":{ - "QCDscale_VH":[1.038,0.969], - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_lep_PTV_0_75":{ - "QCDscale_VH":[1.038,0.969], - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_lep_PTV_75_150":{ - "QCDscale_VH":[1.038,0.969], - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_lep_PTV_150_250_0J":{ - "QCDscale_VH":[1.038,0.969], - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_lep_PTV_150_250_GE1J":{ - "QCDscale_VH":[1.038,0.969], - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_lep_PTV_GT250":{ - "QCDscale_VH":[1.038,0.969], - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "WH_had_FWDH":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_0J":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_1J":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_GE2J_MJJ_0_60":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - 
}, - "WH_had_GE2J_MJJ_60_120":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_GE2J_MJJ_120_350":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "WH_had_GE2J_MJJ_GT350_PTH_GT200":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.017, - "alphaS_VH":1.009 - }, - "ZH_had_FWDH":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_0J":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_1J":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_0_60":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_60_120":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_120_350":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - }, - "ZH_had_GE2J_MJJ_GT350_PTH_GT200":{ - "QCDscale_VH":1.000, - "pdf_Higgs_VH":1.013, - "alphaS_VH":1.009 - } +{ + "zh":{ + 
"QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_L1f05":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_L1":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_L1Zgf05":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_L1Zg":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_0M":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_0Mf05":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_0PH":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "zh_ALT_0PHf05'":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + + + + "wh_ALT_L1":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "wh_ALT_0PH":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "wh_ALT_0PHf05":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "wh_ALT_0M":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "wh_ALT_L1f05":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "wh":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + } + } diff --git a/Datacard/theory_uncertainties/thu_whlep_stxs.json b/Datacard/theory_uncertainties/thu_whlep_stxs.json index 2c2d3a54..cab329e7 100644 --- a/Datacard/theory_uncertainties/thu_whlep_stxs.json +++ b/Datacard/theory_uncertainties/thu_whlep_stxs.json @@ -1,4 +1,26 @@ -{ +{ "WH_ALT0L1f05ph0":{ + "THU_WH_inc":[1.005,0.993] + }, + "WH_ALT0PH":{ + "THU_WH_inc":[1.005,0.993] + }, + "WH_ALT0PHf05ph0":{ + "THU_WH_inc":[1.005,0.993] + }, + "wh_ALT_0M":{ + "THU_WH_inc":[1.005,0.993] + }, + "WMINUSH2HQQ":{ + "QCDscale_VH":[1.005,0.993], + 
"pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "WPLUSH2HQQ":{ + "QCDscale_VH":[1.005,0.993], + "pdf_Higgs_VH":1.017, + "alphaS_VH":1.009 + }, + "WH_lep_FWDH":{ "THU_WH_inc":[1.005,0.993], "THU_WH_mig75":1.000, diff --git a/Datacard/tools/calcSystematics.py b/Datacard/tools/calcSystematics.py index acaafeec..881186a5 100644 --- a/Datacard/tools/calcSystematics.py +++ b/Datacard/tools/calcSystematics.py @@ -42,7 +42,8 @@ def addConstantSyst(sd,_syst,options): def getValueFromJson(row,uncertainties,sname): # uncertainties is a dict of the form proc:{sname:X} - p = re.sub("_2016_%s"%decayMode,"",row['proc']) + p = re.sub("_2016preVFP_%s"%decayMode,"",row['proc']) + p = re.sub("_2016postVFP_%s"%decayMode,"",p) p = re.sub("_2017_%s"%decayMode,"",p) p = re.sub("_2018_%s"%decayMode,"",p) if p in uncertainties: @@ -58,8 +59,11 @@ def getValueFromJson(row,uncertainties,sname): def factoryType(d,s): #Fix for pdfWeight (as Nweights > 10) - if('pdfWeight' in s['name']): return "s_w" - #if('pdfWeight' in s['name'])|('alphaSWeight' in s['name']): return "s_w" + #if('pdfWeight' in s['name']): return "s_w" + if('pdfWeight' in s['name'])|('alphaSWeight' in s['name'])|('scaleWeight' in s['name']): return "s_w" + + #Fix for rare cases in which there is no signal for that category at all (and skipZeroes has been used) + if(d[d['type']=='sig'].size==0): return "-" # Loop over rows in dataframe: until syst is found for ir, r in d[d['type']=='sig'].iterrows(): @@ -69,8 +73,8 @@ def factoryType(d,s): dataHistDown = "%s_%sDown01sigma"%(r.nominalDataName,s['name']) # Check if syst is var (i.e. 
weight) in workspace - if ws.allVars().selectByName("%s*"%(s['name'])).getSize(): - nWeights = ws.allVars().selectByName("%s*"%(s['name'])).getSize() + if ws.allVars().selectByName("%s*sigma"%(s['name'])).getSize(): + nWeights = ws.allVars().selectByName("%s*sigma"%(s['name'])).getSize() ws.Delete() f.Close() if nWeights == 2: return "a_w" @@ -91,7 +95,7 @@ f.Close() # If never found: - print " --> [ERROR] systematic %s: cannot extract type in factoryType function. Doesn't match requirement for (anti)-symmetric weights or anti-symmetric histograms. Leaving..." + print " --> [ERROR] systematic %s: cannot extract type in factoryType function. Doesn't match requirement for (anti)-symmetric weights or anti-symmetric histograms. Leaving..." % s['name'] sys.exit(1) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -314,11 +318,12 @@ def theorySystFactory(d,systs,ftype,options,stxsMergeScheme=None,_removal=False) # Loop over systematics and add new column in dataFrame for each tier for s in systs: if s['type'] == 'constant': continue - for tier in s['tiers']: - if tier == 'mnorm': - if options.doSTXSMerging: - for mergeName in stxsMergeScheme: d["%s_%s_mnorm"%(s['name'],mergeName)] = '-' - else: d["%s_%s"%(s['name'],tier)] = '-' + if 'tiers' in s: + for tier in s['tiers']: + if tier == 'mnorm': + if options.doSTXSMerging: + for mergeName in stxsMergeScheme: d["%s_%s_mnorm"%(s['name'],mergeName)] = '-' + else: d["%s_%s"%(s['name'],tier)] = '-' # Loop over systematics and fill entries for rows which satisfy mask for s in systs: @@ -329,9 +334,10 @@ if "THU_ggH" in s['name']: mask = (d['type']=='sig')&(d['nominal_yield']!=0)&(d['proc'].str.contains('ggH')) else: mask = (d['type']=='sig')&(d['nominal_yield']!=0) # Loop over tiers and use appropriate mode for compareYield function: skip mnorm as treated 
separately below - for tier in s['tiers']: - if tier == 'mnorm': continue - d.loc[mask,"%s_%s"%(s['name'],tier)] = d[mask].apply(lambda x: compareYield(x,f,s['name'],mode=tier), axis=1) + if 'tiers' in s: + for tier in s['tiers']: + if tier == 'mnorm': continue + d.loc[mask,"%s_%s"%(s['name'],tier)] = d[mask].apply(lambda x: compareYield(x,f,s['name'],mode=tier), axis=1) # For merging STXS bins in parameter scheme: calculate mnorm systematics (merged-STXS-normalisation) # One nuisance per merge @@ -339,7 +345,7 @@ for mergeName in stxsMergeScheme: for s in systs: if s['type'] == 'constant': continue - elif 'mnorm' not in s['tiers']: continue + elif ('tiers' not in s) or ('mnorm' not in s['tiers']): continue for year in options.years.split(","): # Remove NaN entries and require specific year mask = (d['merge_%s_nominal_yield'%mergeName]==d['merge_%s_nominal_yield'%mergeName])&(d['year']==year)&(d['nominal_yield']!=0) @@ -585,4 +591,4 @@ def compareSystForEnvelope(row,systs,stier,mname=None): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Function to change syst title -def renameSyst(t,oldexp,newexp): return re.sub(oldexp,newexp,t) +def renameSyst(t,oldexp,newexp): return re.sub(oldexp,newexp,t) \ No newline at end of file diff --git a/Datacard/tools/checkYields.py b/Datacard/tools/checkYields.py new file mode 100644 index 00000000..e706ec4e --- /dev/null +++ b/Datacard/tools/checkYields.py @@ -0,0 +1,22 @@ +# USAGE: python tools/checkYields.py 2023-03-02 +import sys +from os import access,F_OK + +ext = sys.argv[1] + +fits = ["xsec","ALT_0M","ALT_0PH","ALT_L1","ALT_L1Zg"] + +allcats = 
["RECO_0J_PTH_0_10_Tag0","RECO_0J_PTH_0_10_Tag1","RECO_0J_PTH_0_10_Tag2","RECO_0J_PTH_GT10_Tag0","RECO_0J_PTH_GT10_Tag1","RECO_0J_PTH_GT10_Tag2","RECO_1J_PTH_0_60_Tag0","RECO_1J_PTH_0_60_Tag1","RECO_1J_PTH_0_60_Tag2","RECO_1J_PTH_120_200_Tag0","RECO_1J_PTH_120_200_Tag1","RECO_1J_PTH_120_200_Tag2","RECO_1J_PTH_60_120_Tag0","RECO_1J_PTH_60_120_Tag1","RECO_1J_PTH_60_120_Tag2","RECO_GE2J_PTH_0_60_Tag0","RECO_GE2J_PTH_0_60_Tag1","RECO_GE2J_PTH_0_60_Tag2","RECO_GE2J_PTH_120_200_Tag0","RECO_GE2J_PTH_120_200_Tag1","RECO_GE2J_PTH_120_200_Tag2","RECO_GE2J_PTH_60_120_Tag0","RECO_GE2J_PTH_60_120_Tag1","RECO_GE2J_PTH_60_120_Tag2","RECO_PTH_200_300_Tag0","RECO_PTH_200_300_Tag1","RECO_PTH_300_450_Tag0","RECO_PTH_300_450_Tag1","RECO_PTH_450_650_Tag0","RECO_PTH_GT650_Tag0","RECO_THQ_LEP","RECO_TTH_HAD_PTH_0_60_Tag0","RECO_TTH_HAD_PTH_0_60_Tag1","RECO_TTH_HAD_PTH_0_60_Tag2","RECO_TTH_HAD_PTH_120_200_Tag0","RECO_TTH_HAD_PTH_120_200_Tag1","RECO_TTH_HAD_PTH_120_200_Tag2","RECO_TTH_HAD_PTH_120_200_Tag3","RECO_TTH_HAD_PTH_200_300_Tag0","RECO_TTH_HAD_PTH_200_300_Tag1","RECO_TTH_HAD_PTH_200_300_Tag2","RECO_TTH_HAD_PTH_60_120_Tag0","RECO_TTH_HAD_PTH_60_120_Tag1","RECO_TTH_HAD_PTH_60_120_Tag2","RECO_TTH_HAD_PTH_GT300_Tag0","RECO_TTH_HAD_PTH_GT300_Tag1","RECO_TTH_LEP_PTH_0_60_Tag0","RECO_TTH_LEP_PTH_0_60_Tag1","RECO_TTH_LEP_PTH_0_60_Tag2","RECO_TTH_LEP_PTH_120_200_Tag0","RECO_TTH_LEP_PTH_120_200_Tag1","RECO_TTH_LEP_PTH_200_300_Tag0","RECO_TTH_LEP_PTH_60_120_Tag0","RECO_TTH_LEP_PTH_60_120_Tag1","RECO_TTH_LEP_PTH_60_120_Tag2","RECO_TTH_LEP_PTH_GT300_Tag0","RECO_VBFLIKEGGH_Tag0","RECO_VBFLIKEGGH_Tag1","RECO_VBFTOPO_ACGGH_Tag0","RECO_VBFTOPO_ACGGH_Tag1","RECO_VBFTOPO_ACVBFBSM_Tag0","RECO_VBFTOPO_ACVBFBSM_Tag1","RECO_VBFTOPO_ACVBFSM_Tag0","RECO_VBFTOPO_VHHAD_Tag0","RECO_VBFTOPO_VHHAD_Tag1","RECO_VH_MET_Tag0","RECO_VH_MET_Tag1","RECO_VH_MET_Tag2","RECO_WH_LEP_PTV_0_75_Tag0","RECO_WH_LEP_PTV_0_75_Tag1","RECO_WH_LEP_PTV_75_150_Tag0","RECO_WH_LEP_PTV_75_150_Tag1","RECO_WH_LEP_PTV_GT150_Tag0","RECO_ZH_L
EP_Tag0","RECO_ZH_LEP_Tag1"] + +for fit in fits: + print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + print "--> Fit type: ",fit + print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + base = "yields_{ext}_{fit}".format(ext=ext,fit=fit) + for cat in allcats: + fname = "yields_{ext}_{fit}/{cat}.pkl".format(ext=ext,fit=fit,cat=cat) + if not access(fname,F_OK): + print "File ",fname," not present!" + print "\n\n" + + diff --git a/Datacard/tools/submissionTools.py b/Datacard/tools/submissionTools.py index 35eaf07c..35909053 100644 --- a/Datacard/tools/submissionTools.py +++ b/Datacard/tools/submissionTools.py @@ -69,7 +69,7 @@ def writeSubFiles(_opts): _fsub.close() # SGE... - if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "local" ): + if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "Rome")|(_opts['batch'] == "local" ): _executable = "sub_yields_%s"%_opts['ext'] for cidx in range(_opts['nCats']): @@ -92,8 +92,9 @@ def submitFiles(_opts): print " --> Finished submitting files" # SGE - elif _opts['batch'] in ['IC','SGE']: + elif _opts['batch'] in ['IC','SGE','Rome']: _executable = "sub_yields_%s"%_opts['ext'] + _subcmd = 'bsub' if _opts['batch']=='Rome' else 'qsub' # Extract job opts jobOptsStr = _opts['jobOpts'] @@ -101,7 +102,7 @@ def submitFiles(_opts): for cidx in range(_opts['nCats']): c = _opts['cats'].split(",")[cidx] _subfile = "%s/%s_%s"%(_jobdir,_executable,c) - cmdLine = "qsub -q hep.q %s -o %s.log -e %s.err %s.sh"%(jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) print " --> Finished submitting files" diff --git a/Datacard/tools/writeToDatacard.py b/Datacard/tools/writeToDatacard.py index b1f00a45..43f0173d 100644 --- a/Datacard/tools/writeToDatacard.py +++ b/Datacard/tools/writeToDatacard.py @@ -111,8 +111,9 @@ def 
writeSystematic(f,d,s,options,stxsMergeScheme=None,scaleCorrScheme=None): for cat in d.cat.unique(): for ir,r in d[d['cat']==cat].iterrows(): if r['proc'] == "data_obs": continue + k = "%s%s%s"%(s['name'],mergeStr,tierStr) # Extract value and add to line (with checks) - sval = r["%s%s%s"%(s['name'],mergeStr,tierStr)] + sval = "0" if k not in r else r[k] lsyst = addSyst(lsyst,sval,stitle,r['proc'],cat) # Remove final space from line and add to file f.write("%s\n"%lsyst[:-1]) diff --git a/Plots/cats.json b/Plots/cats.json index 79e62fe3..a9359cc1 100644 --- a/Plots/cats.json +++ b/Plots/cats.json @@ -1,77 +1,12 @@ { - "all":"All Categories", - "wall":"#splitline{All Categories}{S/(S+B) weighted}", - "RECO_0J_PTH_0_10_Tag0":"0J low p_{T}^{#gamma#gamma} Tag0", - "RECO_0J_PTH_0_10_Tag1":"0J low p_{T}^{#gamma#gamma} Tag1", - "RECO_0J_PTH_0_10_Tag2":"0J low p_{T}^{#gamma#gamma} Tag2", - "RECO_0J_PTH_GT10_Tag0":"0J high p_{T}^{#gamma#gamma} Tag0", - "RECO_0J_PTH_GT10_Tag1":"0J high p_{T}^{#gamma#gamma} Tag1", - "RECO_0J_PTH_GT10_Tag2":"0J high p_{T}^{#gamma#gamma} Tag2", - "RECO_1J_PTH_0_60_Tag0":"1J low p_{T}^{#gamma#gamma} Tag0", - "RECO_1J_PTH_0_60_Tag1":"1J low p_{T}^{#gamma#gamma} Tag1", - "RECO_1J_PTH_0_60_Tag2":"1J low p_{T}^{#gamma#gamma} Tag2", - "RECO_1J_PTH_60_120_Tag0":"1J med p_{T}^{#gamma#gamma} Tag0", - "RECO_1J_PTH_60_120_Tag1":"1J med p_{T}^{#gamma#gamma} Tag1", - "RECO_1J_PTH_60_120_Tag2":"1J med p_{T}^{#gamma#gamma} Tag2", - "RECO_1J_PTH_120_200_Tag0":"1J high p_{T}^{#gamma#gamma} Tag0", - "RECO_1J_PTH_120_200_Tag1":"1J high p_{T}^{#gamma#gamma} Tag1", - "RECO_1J_PTH_120_200_Tag2":"1J high p_{T}^{#gamma#gamma} Tag2", - "RECO_GE2J_PTH_0_60_Tag0":"#geq2J low p_{T}^{#gamma#gamma} Tag0", - "RECO_GE2J_PTH_0_60_Tag1":"#geq2J low p_{T}^{#gamma#gamma} Tag1", - "RECO_GE2J_PTH_0_60_Tag2":"#geq2J low p_{T}^{#gamma#gamma} Tag2", - "RECO_GE2J_PTH_60_120_Tag0":"#geq2J med p_{T}^{#gamma#gamma} Tag0", - "RECO_GE2J_PTH_60_120_Tag1":"#geq2J med p_{T}^{#gamma#gamma} Tag1", 
- "RECO_GE2J_PTH_60_120_Tag2":"#geq2J med p_{T}^{#gamma#gamma} Tag2", - "RECO_GE2J_PTH_120_200_Tag0":"#geq2J high p_{T}^{#gamma#gamma} Tag0", - "RECO_GE2J_PTH_120_200_Tag1":"#geq2J high p_{T}^{#gamma#gamma} Tag1", - "RECO_GE2J_PTH_120_200_Tag2":"#geq2J high p_{T}^{#gamma#gamma} Tag2", - "RECO_PTH_200_300_Tag0":"BSM low p_{T}^{#gamma#gamma} Tag0", - "RECO_PTH_200_300_Tag1":"BSM low p_{T}^{#gamma#gamma} Tag1", - "RECO_PTH_300_450_Tag0":"BSM med-low p_{T}^{#gamma#gamma} Tag0", - "RECO_PTH_300_450_Tag1":"BSM med-low p_{T}^{#gamma#gamma} Tag1", - "RECO_PTH_450_650_Tag0":"BSM med-high p_{T}^{#gamma#gamma}", - "RECO_PTH_GT650_Tag0":"BSM high p_{T}^{#gamma#gamma}", - "RECO_VBFTOPO_VHHAD_Tag0":"qqH VH-like Tag0", - "RECO_VBFTOPO_VHHAD_Tag1":"qqH VH-like Tag1", - "RECO_VBFTOPO_JET3VETO_LOWMJJ_Tag0":"qqH low m_{jj} low p_{T}^{Hjj} Tag0", - "RECO_VBFTOPO_JET3VETO_LOWMJJ_Tag1":"qqH low m_{jj} low p_{T}^{Hjj} Tag1", - "RECO_VBFTOPO_JET3VETO_HIGHMJJ_Tag0":"qqH high m_{jj} low p_{T}^{Hjj} Tag0", - "RECO_VBFTOPO_JET3VETO_HIGHMJJ_Tag1":"qqH high m_{jj} low p_{T}^{Hjj} Tag1", - "RECO_VBFTOPO_JET3_LOWMJJ_Tag0":"qqH low m_{jj} high p_{T}^{Hjj} Tag0", - "RECO_VBFTOPO_JET3_LOWMJJ_Tag1":"qqH low m_{jj} high p_{T}^{Hjj} Tag1", - "RECO_VBFTOPO_JET3_HIGHMJJ_Tag0":"qqH high m_{jj} high p_{T}^{Hjj} Tag0", - "RECO_VBFTOPO_JET3_HIGHMJJ_Tag1":"qqH high m_{jj} high p_{T}^{Hjj} Tag1", - "RECO_VBFTOPO_BSM_Tag0":"qqH BSM Tag0", - "RECO_VBFTOPO_BSM_Tag1":"qqH BSM Tag1", - "RECO_VBFLIKEGGH_Tag0":"ggH VBF-like Tag0", - "RECO_VBFLIKEGGH_Tag1":"ggH VBF-like Tag1", - "RECO_TTH_HAD_PTH_0_60_Tag0":"ttH had p_{T}^{#gamma#gamma} < 60 Tag0", - "RECO_TTH_HAD_PTH_0_60_Tag1":"ttH had p_{T}^{#gamma#gamma} < 60 Tag1", - "RECO_TTH_HAD_PTH_60_120_Tag0":"ttH had 60 < p_{T}^{#gamma#gamma} < 120 Tag0", - "RECO_TTH_HAD_PTH_60_120_Tag1":"ttH had 60 p_{T}^{#gamma#gamma} < 120 Tag1", - "RECO_TTH_HAD_PTH_120_200_Tag0":"ttH had 120 < p_{T}^{#gamma#gamma} < 200 Tag0", - "RECO_TTH_HAD_PTH_120_200_Tag1":"ttH had 120 < 
p_{T}^{#gamma#gamma} < 200 Tag1", - "RECO_TTH_HAD_PTH_200_300_Tag0":"ttH had 200 < p_{T}^{#gamma#gamma} < 300 Tag0", - "RECO_TTH_HAD_PTH_200_300_Tag1":"ttH had 200 < p_{T}^{#gamma#gamma} < 300 Tag1", - "RECO_TTH_HAD_PTH_GT300_Tag0":"ttH had p_{T}^{#gamma#gamma} > 300 Tag0", - "RECO_TTH_HAD_PTH_GT300_Tag1":"ttH had p_{T}^{#gamma#gamma} > 300 Tag1", - "RECO_TTH_LEP_PTH_0_60_Tag0":"ttH lep p_{T}^{#gamma#gamma} < 60 Tag0", - "RECO_TTH_LEP_PTH_0_60_Tag1":"ttH lep p_{T}^{#gamma#gamma} < 60 Tag1", - "RECO_TTH_LEP_PTH_60_120_Tag0":"ttH lep 60 < p_{T}^{#gamma#gamma} < 120 Tag0", - "RECO_TTH_LEP_PTH_60_120_Tag1":"ttH lep 60 p_{T}^{#gamma#gamma} < 120 Tag1", - "RECO_TTH_LEP_PTH_120_200_Tag0":"ttH lep 120 < p_{T}^{#gamma#gamma} < 200 Tag0", - "RECO_TTH_LEP_PTH_120_200_Tag1":"ttH lep 120 < p_{T}^{#gamma#gamma} < 200 Tag1", - "RECO_TTH_LEP_PTH_200_300_Tag0":"ttH lep 200 < p_{T}^{#gamma#gamma} < 300 Tag0", - "RECO_TTH_LEP_PTH_GT300_Tag0":"ttH lep p_{T}^{#gamma#gamma} > 300 Tag0", - "RECO_VH_MET_Tag0":"VH MET Tag0", - "RECO_VH_MET_Tag1":"VH MET Tag1", - "RECO_VH_MET_Tag2":"VH MET Tag2", - "RECO_WH_LEP_PTV_0_75_Tag0":"WH lep p_{T}^{V} < 75 Tag0", - "RECO_WH_LEP_PTV_0_75_Tag1":"WH lep p_{T}^{V} < 75 Tag1", - "RECO_WH_LEP_PTV_75_150_Tag0":"WH lep 75 < p_{T}^{V} < 150 Tag0", - "RECO_WH_LEP_PTV_75_150_Tag1":"WH lep 75 < p_{T}^{V} < 150 Tag1", - "RECO_WH_LEP_PTV_GT150_Tag0":"WH lep p_{T}^{V} > 150 Tag0", - "RECO_ZH_LEP_Tag0":"ZH lep Tag0", - "RECO_ZH_LEP_Tag1":"ZH lep Tag1", - "RECO_THQ_LEP":"tHq lep" + "all":"All Categories", + "wall":"#splitline{All Categories}{S/(S+B) weighted}", + "RECO_VBFTOPO_ACGGH_Tag0":"ggH low purity", + "RECO_VBFTOPO_ACGGH_Tag1":"ggH high purity", + "RECO_VBFTOPO_ACVBFSM_Tag0":"qqH SM-like", + "RECO_VBFTOPO_ACVBFBSM_Tag0":"qqH BSM-like high purity", + "RECO_VBFTOPO_ACVBFBSM_Tag1":"qqH BSM-like med purity", + "RECO_VBFTOPO_VHHAD_Tag0":"qqH VH Tag0", + "RECO_VBFTOPO_VHHAD_Tag1":"qqH VH Tag1", } + diff --git a/Plots/cats_latex.json b/Plots/cats_latex.json index 
d5151913..544b0ed7 100644 --- a/Plots/cats_latex.json +++ b/Plots/cats_latex.json @@ -31,6 +31,11 @@ "RECO_PTH_GT650_Tag0":"BSM high $\\ptgg$", "RECO_VBFTOPO_VHHAD_Tag0":"qqH VH-like Tag0", "RECO_VBFTOPO_VHHAD_Tag1":"qqH VH-like Tag1", + "RECO_VBFTOPO_ACGGH_Tag0":"qqH ggH-like Tag0", + "RECO_VBFTOPO_ACGGH_Tag1":"qqH ggH-like Tag1", + "RECO_VBFTOPO_ACVBFSM_Tag0":"qqH SM-like", + "RECO_VBFTOPO_ACVBFBSM_Tag0":"qqH BSM-like Tag0", + "RECO_VBFTOPO_ACVBFBSM_Tag1":"qqH BSM-like Tag1", "RECO_VBFTOPO_JET3VETO_LOWMJJ_Tag0":"qqH low $\\mjj$ low $\\ptHjj$ Tag0", "RECO_VBFTOPO_JET3VETO_LOWMJJ_Tag1":"qqH low $\\mjj$ low $\\ptHjj$ Tag1", "RECO_VBFTOPO_JET3VETO_HIGHMJJ_Tag0":"qqH high $\\mjj$ low $\\ptHjj$ Tag0", @@ -51,24 +56,26 @@ "RECO_TTH_HAD_PTH_60_120_Tag1":"ttH had med-low $\\ptgg$ Tag1", "RECO_TTH_HAD_PTH_60_120_Tag2":"ttH had med-low $\\ptgg$ Tag2", "RECO_TTH_HAD_PTH_60_120_Tag3":"ttH had med-low $\\ptgg$ Tag3", - "RECO_TTH_HAD_PTH_120_200_Tag0":"ttH had med-high $\\ptgg$ Tag0", - "RECO_TTH_HAD_PTH_120_200_Tag1":"ttH had med-high $\\ptgg$ Tag1", - "RECO_TTH_HAD_PTH_120_200_Tag2":"ttH had med-high $\\ptgg$ Tag2", - "RECO_TTH_HAD_PTH_120_200_Tag3":"ttH had med-high $\\ptgg$ Tag3", - "RECO_TTH_HAD_PTH_GT200_Tag0":"ttH had high $\\ptgg$ Tag0", - "RECO_TTH_HAD_PTH_GT200_Tag1":"ttH had high $\\ptgg$ Tag1", - "RECO_TTH_HAD_PTH_GT200_Tag2":"ttH had high $\\ptgg$ Tag2", - "RECO_TTH_HAD_PTH_GT200_Tag3":"ttH had high $\\ptgg$ Tag3", + "RECO_TTH_HAD_PTH_120_200_Tag0":"ttH had med $\\ptgg$ Tag0", + "RECO_TTH_HAD_PTH_120_200_Tag1":"ttH had med $\\ptgg$ Tag1", + "RECO_TTH_HAD_PTH_120_200_Tag2":"ttH had med $\\ptgg$ Tag2", + "RECO_TTH_HAD_PTH_120_200_Tag3":"ttH had med $\\ptgg$ Tag3", + "RECO_TTH_HAD_PTH_200_300_Tag0":"ttH had med-high $\\ptgg$ Tag0", + "RECO_TTH_HAD_PTH_200_300_Tag1":"ttH had med-high $\\ptgg$ Tag1", + "RECO_TTH_HAD_PTH_200_300_Tag2":"ttH had med-high $\\ptgg$ Tag2", + "RECO_TTH_HAD_PTH_GT300_Tag0":"ttH had high $\\ptgg$ Tag0", + "RECO_TTH_HAD_PTH_GT300_Tag1":"ttH had 
high $\\ptgg$ Tag1", + "RECO_TTH_HAD_PTH_GT300_Tag2":"ttH had high $\\ptgg$ Tag2", "RECO_VH_MET_Tag0":"VH MET Tag0", "RECO_VH_MET_Tag1":"VH MET Tag1", - "RECO_WH_LEP_LOW_Tag0":"WH lep low $p_{T}^{V}$ Tag0", - "RECO_WH_LEP_LOW_Tag1":"WH lep low $p_{T}^{V}$ Tag1", - "RECO_WH_LEP_LOW_Tag2":"WH lep low $p_{T}^{V}$ Tag2", - "RECO_WH_LEP_HIGH_Tag0":"WH lep high $p_{T}^{V}$ Tag0", - "RECO_WH_LEP_HIGH_Tag1":"WH lep high $p_{T}^{V}$ Tag1", - "RECO_WH_LEP_HIGH_Tag2":"WH lep high $p_{T}^{V}$ Tag2", - "RECO_ZH_LEP_Tag0":"ZH lep Tag0", - "RECO_ZH_LEP_Tag1":"ZH lep Tag1", + "RECO_VH_MET_Tag2":"VH MET Tag2", + "RECO_VH_MET_Tag3":"VH MET Tag3", + "RECO_WH_LEP_Tag0":"WH-lep Tag0", + "RECO_WH_LEP_Tag1":"WH-lep Tag1", + "RECO_WH_LEP_Tag2":"WH-lep Tag2", + "RECO_WH_LEP_Tag3":"WH-lep Tag3", + "RECO_ZH_LEP_Tag0":"ZH-lep Tag0", + "RECO_ZH_LEP_Tag1":"ZH-lep Tag1", "RECO_TTH_LEP_PTH_0_60_Tag0":"ttH lep low $\\ptgg$ Tag0", "RECO_TTH_LEP_PTH_0_60_Tag1":"ttH lep low $\\ptgg$ Tag1", "RECO_TTH_LEP_PTH_0_60_Tag2":"ttH lep low $\\ptgg$ Tag2", @@ -77,14 +84,17 @@ "RECO_TTH_LEP_PTH_60_120_Tag1":"ttH lep med-low $\\ptgg$ Tag1", "RECO_TTH_LEP_PTH_60_120_Tag2":"ttH lep med-low $\\ptgg$ Tag2", "RECO_TTH_LEP_PTH_60_120_Tag3":"ttH lep med-low $\\ptgg$ Tag3", - "RECO_TTH_LEP_PTH_120_200_Tag0":"ttH lep med-high $\\ptgg$ Tag0", - "RECO_TTH_LEP_PTH_120_200_Tag1":"ttH lep med-high $\\ptgg$ Tag1", - "RECO_TTH_LEP_PTH_120_200_Tag2":"ttH lep med-high $\\ptgg$ Tag2", - "RECO_TTH_LEP_PTH_120_200_Tag3":"ttH lep med-high $\\ptgg$ Tag3", - "RECO_TTH_LEP_PTH_GT200_Tag0":"ttH lep high $\\ptgg$ Tag0", - "RECO_TTH_LEP_PTH_GT200_Tag1":"ttH lep high $\\ptgg$ Tag1", - "RECO_TTH_LEP_PTH_GT200_Tag2":"ttH lep high $\\ptgg$ Tag2", - "RECO_TTH_LEP_PTH_GT200_Tag3":"ttH lep high $\\ptgg$ Tag3", + "RECO_TTH_LEP_PTH_120_200_Tag0":"ttH lep med $\\ptgg$ Tag0", + "RECO_TTH_LEP_PTH_120_200_Tag1":"ttH lep med $\\ptgg$ Tag1", + "RECO_TTH_LEP_PTH_120_200_Tag2":"ttH lep med $\\ptgg$ Tag2", + "RECO_TTH_LEP_PTH_120_200_Tag3":"ttH lep med 
$\\ptgg$ Tag3", + "RECO_TTH_LEP_PTH_200_300_Tag0":"ttH lep med-high $\\ptgg$ Tag0", + "RECO_TTH_LEP_PTH_GT300_Tag0":"ttH lep high $\\ptgg$ Tag0", + "RECO_VBFTOPO_ACVHHADBSM_Tag0":"VH had BSM-like Tag0", + "RECO_VBFTOPO_ACVHHADBSM_Tag1":"VH had BSM-like Tag1", + "RECO_VBFTOPO_ACVHHADSM_Tag0":"VH had SM-like Tag0", + "RECO_VBFTOPO_ACVHHADSM_Tag1":"VH had SM-like Tag1", + "RECO_VBFTOPO_ACVHHADSM_Tag2":"VH had SM-like Tag2", "RECO_THQ_LEP":"tHq lep" } diff --git a/Plots/getCatInfo.py b/Plots/getCatInfo.py index ea80f55a..b3cd8624 100644 --- a/Plots/getCatInfo.py +++ b/Plots/getCatInfo.py @@ -83,6 +83,7 @@ def get_options(): catinfo_data = pd.DataFrame(columns=_columns) for c in cats: + print "--> processing cat: ",c sbpdf, bpdf = sb_model.getPdf(c), b_model.getPdf(c) h_sbpdf_tmp = sbpdf.createHistogram("h_sb_tmp_pdfNBins_%s"%c,xvar,ROOT.RooFit.Binning(opt.pdfNBins)) h_bpdf_tmp = bpdf.createHistogram("h_b_tmp_pdfNBins_%s"%c,xvar,ROOT.RooFit.Binning(opt.pdfNBins)) diff --git a/Plots/jcp.json b/Plots/jcp.json new file mode 100644 index 00000000..73f6e30a --- /dev/null +++ b/Plots/jcp.json @@ -0,0 +1,15 @@ +{ + "GG2H":"ggH", + "VBF":"qqH", + "TTH":"ttH", + "VH":"VH", + "VBF_ALTL1":"VBF f_{#Lambda 1}=1", + "VBF_ALTL1f05":"VBF f_{#Lambda 1}=0.5", + "VBF_ALTL1Zg":"VBF f_{#Lambda 1}^{Z#gamma}=1", + "VBF_ALTL1Zgf05":"VBF f_{#Lambda 1}^{Z#gamma}=0.5", + "VBF_ALT0PH":"VBF f_{a2}=1", + "VBF_ALT0PHf05":"VBF f_{a2}=0.5", + "VBF_ALT0M":"VBF f_{a3}=1", + "VBF_ALT0Mf05":"VBF f_{a3}=0.5" +} + diff --git a/Plots/jsons/catsWeights_sospb_test_with_bands_CMS_hgg_mass.json b/Plots/jsons/catsWeights_sospb_test_with_bands_CMS_hgg_mass.json new file mode 100644 index 00000000..5cb21a7b --- /dev/null +++ b/Plots/jsons/catsWeights_sospb_test_with_bands_CMS_hgg_mass.json @@ -0,0 +1 @@ +{"VBFTag_7": 2.6006465078274146, "VBFTag_6": 2.319563028701853, "VBFTag_5": 2.4940202234622872, "VBFTag_3": 0.29448678663126204, "VBFTag_1": 0.425030857434104} \ No newline at end of file diff --git 
a/Plots/jsons/catsWeights_sospb_xsec_with_bands_CMS_hgg_mass.json b/Plots/jsons/catsWeights_sospb_xsec_with_bands_CMS_hgg_mass.json new file mode 100644 index 00000000..d8388cdf --- /dev/null +++ b/Plots/jsons/catsWeights_sospb_xsec_with_bands_CMS_hgg_mass.json @@ -0,0 +1 @@ +{"RECO_TTH_HAD_PTH_200_300_Tag2": 3.7047637384638543, "RECO_TTH_LEP_PTH_GT300_Tag0": 7.017515075465934, "RECO_1J_PTH_60_120_Tag1": 1.3322970504758085, "RECO_1J_PTH_60_120_Tag0": 2.917866028533023, "RECO_1J_PTH_60_120_Tag2": 0.4621811327272818, "RECO_GE2J_PTH_60_120_Tag2": 0.38551341674564615, "RECO_GE2J_PTH_60_120_Tag0": 2.343160442423521, "RECO_GE2J_PTH_60_120_Tag1": 1.4138556284584916, "RECO_PTH_GT650_Tag0": 2.6631291048016403, "RECO_TTH_HAD_PTH_GT300_Tag0": 11.879305897022968, "RECO_TTH_HAD_PTH_GT300_Tag1": 5.6356308243418525, "RECO_PTH_300_450_Tag0": 5.551464576784765, "RECO_PTH_300_450_Tag1": 0.8173867590883662, "RECO_TTH_LEP_PTH_0_60_Tag0": 10.330206674476488, "RECO_GE2J_PTH_0_60_Tag2": 0.17068710890988975, "RECO_GE2J_PTH_0_60_Tag1": 0.4612694277880015, "RECO_GE2J_PTH_0_60_Tag0": 0.6402852071782084, "RECO_TTH_LEP_PTH_200_300_Tag0": 9.169321164260774, "RECO_WH_LEP_Tag2": 1.3120162946444167, "RECO_VBFTOPO_ACVHHADBSM_Tag0": 2.5794877314481504, "RECO_ZH_LEP_Tag0": 6.883498508763253, "RECO_VH_MET_Tag0": 4.707554373290021, "RECO_VH_MET_Tag1": 3.5967426633377704, "RECO_VH_MET_Tag2": 2.11385748585913, "RECO_VH_MET_Tag3": 3.664641115249901, "RECO_VH_MET_Tag4": 1.8024147009063236, "RECO_VBFTOPO_ACVBFSM_Tag0": 5.3057684037954385, "RECO_VBFTOPO_ACVBFBSM_Tag0": 4.892889948160094, "RECO_VBFTOPO_ACVBFBSM_Tag1": 6.0263450950898365, "RECO_0J_PTH_0_10_Tag0": 0.9075510154001846, "RECO_0J_PTH_0_10_Tag1": 0.542509393293961, "RECO_0J_PTH_0_10_Tag2": 0.2548499709522199, "RECO_VBFTOPO_ACVHHADSM_Tag2": 0.6406535661185703, "RECO_WH_LEP_Tag3": 1.5078632197081947, "RECO_VBFTOPO_ACVHHADSM_Tag0": 1.3539698351911413, "RECO_VBFTOPO_ACVHHADSM_Tag1": 0.8789066552829014, "RECO_PTH_450_650_Tag0": 3.1052418376245354, 
"RECO_TTH_LEP_PTH_120_200_Tag1": 4.966330570683696, "RECO_GE2J_PTH_120_200_Tag1": 1.8004039468343491, "RECO_GE2J_PTH_120_200_Tag0": 2.611977741741331, "RECO_GE2J_PTH_120_200_Tag2": 0.6907656395207787, "RECO_TTH_HAD_PTH_120_200_Tag0": 9.437657401693743, "RECO_TTH_HAD_PTH_120_200_Tag1": 4.838773176484296, "RECO_TTH_HAD_PTH_120_200_Tag2": 3.420059907145166, "RECO_TTH_HAD_PTH_120_200_Tag3": 2.0251079332692568, "RECO_1J_PTH_0_60_Tag0": 1.7406635300165416, "RECO_1J_PTH_0_60_Tag1": 0.7830433748233497, "RECO_1J_PTH_0_60_Tag2": 0.3037516111517367, "RECO_TTH_LEP_PTH_120_200_Tag0": 9.572455000888066, "RECO_PTH_200_300_Tag1": 1.7612850904900539, "RECO_PTH_200_300_Tag0": 5.254146633366562, "RECO_TTH_HAD_PTH_200_300_Tag0": 10.845809935008266, "RECO_ZH_LEP_Tag1": 0.7857777715415285, "RECO_VBFTOPO_ACGGH_Tag0": 0.944056630426432, "RECO_TTH_LEP_PTH_60_120_Tag1": 6.709946177409349, "RECO_TTH_LEP_PTH_60_120_Tag0": 10.200902062596917, "RECO_VBFTOPO_ACGGH_Tag1": 0.5671404183112475, "RECO_TTH_LEP_PTH_60_120_Tag2": 6.298395034717338, "RECO_0J_PTH_GT10_Tag2": 0.3016436003117112, "RECO_TTH_HAD_PTH_200_300_Tag1": 7.8679384385460835, "RECO_0J_PTH_GT10_Tag0": 1.1392647576974821, "RECO_0J_PTH_GT10_Tag1": 0.7753870672268333, "RECO_1J_PTH_120_200_Tag0": 4.136135329240777, "RECO_1J_PTH_120_200_Tag1": 1.9047209226873127, "RECO_1J_PTH_120_200_Tag2": 1.1487068445570243, "RECO_TTH_HAD_PTH_60_120_Tag1": 5.140266096088913, "RECO_TTH_HAD_PTH_60_120_Tag0": 10.174580098305626, "RECO_TTH_LEP_PTH_0_60_Tag2": 2.610339246883248, "RECO_TTH_HAD_PTH_60_120_Tag2": 3.7733821406944634, "RECO_THQ_LEP": 1.6480382670673261, "RECO_VBFTOPO_ACVHHADBSM_Tag1": 2.334466953356854, "RECO_WH_LEP_Tag0": 6.319472942240827, "RECO_TTH_HAD_PTH_0_60_Tag0": 7.434438828114083, "RECO_TTH_HAD_PTH_0_60_Tag1": 3.8936304847786607, "RECO_TTH_HAD_PTH_0_60_Tag2": 2.4020095117241533, "RECO_WH_LEP_Tag1": 6.282025187696476, "RECO_TTH_LEP_PTH_0_60_Tag1": 8.363436346008132} \ No newline at end of file diff --git 
a/Plots/jsons/catsWeights_sospbtest_with_bands_CMS_hgg_mass.json b/Plots/jsons/catsWeights_sospbtest_with_bands_CMS_hgg_mass.json new file mode 100644 index 00000000..73a9eecc --- /dev/null +++ b/Plots/jsons/catsWeights_sospbtest_with_bands_CMS_hgg_mass.json @@ -0,0 +1 @@ +{"VBFTag_7": 2.616452312266514, "VBFTag_6": 2.254294293085249, "VBFTag_5": 2.5100261563589465, "VBFTag_3": 0.28551814995518865, "VBFTag_1": 0.4231163105592278} \ No newline at end of file diff --git a/Plots/jsons/catsWeights_sospbxsec_CMS_hgg_mass.json b/Plots/jsons/catsWeights_sospbxsec_CMS_hgg_mass.json new file mode 100644 index 00000000..d35b2379 --- /dev/null +++ b/Plots/jsons/catsWeights_sospbxsec_CMS_hgg_mass.json @@ -0,0 +1 @@ +{"RECO_VH_MET_Tag0": 1.0} \ No newline at end of file diff --git a/Plots/jsons/catsWeights_sospbxses_CMS_hgg_mass.json b/Plots/jsons/catsWeights_sospbxses_CMS_hgg_mass.json new file mode 100644 index 00000000..d8388cdf --- /dev/null +++ b/Plots/jsons/catsWeights_sospbxses_CMS_hgg_mass.json @@ -0,0 +1 @@ +{"RECO_TTH_HAD_PTH_200_300_Tag2": 3.7047637384638543, "RECO_TTH_LEP_PTH_GT300_Tag0": 7.017515075465934, "RECO_1J_PTH_60_120_Tag1": 1.3322970504758085, "RECO_1J_PTH_60_120_Tag0": 2.917866028533023, "RECO_1J_PTH_60_120_Tag2": 0.4621811327272818, "RECO_GE2J_PTH_60_120_Tag2": 0.38551341674564615, "RECO_GE2J_PTH_60_120_Tag0": 2.343160442423521, "RECO_GE2J_PTH_60_120_Tag1": 1.4138556284584916, "RECO_PTH_GT650_Tag0": 2.6631291048016403, "RECO_TTH_HAD_PTH_GT300_Tag0": 11.879305897022968, "RECO_TTH_HAD_PTH_GT300_Tag1": 5.6356308243418525, "RECO_PTH_300_450_Tag0": 5.551464576784765, "RECO_PTH_300_450_Tag1": 0.8173867590883662, "RECO_TTH_LEP_PTH_0_60_Tag0": 10.330206674476488, "RECO_GE2J_PTH_0_60_Tag2": 0.17068710890988975, "RECO_GE2J_PTH_0_60_Tag1": 0.4612694277880015, "RECO_GE2J_PTH_0_60_Tag0": 0.6402852071782084, "RECO_TTH_LEP_PTH_200_300_Tag0": 9.169321164260774, "RECO_WH_LEP_Tag2": 1.3120162946444167, "RECO_VBFTOPO_ACVHHADBSM_Tag0": 2.5794877314481504, 
"RECO_ZH_LEP_Tag0": 6.883498508763253, "RECO_VH_MET_Tag0": 4.707554373290021, "RECO_VH_MET_Tag1": 3.5967426633377704, "RECO_VH_MET_Tag2": 2.11385748585913, "RECO_VH_MET_Tag3": 3.664641115249901, "RECO_VH_MET_Tag4": 1.8024147009063236, "RECO_VBFTOPO_ACVBFSM_Tag0": 5.3057684037954385, "RECO_VBFTOPO_ACVBFBSM_Tag0": 4.892889948160094, "RECO_VBFTOPO_ACVBFBSM_Tag1": 6.0263450950898365, "RECO_0J_PTH_0_10_Tag0": 0.9075510154001846, "RECO_0J_PTH_0_10_Tag1": 0.542509393293961, "RECO_0J_PTH_0_10_Tag2": 0.2548499709522199, "RECO_VBFTOPO_ACVHHADSM_Tag2": 0.6406535661185703, "RECO_WH_LEP_Tag3": 1.5078632197081947, "RECO_VBFTOPO_ACVHHADSM_Tag0": 1.3539698351911413, "RECO_VBFTOPO_ACVHHADSM_Tag1": 0.8789066552829014, "RECO_PTH_450_650_Tag0": 3.1052418376245354, "RECO_TTH_LEP_PTH_120_200_Tag1": 4.966330570683696, "RECO_GE2J_PTH_120_200_Tag1": 1.8004039468343491, "RECO_GE2J_PTH_120_200_Tag0": 2.611977741741331, "RECO_GE2J_PTH_120_200_Tag2": 0.6907656395207787, "RECO_TTH_HAD_PTH_120_200_Tag0": 9.437657401693743, "RECO_TTH_HAD_PTH_120_200_Tag1": 4.838773176484296, "RECO_TTH_HAD_PTH_120_200_Tag2": 3.420059907145166, "RECO_TTH_HAD_PTH_120_200_Tag3": 2.0251079332692568, "RECO_1J_PTH_0_60_Tag0": 1.7406635300165416, "RECO_1J_PTH_0_60_Tag1": 0.7830433748233497, "RECO_1J_PTH_0_60_Tag2": 0.3037516111517367, "RECO_TTH_LEP_PTH_120_200_Tag0": 9.572455000888066, "RECO_PTH_200_300_Tag1": 1.7612850904900539, "RECO_PTH_200_300_Tag0": 5.254146633366562, "RECO_TTH_HAD_PTH_200_300_Tag0": 10.845809935008266, "RECO_ZH_LEP_Tag1": 0.7857777715415285, "RECO_VBFTOPO_ACGGH_Tag0": 0.944056630426432, "RECO_TTH_LEP_PTH_60_120_Tag1": 6.709946177409349, "RECO_TTH_LEP_PTH_60_120_Tag0": 10.200902062596917, "RECO_VBFTOPO_ACGGH_Tag1": 0.5671404183112475, "RECO_TTH_LEP_PTH_60_120_Tag2": 6.298395034717338, "RECO_0J_PTH_GT10_Tag2": 0.3016436003117112, "RECO_TTH_HAD_PTH_200_300_Tag1": 7.8679384385460835, "RECO_0J_PTH_GT10_Tag0": 1.1392647576974821, "RECO_0J_PTH_GT10_Tag1": 0.7753870672268333, "RECO_1J_PTH_120_200_Tag0": 
4.136135329240777, "RECO_1J_PTH_120_200_Tag1": 1.9047209226873127, "RECO_1J_PTH_120_200_Tag2": 1.1487068445570243, "RECO_TTH_HAD_PTH_60_120_Tag1": 5.140266096088913, "RECO_TTH_HAD_PTH_60_120_Tag0": 10.174580098305626, "RECO_TTH_LEP_PTH_0_60_Tag2": 2.610339246883248, "RECO_TTH_HAD_PTH_60_120_Tag2": 3.7733821406944634, "RECO_THQ_LEP": 1.6480382670673261, "RECO_VBFTOPO_ACVHHADBSM_Tag1": 2.334466953356854, "RECO_WH_LEP_Tag0": 6.319472942240827, "RECO_TTH_HAD_PTH_0_60_Tag0": 7.434438828114083, "RECO_TTH_HAD_PTH_0_60_Tag1": 3.8936304847786607, "RECO_TTH_HAD_PTH_0_60_Tag2": 2.4020095117241533, "RECO_WH_LEP_Tag1": 6.282025187696476, "RECO_TTH_LEP_PTH_0_60_Tag1": 8.363436346008132} \ No newline at end of file diff --git a/Plots/makeResultsTables.py b/Plots/makeResultsTables.py index 76e5ae9d..4932327d 100644 --- a/Plots/makeResultsTables.py +++ b/Plots/makeResultsTables.py @@ -21,6 +21,7 @@ def leave(): params['stage1p2_maximal'] = ['r_ggH_0J_low', 'r_ggH_0J_high', 'r_ggH_1J_low', 'r_ggH_1J_med', 'r_ggH_1J_high', 'r_ggH_2J_low', 'r_ggH_2J_med', 'r_ggH_2J_high', 'r_ggH_VBFlike', 'r_ggH_BSM', 'r_qqH_VBFlike', 'r_qqH_VHhad', 'r_qqH_BSM', 'r_WH_lep', 'r_ZH_lep', 'r_ttH', 'r_tH'] params['stage1p2_minimal'] = ['r_ggH_0J_low', 'r_ggH_0J_high', 'r_ggH_1J_low', 'r_ggH_1J_med', 'r_ggH_1J_high', 'r_ggH_2J_low', 'r_ggH_2J_med', 'r_ggH_2J_high', 'r_ggH_BSM_low', 'r_ggH_BSM_high', 'r_qqH_low_mjj_low_pthjj', 'r_qqH_low_mjj_high_pthjj', 'r_qqH_high_mjj_low_pthjj', 'r_qqH_high_mjj_high_pthjj', 'r_qqH_VHhad', 'r_qqH_BSM', 'r_WH_lep_low', 'r_WH_lep_high', 'r_ZH_lep', 'r_ttH_low', 'r_ttH_medlow', 'r_ttH_medhigh', 'r_ttH_high', 'r_tH'] params['stage1p2_extended'] = ['r_ggH_0J_low', 'r_ggH_0J_high', 'r_ggH_1J_low', 'r_ggH_1J_med', 'r_ggH_1J_high', 'r_ggH_2J_low', 'r_ggH_2J_med', 'r_ggH_2J_high', 'r_ggH_BSM_low', 'r_ggH_BSM_med', 'r_ggH_BSM_high', 'r_qqH_low_mjj_low_pthjj', 'r_qqH_low_mjj_high_pthjj', 'r_qqH_high_mjj_low_pthjj', 'r_qqH_high_mjj_high_pthjj', 'r_qqH_VHhad', 'r_qqH_BSM', 
'r_WH_lep_low', 'r_WH_lep_med', 'r_WH_lep_high', 'r_ZH_lep', 'r_ttH_low', 'r_ttH_medlow', 'r_ttH_medhigh', 'r_ttH_high', 'r_ttH_veryhigh', 'r_tH'] +params['cp'] = ['r_ggH', 'r_VBF', 'r_ttH', 'r_VH'] def get_options(): parser = OptionParser() diff --git a/Plots/makeSplusBModelPlot.py b/Plots/makeSplusBModelPlot.py index 4cc0edd2..13bf782d 100644 --- a/Plots/makeSplusBModelPlot.py +++ b/Plots/makeSplusBModelPlot.py @@ -52,6 +52,7 @@ def get_options(): parser.add_option("--translatePOIs", dest="translatePOIs", default=None, help="JSON to store poi translations") parser.add_option("--problematicCats", dest="problematicCats", default='', help='Problematic analysis categories to skip when processing all') parser.add_option("--doHHMjjFix", dest="doHHMjjFix", default=False, action="store_true", help="Do fix for HH analysis where some cats have different Mjj var") + parser.add_option("--pdir", dest="pdir", default="./", help="Directory where to put the final plots") return parser.parse_args() (opt,args) = get_options() @@ -211,9 +212,9 @@ def get_options(): # Create dataframe df_bands = pd.DataFrame(columns=_columns) # Loop over toys file and add row for each toy dataset - toyFiles = glob.glob("./SplusBModels%s/toys/toy_*.root"%opt.ext) + toyFiles = glob.glob("./SplusBModels%s/toys/toy_*.root"%(opt.ext)) if len(toyFiles) == 0: - print " * [ERROR] No toys files of form ./SplusBModels%s/toys/toy_*.root. Skipping bands"%opt.ext + print " * [ERROR] No toys files of form ./SplusBModels%s/toys/toy_*.root. 
Skipping bands"%(opt.ext) opt.doBands = False else: for tidx in range(len(toyFiles)): @@ -263,8 +264,8 @@ def get_options(): else: print " --> Toy veto: zero entries in first bin" # Savin toy yields dataframe to pickle file if opt.saveToyYields: - print " * Saving toy yields to: SplusBModels%s/toyYields_%s.pkl"%(opt.ext,opt.xvar.split(",")[0]) - with open("SplusBModels%s/toyYields_%s.pkl"%(opt.ext,opt.xvar.split(",")[0]),"w") as fD: pickle.dump(df_bands,fD) + print " * Saving toy yields to: ./SplusBModels%s/toyYields_%s.pkl"%(opt.ext,opt.xvar.split(",")[0]) + with open("./SplusBModels%s/toyYields_%s.pkl"%(opt.ext,opt.xvar.split(",")[0]),"w") as fD: pickle.dump(df_bands,fD) # Process each category separately for cidx in range(len(cats)): @@ -419,7 +420,9 @@ def get_options(): # Make plot for individual cats if not opt.skipIndividualCatPlots: print " * making plot" - if not os.path.isdir("./SplusBModels%s"%(opt.ext)): os.system("mkdir ./SplusBModels%s"%(opt.ext)) + if not os.path.isdir("%s/SplusBModels%s"%(opt.pdir,opt.ext)): + os.system("mkdir %s/SplusBModels%s"%(opt.pdir,opt.ext)) + if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php %s/SplusBModels%s"%(opt.pdir,opt.ext)) if opt.doBands: makeSplusBPlot(w,h_data,h_sbpdf,h_bpdf,h_spdf,h_data_ratio,h_bpdf_ratio,h_spdf_ratio,c,opt,df_bands,_reduceRange) else: makeSplusBPlot(w,h_data,h_sbpdf,h_bpdf,h_spdf,h_data_ratio,h_bpdf_ratio,h_spdf_ratio,c,opt,None,_reduceRange) @@ -446,7 +449,9 @@ def get_options(): if opt.doHHMjjFix: _reduceRange = [xvarfix.getMin(),xvarfix.getMax()] else: _reduceRange = None if opt.doSumCategories: - if not os.path.isdir("./SplusBModels%s"%(opt.ext)): os.system("mkdir ./SplusBModels%s"%(opt.ext)) + if not os.path.isdir("%s/SplusBModels%s"%(opt.pdir,opt.ext)): + os.system("mkdir %s/SplusBModels%s"%(opt.pdir,opt.ext)) + if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php %s/SplusBModels%s"%(opt.pdir,opt.ext)) print " 
--> Making plot for sum of categories" if opt.doBands: makeSplusBPlot(w,h_data_sum,h_sbpdf_sum,h_bpdf_sum,h_spdf_sum,h_data_ratio_sum,h_bpdf_ratio_sum,h_spdf_ratio_sum,'all',opt, df_bands,_reduceRange) else: makeSplusBPlot(w,h_data_sum,h_sbpdf_sum,h_bpdf_sum,h_spdf_sum,h_data_ratio_sum,h_bpdf_ratio_sum,h_spdf_ratio_sum,'all',opt,None,_reduceRange) diff --git a/Plots/makeToys.py b/Plots/makeToys.py index 7dd7def3..fa68a81d 100644 --- a/Plots/makeToys.py +++ b/Plots/makeToys.py @@ -43,7 +43,7 @@ def get_options(): setParam0Str = setParam0Str[:-1] mh_bf = w.var("MH").getVal() -if opt.batch == 'IC': +if opt.batch in ['IC','Rome']: # Create submission file for itoy in range(0,opt.nToys): fsub = open("./SplusBModels%s/toys/jobs/sub_toy_%g.sh"%(opt.ext,itoy),'w') @@ -76,7 +76,8 @@ def get_options(): os.system("chmod 775 ./SplusBModels%s/toys/jobs/sub*.sh"%opt.ext) if not opt.dryRun: subs = glob.glob("./SplusBModels%s/toys/jobs/sub*"%opt.ext) - for fsub in subs: os.system("qsub -q hep.q -l h_rt=4:0:0 -l h_vmem=24G %s"%fsub) + subcmd = 'qsub -q hep.q -l h_rt=4:0:0 -l h_vmem=24G' if opt.batch == 'IC' else 'bsub -q %s'%opt.queue + for fsub in subs: os.system("%s %s"%(subcmd,fsub)) else: print " --> [DRY-RUN] jobs have not been submitted" elif opt.batch == 'condor': diff --git a/Plots/makeYieldsTables.py b/Plots/makeYieldsTables.py index 1db49317..8cf4fbfd 100644 --- a/Plots/makeYieldsTables.py +++ b/Plots/makeYieldsTables.py @@ -14,11 +14,15 @@ from usefulStyle import setCanvas, drawCMS, drawEnPu, drawEnYear, formatHisto from shanePalette import set_color_palette +from commonObjects import * +from commonTools import * + print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HGG YIELDS TABLES RUN II ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " def leave(): print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HGG YIELDS TABLES RUN II (END) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " sys.exit(1) + # Define STXS stage 0 mapping to procs stage0_ggh = od() stage0_ggh["ggH"] = 
['ggH_0J_PTH_0_10','ggZH_had_0J_PTH_0_10','ggH_0J_PTH_GT10','ggZH_had_0J_PTH_GT10','ggH_1J_PTH_0_60','ggZH_had_1J_PTH_0_60','ggH_1J_PTH_60_120','ggZH_had_1J_PTH_60_120','ggH_1J_PTH_120_200','ggZH_had_1J_PTH_120_200','ggH_GE2J_MJJ_0_350_PTH_0_60','ggZH_had_GE2J_MJJ_0_350_PTH_0_60','ggH_GE2J_MJJ_0_350_PTH_60_120','ggZH_had_GE2J_MJJ_0_350_PTH_60_120','ggH_GE2J_MJJ_0_350_PTH_120_200','ggZH_had_GE2J_MJJ_0_350_PTH_120_200','ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25','ggH_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25','ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25','ggH_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25','ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_0_25','ggZH_had_GE2J_MJJ_350_700_PTH_0_200_PTHJJ_GT25','ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_0_25','ggZH_had_GE2J_MJJ_GT700_PTH_0_200_PTHJJ_GT25','ggH_PTH_200_300','ggZH_had_PTH_200_300','ggH_PTH_300_450','ggH_PTH_450_650','ggH_PTH_GT650','ggZH_had_PTH_300_450','ggZH_had_PTH_450_650','ggZH_had_PTH_GT650'] @@ -50,6 +54,17 @@ def leave(): stage0_top["tHq"] = ['tHq'] stage0_top["tHW"] = ['tHW'] +cp_ggh = od() +cp_ggh["ggH"] = ['ggH'] + +cp_qqh = od() +cp_qqh["qqH"] = ['qqH'] + +cp_top = od() +cp_top["ttH"] = ['ttH'] + +cp_vh = od() +cp_vh["vH"] = ['wh','zh'] # ggH tags target_procs_ggh = od() @@ -143,9 +158,94 @@ def leave(): target_procs_qqh["RECO_VBFTOPO_VHHAD_Tag0"] = ['qqH_GE2J_MJJ_60_120','WH_had_GE2J_MJJ_60_120','ZH_had_GE2J_MJJ_60_120'] target_procs_qqh["RECO_VBFTOPO_VHHAD_Tag1"] = ['qqH_GE2J_MJJ_60_120','WH_had_GE2J_MJJ_60_120','ZH_had_GE2J_MJJ_60_120'] +# qqH tags for anomalous couplings +target_procs_ggh_ac = od() +target_procs_ggh_ac["RECO_0J_PTH_0_10_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_0J_PTH_0_10_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_0J_PTH_0_10_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_0J_PTH_GT10_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_0J_PTH_GT10_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_0J_PTH_GT10_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_0_60_Tag0"] = ['ggH'] 
+target_procs_ggh_ac["RECO_1J_PTH_0_60_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_0_60_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_120_200_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_120_200_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_120_200_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_60_120_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_60_120_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_1J_PTH_60_120_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_0_60_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_0_60_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_0_60_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_120_200_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_120_200_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_120_200_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_60_120_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_60_120_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_GE2J_PTH_60_120_Tag2"] = ['ggH'] +target_procs_ggh_ac["RECO_PTH_200_300_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_PTH_200_300_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_PTH_300_450_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_PTH_300_450_Tag1"] = ['ggH'] +target_procs_ggh_ac["RECO_PTH_450_650_Tag0"] = ['ggH'] +target_procs_ggh_ac["RECO_PTH_GT650_Tag0"] = ['ggH'] + +target_procs_top_ac = od() +target_procs_top_ac["RECO_THQ_LEP"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_0_60_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_0_60_Tag1"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_0_60_Tag2"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_120_200_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_120_200_Tag1"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_120_200_Tag2"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_120_200_Tag3"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_200_300_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_200_300_Tag1"] = ['ttH'] 
+target_procs_top_ac["RECO_TTH_HAD_PTH_200_300_Tag2"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_60_120_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_60_120_Tag1"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_60_120_Tag2"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_GT300_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_HAD_PTH_GT300_Tag1"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_0_60_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_0_60_Tag1"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_0_60_Tag2"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_120_200_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_120_200_Tag1"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_200_300_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_60_120_Tag0"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_60_120_Tag1"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_60_120_Tag2"] = ['ttH'] +target_procs_top_ac["RECO_TTH_LEP_PTH_GT300_Tag0"] = ['ttH'] + +target_procs_qqh_ac = od() +target_procs_qqh_ac["RECO_VBFTOPO_ACGGH_Tag0"] = ['qqH'] +target_procs_qqh_ac["RECO_VBFTOPO_ACGGH_Tag1"] = ['qqH'] +target_procs_qqh_ac["RECO_VBFTOPO_ACVBFBSM_Tag0"] = ['qqH'] +target_procs_qqh_ac["RECO_VBFTOPO_ACVBFBSM_Tag1"] = ['qqH'] +target_procs_qqh_ac["RECO_VBFTOPO_ACVBFSM_Tag0"] = ['qqH'] + +target_procs_vh_ac = od() +target_procs_vh_ac["RECO_VBFTOPO_ACVHHADBSM_Tag0"] = ['vH'] +target_procs_vh_ac["RECO_VBFTOPO_ACVHHADBSM_Tag1"] = ['vH'] +target_procs_vh_ac["RECO_VBFTOPO_ACVHHADSM_Tag2"] = ['vH'] +target_procs_vh_ac["RECO_VBFTOPO_ACVHHADSM_Tag1"] = ['vH'] +target_procs_vh_ac["RECO_VBFTOPO_ACVHHADSM_Tag0"] = ['vH'] +target_procs_vh_ac["RECO_VH_MET_Tag0"] = ['vH'] +target_procs_vh_ac["RECO_VH_MET_Tag1"] = ['vH'] +target_procs_vh_ac["RECO_VH_MET_Tag2"] = ['vH'] +target_procs_vh_ac["RECO_VH_MET_Tag3"] = ['vH'] +target_procs_vh_ac["RECO_WH_LEP_Tag3"] = ['vH'] +target_procs_vh_ac["RECO_WH_LEP_Tag2"] = ['vH'] +target_procs_vh_ac["RECO_WH_LEP_Tag1"] = 
['vH'] +target_procs_vh_ac["RECO_WH_LEP_Tag0"] = ['vH'] +target_procs_vh_ac["RECO_ZH_LEP_Tag1"] = ['vH'] +target_procs_vh_ac["RECO_ZH_LEP_Tag0"] = ['vH'] + def get_options(): parser = OptionParser() - parser.add_option("--inputPkl", dest="inputPkl", default='', help="Input pickle file") + parser.add_option("--inputPklDir", dest="inputPklDir", default='', help="Directory in Datacard with Input pickle files for each category") parser.add_option("--loadCatInfo", dest="loadCatInfo", default='', help="Load eff sigma, B and S/S+B from pickle file") parser.add_option("--group", dest="group", default='ggh', help="Group of cats") parser.add_option("--ext", dest="ext", default='', help="Extension for saving") @@ -164,26 +264,29 @@ def LoadTranslations(jsonfilename): translateStage0 = {} if opt.translateStage0 is None else LoadTranslations(opt.translateStage0) if opt.group == "ggh": - stage0 = stage0_ggh - target_procs = target_procs_ggh + stage0 = cp_ggh + target_procs = target_procs_ggh_ac elif opt.group == "qqh": - stage0 = stage0_qqh - target_procs = target_procs_qqh + stage0 = cp_qqh + target_procs = target_procs_qqh_ac elif opt.group == "vh": - stage0 = stage0_vh - target_procs = target_procs_vh + stage0 = cp_vh + target_procs = target_procs_vh_ac elif opt.group == "top": - stage0 = stage0_top - target_procs = target_procs_top + stage0 = cp_top + target_procs = target_procs_top_ac else: print " --> [ERROR] target group of categories %s does not exist"%opt.group leave() # Load input dataFrame from pickle file -if not os.path.exists( opt.inputPkl ): - print " --> [ERROR] Input pickle file does not exist. Leaving" +if not os.path.exists( opt.inputPklDir ): + print " --> [ERROR] Input directory with pickle files does not exist. 
Leaving" leave() -with open( opt.inputPkl, "rb" ) as fin: data = pickle.load(fin) +yfiles = glob.glob("%s/*.pkl" % opt.inputPklDir) +data = pd.concat([pd.read_pickle(f) for f in yfiles],sort=False) +#with open( opt.inputPkl, "rb" ) as fin: data = pickle.load(fin) + # Load cat info dataframe if opt.loadCatInfo != '': if not os.path.exists( opt.loadCatInfo ): @@ -199,11 +302,21 @@ def LoadTranslations(jsonfilename): # Fill frame for cat in target_procs: - mask = (data['cat']==cat)&(data['type']=='sig') - _nominal_yield = data[mask][opt.yieldVar].sum() - _target_yield = data[mask][data[mask].apply(lambda x: "_".join(x['proc'].split("_")[:-2]) in target_procs[cat], axis=1)][opt.yieldVar].sum() _s0_yields = od() - for s0 in stage0: _s0_yields[s0] = data[mask][data[mask].apply(lambda x: "_".join(x['proc'].split("_")[:-2]) in stage0[s0], axis=1)][opt.yieldVar].sum() + _nominal_yield = 0 + _target_yield = 0 + for year in ['2016preVFP','2016postVFP','2017','2018']: + mask = (data['cat']==cat)&(data['type']=='sig')&(data['year']==year) + # Extract rate from lumi + _rate = float(lumiMap[year]) + _nominal_yield += data[mask][opt.yieldVar].sum()*_rate + _target_yield += data[mask][data[mask].apply(lambda x: "_".join(x['proc'].split("_")[:-2]) in target_procs[cat], axis=1)][opt.yieldVar].sum()*_rate + for s0 in stage0: + s0_y = data[mask][data[mask].apply(lambda x: "_".join(x['proc'].split("_")[:-2]) in stage0[s0], axis=1)][opt.yieldVar].sum()*_rate + if s0 in _s0_yields.keys(): + _s0_yields[s0] += s0_y + else: + _s0_yields[s0] = s0_y if opt.loadCatInfo != '': catdata_mask = catinfo_data['cat']==cat @@ -219,26 +332,29 @@ def LoadTranslations(jsonfilename): tab_data.loc[len(tab_data)] = vals # Make table -nColumns = 5+len(stage0.keys()) -fout = open("Tables/yields_table_lite_%s%s.txt"%(opt.group,opt.ext),"w") +nColumns = 4+len(stage0.keys()) +foutname = "Tables/yields_table_lite_%s%s.txt"%(opt.group,opt.ext) +if not os.path.isdir('Tables'): os.system("mkdir Tables") +fout = 
open(foutname,"w") fout.write("\\begin{tabular}{%s}\n"%("l|"+("c"*(nColumns-1)))) #fout.write(" \\hline \\hline \n") #fout.write(" \\multirow{3}{*}{Analysis categories} & \\multicolumn{%g}{c|}{SM 125 GeV Higgs boson expected signal} & \\multirow{3}{*}{S/S+B} \\\\ \\cline{2-%g}\n"%(3+len(stage0.keys()),nColumns-1)) -fout.write(" \\multirow{3}{*}{Analysis categories} & \\multicolumn{%g}{c}{SM 125 GeV Higgs boson expected signal} & \\multirow{3}{*}{S/S+B} \\\\ \n"%(3+len(stage0.keys()))) +fout.write(" \\multirow{3}{*}{Analysis categories} & \\multicolumn{%g}{c}{SM 125 GeV Higgs boson expected signal} & \\multirow{3}{*}{S/S+B} \\\\ \n"%(2+len(stage0.keys()))) #fout.write(" & \\multirow{2}{*}{\\begin{tabular}[c]{@{}c@{}}Total\\\\Yield\\end{tabular}} & \\multirow{2}{*}{\\begin{tabular}[c]{@{}c@{}}Target\\\\Fraction\\end{tabular}} & \\multicolumn{%g}{c|}{Production Mode Fractions} & \\multirow{2}{*}{\\begin{tabular}[c]{@{}c@{}}$\\sigma_{\\rm{eff}}$\\\\(GeV)\\end{tabular}} & \\\\ \\cline{4-%g}\n"%(len(stage0.keys()),nColumns-2)) -fout.write(" & \\multirow{2}{*}{Total} & \\multirow{2}{*}{\\begin{tabular}[c]{@{}c@{}}Target\\\\STXS bin(s)\\end{tabular}} & \\multicolumn{%g}{c}{Production Mode Fractions} & \\multirow{2}{*}{\\begin{tabular}[c]{@{}c@{}}$\\sigma_{\\rm{eff}}$\\\\(GeV)\\end{tabular}} & \\\\ \n"%(len(stage0.keys()))) +fout.write(" & \\multirow{2}{*}{Total} & \\multicolumn{%g}{c}{Production Mode Fractions} & \\multirow{2}{*}{\\begin{tabular}[c]{@{}c@{}}$\\sigma_{\\rm{eff}}$\\\\(GeV)\\end{tabular}} & \\\\ \n"%(len(stage0.keys()))) s0_str = Translate(stage0.keys()[0],translateStage0) for s0 in stage0.keys()[1:]: s0_str += " & %s"%Translate(s0,translateStage0) #fout.write(" & & & %s & & \\\\ \\hline \\hline \n"%s0_str) -fout.write(" & & & %s & & \\\\ \\hline \n"%s0_str) +fout.write(" & & %s & & \\\\ \\hline \n"%s0_str) # Add numbers tag_itr = -1 + for ir,r in tab_data.iterrows(): if tag_itr == -1: tag_itr = 
len(tab_data[tab_data['cat'].str.contains(r['cat'].split("_Tag")[0])])-1 doRow = True else: doRow = False - catline = " %s & %.1f & %.1f\\%%"%(Translate(r['cat'],translateCats),r['nominal_yield'],100*(r['target_yield']/r['nominal_yield'])) + catline = " %s & %.1f"%(Translate(r['cat'],translateCats),r['nominal_yield']) for s0 in stage0: pcs0 = 100*(r['%s_yield'%s0]/r['nominal_yield']) #if pcs0 < 0.1: catline += " & $<$0.1\\%" @@ -247,7 +363,9 @@ def LoadTranslations(jsonfilename): catline += " & %.2f & %.2f"%(r['effSigma'],r['SoverSplusB']) fout.write("%s \\\\ \n"%catline) #if tag_itr == 0: fout.write(" \\hline\n") - if tag_itr == 0: fout.write(" [\\cmsTabSkip]\n") + #if tag_itr == 0: fout.write(" [\\cmsTabSkip]\n") tag_itr -= 1 #fout.write(" \\hline \n") fout.write("\\end{tabular}\n") +print "Written latex table in ",foutname + diff --git a/Plots/plottingTools.py b/Plots/plottingTools.py index d35515eb..f40c409a 100644 --- a/Plots/plottingTools.py +++ b/Plots/plottingTools.py @@ -277,6 +277,6 @@ def makeSplusBPlot(workspace,hD,hSB,hB,hS,hDr,hBr,hSr,cat,options,dB=None,reduce # Save canvas canv.Update() - canv.SaveAs("./SplusBModels%s/%s_%s.png"%(options.ext,cat,options.xvar.split(",")[0])) - canv.SaveAs("./SplusBModels%s/%s_%s.pdf"%(options.ext,cat,options.xvar.split(",")[0])) + canv.SaveAs("%s/SplusBModels%s/%s_%s.png"%(options.pdir,options.ext,cat,options.xvar.split(",")[0])) + canv.SaveAs("%s/SplusBModels%s/%s_%s.pdf"%(options.pdir,options.ext,cat,options.xvar.split(",")[0])) #raw_input("Press any key to continue...") diff --git a/Plots/pois_ALT_0M.json b/Plots/pois_ALT_0M.json new file mode 100644 index 00000000..ae1c2328 --- /dev/null +++ b/Plots/pois_ALT_0M.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{a3}" +} diff --git a/Plots/pois_ALT_0PH.json b/Plots/pois_ALT_0PH.json new file mode 100644 index 00000000..2ef3fab3 --- /dev/null +++ b/Plots/pois_ALT_0PH.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{a2}" +} diff --git 
a/Plots/pois_ALT_L1.json b/Plots/pois_ALT_L1.json new file mode 100644 index 00000000..1bde8282 --- /dev/null +++ b/Plots/pois_ALT_L1.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{#Lambda1}" +} diff --git a/Plots/pois_ALT_L1Zg.json b/Plots/pois_ALT_L1Zg.json new file mode 100644 index 00000000..fe14d811 --- /dev/null +++ b/Plots/pois_ALT_L1Zg.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{#Lambda1}^{Z#gamma}" +} diff --git a/Plots/pois_fa2.json b/Plots/pois_fa2.json new file mode 100644 index 00000000..2ef3fab3 --- /dev/null +++ b/Plots/pois_fa2.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{a2}" +} diff --git a/Plots/pois_fa3.json b/Plots/pois_fa3.json new file mode 100644 index 00000000..ae1c2328 --- /dev/null +++ b/Plots/pois_fa3.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{a3}" +} diff --git a/Plots/pois_flambda1.json b/Plots/pois_flambda1.json new file mode 100644 index 00000000..1bde8282 --- /dev/null +++ b/Plots/pois_flambda1.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{#Lambda1}" +} diff --git a/Plots/pois_flambda1zgamma.json b/Plots/pois_flambda1zgamma.json new file mode 100644 index 00000000..fe14d811 --- /dev/null +++ b/Plots/pois_flambda1zgamma.json @@ -0,0 +1,4 @@ +{ + "r":"#mu", + "CMS_zz4l_fai1":"f_{#Lambda1}^{Z#gamma}" +} diff --git a/Plots/run_sequence.sh b/Plots/run_sequence.sh new file mode 100755 index 00000000..f95b2fa8 --- /dev/null +++ b/Plots/run_sequence.sh @@ -0,0 +1,57 @@ +STEP=0 +usage(){ + echo "Script to run fits and plots of fit output." + echo "options:" + + echo "-h|--help) " + echo "-s|--step) " + echo "-d|--dryRun) " +} +# options may be followed by one colon to indicate they have a required argument +if ! 
options=$(getopt -u -o s:hd -l help,step:,dryRun -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +bestfit="/eos/cms/store/group/phys_higgs/cmshgg/fderiggi/runFits_JulyProduction/runFitsxsec_xsec/higgsCombine_bestfit_syst_xsec_r_ggH.MultiDimFit.mH125.38.root" +yields="../Datacard/yields_2024-07-12_xsec" + +if [[ $STEP == "spb" ]]; then + python makeSplusBModelPlot.py --inputWSFile $bestfit --loadSnapshot MultiDimFit --cats all --doZeroes --pdir . --ext _test --unblind +elif [[ $STEP == "catweights" ]]; then + python getCatInfo.py --inputWSFile $bestfit --cats all --doBkgRenormalization --saveCatInfo --ext _allCats +elif [[ $STEP == "bands" ]]; then + python makeToys.py --inputWSFile $bestfit --loadSnapshot MultiDimFit --nToys 500 --POIs r_ggH,r_VBF,r_top,r_VH --batch Rome --queue cmsan --ext _test_with_bands +elif [[ $STEP == "spb2-calc" ]]; then + # first time, with bands calculation + python makeSplusBModelPlot.py --inputWSFile $bestfit --loadSnapshot MultiDimFit --cats all --doZeroes --pdir . --ext _test_with_bands --unblind --doBands -saveToyYields --doSumCategories --doCatWeights --saveWeights +elif [[ $STEP == "spb2" ]]; then + # next times, when toys are merged + python makeSplusBModelPlot.py --inputWSFile $bestfit --loadSnapshot MultiDimFit --cats all --doZeroes --pdir . 
--ext _test_with_bands --unblind --doBands --loadToyYields SplusBModels_test_with_bands/toys/toyYields_CMS_hgg_mass.pkl --doSumCategories --doCatWeights --saveWeights +elif [[ $STEP == "tables" ]]; then + # make tables with yields + groups=("ggh" "qqh" "vh" "top") + groups=("vh") + for group in ${groups[*]} + do + echo "python makeYieldsTables.py --inputPklDir $yields --loadCatInfo pkl/catInfo_allCats.pkl --group $group --translateCats cats_latex.json" +# python makeYieldsTables.py --inputPklDir $yields --loadCatInfo pkl/catInfo_allCats.pkl --group $group --translateCats cats_latex.json + done +else + echo "Step $STEP is not one among yields,datacard,links. Exiting." +fi + diff --git a/README.md b/README.md index 9ea9101c..3673f7e0 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ cmsenv scram b -j 9 # Install Flashgg Final Fit packages -git clone -b dev_fggfinalfits_lite https://github.com/cms-analysis/flashggFinalFit.git +git clone -b dev_fggfinalfits_lite git@github.com:Higgs-Anomalous-Couplings/flashggFinalFit.git cd flashggFinalFit/ ``` diff --git a/Signal/RelativeYields.py b/Signal/RelativeYields.py new file mode 100644 index 00000000..477a0fe9 --- /dev/null +++ b/Signal/RelativeYields.py @@ -0,0 +1,25 @@ + +print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pandas relative yields ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " +import os, sys +import re +from optparse import OptionParser +import ROOT +import pandas as pd +import glob +import pickle + +def get_options(): + parser = OptionParser() + parser.add_option('--cat', dest='cat', default='VBFTag_7', help="category in wich we are interested in)") + parser.add_option('--input', dest='inDir', default='outdir_2022-06-17_year2016/calcPhotonSyst/pkl/', help='Directiory input') + parser.add_option('--syst', dest='Syst', default='', help='Systematics to see') + return parser.parse_args() +(opt,args) = get_options() + + +pkl_files = glob.glob("./%s/*.pkl"%opt.inDir) +print pkl_files, "./%s/*.pkl" +for p in pkl_files: + with 
open(p) as f: data_new = pickle.load(f) + print data_new.columns + print data_new.T diff --git a/Signal/RunPackager.py b/Signal/RunPackager.py index e0f95e03..1383dc76 100644 --- a/Signal/RunPackager.py +++ b/Signal/RunPackager.py @@ -18,8 +18,8 @@ def get_options(): parser.add_option("--massPoints", dest='massPoints', default='120,125,130', help="Comma separated list of mass points") parser.add_option('--mergeYears', dest='mergeYears', default=False, action="store_true", help="Use if merging categories across years") parser.add_option('--year', dest='year', default='2016', help="If not merging then add year tag to file name") - parser.add_option('--batch', dest='batch', default='IC', help='Batch') - parser.add_option('--queue', dest='queue', default='microcentury', help='Queue: should not take long (microcentury will do)') + parser.add_option('--batch', dest='batch', default='condor', help='Batch') + parser.add_option('--queue', dest='queue', default='espresso', help='Queue: should not take long (espresso will do)') parser.add_option('--jobOpts', dest='jobOpts', default='', help="Additional options to add to job submission. For Condor separate individual options with a colon (specify all within quotes e.g. 
\"option_xyz = abc+option_123 = 456\")") parser.add_option('--printOnly', dest='printOnly', default=False, action="store_true", help="Dry run: print submission files only") return parser.parse_args() @@ -52,6 +52,9 @@ def leave(): options['cats'] = extractListOfCats(WSFileNames) options['nCats'] = len(options['cats'].split(",")) + + + print " --> Packaging signal workspaces from: %s"%opt.exts print " --> For analysis categories: %s"%options['cats'] diff --git a/Signal/RunPlotter.py b/Signal/RunPlotter.py index f358072a..7f4647ec 100644 --- a/Signal/RunPlotter.py +++ b/Signal/RunPlotter.py @@ -26,6 +26,7 @@ def get_options(): parser.add_option("--translateProcs", dest="translateProcs", default=None, help="JSON to store proc translations") parser.add_option("--label", dest="label", default='Simulation Preliminary', help="CMS Sub-label") parser.add_option("--doFWHM", dest="doFWHM", default=False, action='store_true', help="Do FWHM") + parser.add_option("--outdir", dest='outdir', default=swd__, help="Output directory (default is the current one)") return parser.parse_args() (opt,args) = get_options() @@ -96,14 +97,14 @@ def get_options(): k = "%s__%s"%(proc,year) _id = "%s_%s_%s_%s"%(proc,year,cat,sqrts__) norms[k] = w.function("%s_%s_normThisLumi"%(outputWSObjectTitle__,_id)) - # Iterate over norms: extract total category norm catNorm = 0 for k, norm in norms.iteritems(): proc, year = k.split("__") w.var("IntLumi").setVal(lumiScaleFactor*lumiMap[year]) + catNorm += norm.getVal() - + # Iterate over norms and extract data sets + pdfs for k, norm in norms.iteritems(): proc, year = k.split("__") @@ -148,13 +149,13 @@ def get_options(): # Per-year pdf histograms if len(opt.years.split(",")) > 1: for year in opt.years.split(","): - if 'pdf_%s'%year not in hists: + if 'pdf_%s'%year not in hists or hists['pdf_%s'%year]==None: hists['pdf_%s'%year] = hists['pdf'].Clone() hists['pdf_%s'%year].Reset() # Fill for _id,p in hpdfs.iteritems(): if year in _id: hists['pdf_%s'%year] += 
p - + # Garbage removal for d in data_rwgt.itervalues(): d.Delete() for p in hpdfs.itervalues(): p.Delete() @@ -162,5 +163,8 @@ def get_options(): fin.Close() # Make plot -if not os.path.isdir("%s/outdir_%s/Plots"%(swd__,opt.ext)): os.system("mkdir %s/outdir_%s/Plots"%(swd__,opt.ext)) -plotSignalModel(hists,opt,_outdir="%s/outdir_%s/Plots"%(swd__,opt.ext)) +outdir="%s/%s/Plots"%(opt.outdir,opt.ext) +if not os.path.isdir(outdir): os.system("mkdir -p %s"%outdir) +if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php "+outdir) +elif os.path.exists("/cmshome/dimarcoe"): os.system("cp /cmshome/dimarcoe/php/index.php "+outdir) +plotSignalModel(hists,opt,_outdir=outdir) diff --git a/Signal/RunSignalScripts.py b/Signal/RunSignalScripts.py index 0afe7718..a0209bf7 100644 --- a/Signal/RunSignalScripts.py +++ b/Signal/RunSignalScripts.py @@ -52,6 +52,8 @@ def leave(): options['smears'] = _cfg['smears'] options['batch'] = _cfg['batch'] options['queue'] = _cfg['queue'] + options['xvar'] = _cfg['xvar'] + options['outdir'] = _cfg['outdir'] if 'outdir' in _cfg else swd__ # Options from command line options['mode'] = opt.mode options['modeOpts'] = opt.modeOpts @@ -103,6 +105,7 @@ def leave(): print " --> Processes: %s"%options['procs'] print " --> Categories: %s"%options['cats'] print " --> Mass points: %s --> Low = %s, High = %s"%(options['massPoints'],options['massLow'],options['massHigh']) +print " --> Variable to fit: %s"%options['xvar'] print " --> Extension: %s"%options['ext'] print " --> Analysis: %s"%options['analysis'] print " --> Year: %s ::: Corresponds to intLumi = %.2f fb^-1"%(options['year'],lumiMap[options['year']]) @@ -135,7 +138,7 @@ def leave(): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Make directory to store job scripts and output -if not os.path.isdir("%s/outdir_%s"%(swd__,options['ext'])): os.system("mkdir %s/outdir_%s"%(swd__,options['ext'])) +if not 
os.path.isdir("%s/%s"%(options['outdir'],options['ext'])): os.system("mkdir %s/%s"%(options['outdir'],options['ext'])) # Write submission files: style depends on batch system writeSubFiles(options) diff --git a/Signal/config_test_2016.py b/Signal/config_test_2016.py deleted file mode 100644 index 5c8de993..00000000 --- a/Signal/config_test_2016.py +++ /dev/null @@ -1,28 +0,0 @@ -# Config file: options for signal fitting - -_year = '2016' - -signalScriptCfg = { - - # Setup - 'inputWSDir':'/vols/cms/jl2117/hgg/ws/UL/Sept20/MC_final/signal_%s'%_year, - 'procs':'auto', # if auto: inferred automatically from filenames - 'cats':'auto', # if auto: inferred automatically from (0) workspace - 'ext':'dcb_%s'%_year, - 'analysis':'STXS', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) - 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended - 'massPoints':'120,125,130', - - #Photon shape systematics - 'scales':'HighR9EB,HighR9EE,LowR9EB,LowR9EE,Gain1EB,Gain6EB', # separate nuisance per year - 'scalesCorr':'MaterialCentralBarrel,MaterialOuterBarrel,MaterialForward,FNUFEE,FNUFEB,ShowerShapeHighR9EE,ShowerShapeHighR9EB,ShowerShapeLowR9EE,ShowerShapeLowR9EB', # correlated across years - 'scalesGlobal':'NonLinearity,Geant4', # affect all processes equally, correlated across years - 'smears':'HighR9EBPhi,HighR9EBRho,HighR9EEPhi,HighR9EERho,LowR9EBPhi,LowR9EBRho,LowR9EEPhi,LowR9EERho', # separate nuisance per year - - # Job submission options - 'batch':'IC', # ['condor','SGE','IC','local'] - 'queue':'hep.q' - #'batch':'condor', # ['condor','SGE','IC','local'] - #'queue':'espresso', - -} diff --git a/Signal/config_test_2016postVFP.py b/Signal/config_test_2016postVFP.py new file mode 100644 index 00000000..06adec07 --- /dev/null +++ b/Signal/config_test_2016postVFP.py @@ -0,0 +1,26 @@ +# Config file: options for signal fitting + +_year = '2016postVFP' + +signalScriptCfg = { + # Setup + 'inputWSDir':'cards/signal_%s/'%_year, + 
'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', + + #Photon shape systematics + 'scales':'HighR9EB,HighR9EE,LowR9EB,LowR9EE,Gain1EB,Gain6EB', # separate nuisance per year + 'scalesCorr':'MaterialCentralBarrel,MaterialOuterBarrel,MaterialForward,FNUFEE,FNUFEB,ShowerShapeHighR9EE,ShowerShapeHighR9EB,ShowerShapeLowR9EE,ShowerShapeLowR9EB', # correlated across years + 'scalesGlobal':'NonLinearity,Geant4', # affect all processes equally, correlated across years + 'smears':'HighR9EBPhi,HighR9EBRho,HighR9EEPhi,HighR9EERho,LowR9EBPhi,LowR9EBRho,LowR9EEPhi,LowR9EERho', # separate nuisance per year + + # Job submission options + 'batch':'condor', # ['condor','SGE','IC','Rome','local'] + 'queue':'longlunch' +} diff --git a/Signal/config_test_2016postVFP_NoSyst.py b/Signal/config_test_2016postVFP_NoSyst.py new file mode 100644 index 00000000..7b1da52d --- /dev/null +++ b/Signal/config_test_2016postVFP_NoSyst.py @@ -0,0 +1,26 @@ +# Config file: options for signal fitting + +_year = '2016postVFP' + +signalScriptCfg = { + # Setup + 'inputWSDir':'cards/signal_%s/'%_year, + 'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', + + #Photon shape systematics + 'scales':'', # separate nuisance per year + 'scalesCorr':'', # correlated across years + 'scalesGlobal':'', # affect all processes equally, correlated across years + 'smears':'', # separate nuisance per year + + # Job submission options + 'batch':'condor', # 
['condor','SGE','IC','Rome','local'] + 'queue':'longlunch' +} diff --git a/Signal/config_test_2016preVFP.py b/Signal/config_test_2016preVFP.py new file mode 100644 index 00000000..98ff83ec --- /dev/null +++ b/Signal/config_test_2016preVFP.py @@ -0,0 +1,26 @@ +# Config file: options for signal fitting + +_year = '2016preVFP' + +signalScriptCfg = { + # Setup + 'inputWSDir':'cards/signal_%s/'%_year, + 'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', + + #Photon shape systematics + 'scales':'HighR9EB,HighR9EE,LowR9EB,LowR9EE,Gain1EB,Gain6EB', # separate nuisance per year + 'scalesCorr':'MaterialCentralBarrel,MaterialOuterBarrel,MaterialForward,FNUFEE,FNUFEB,ShowerShapeHighR9EE,ShowerShapeHighR9EB,ShowerShapeLowR9EE,ShowerShapeLowR9EB', # correlated across years + 'scalesGlobal':'NonLinearity,Geant4', # affect all processes equally, correlated across years + 'smears':'HighR9EBPhi,HighR9EBRho,HighR9EEPhi,HighR9EERho,LowR9EBPhi,LowR9EBRho,LowR9EEPhi,LowR9EERho', # separate nuisance per year + + # Job submission options + 'batch':'condor', # ['condor','SGE','IC','Rome','local'] + 'queue':'longlunch' +} diff --git a/Signal/config_test_2016preVFP_NoSyst.py b/Signal/config_test_2016preVFP_NoSyst.py new file mode 100644 index 00000000..15ec9604 --- /dev/null +++ b/Signal/config_test_2016preVFP_NoSyst.py @@ -0,0 +1,26 @@ +# Config file: options for signal fitting + +_year = '2016preVFP' + +signalScriptCfg = { + # Setup + 'inputWSDir':'cards/signal_%s/'%_year, + 'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not 
recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', + + #Photon shape systematics + #Photon shape systematics + 'scales':'', # separate nuisance per year + 'scalesCorr':'', # correlated across years + 'scalesGlobal':'', # affect all processes equally, correlated across years + 'smears':'', # separate nuisance per year + # Job submission options + 'batch':'condor', # ['condor','SGE','IC','Rome','local'] + 'queue':'longlunch' +} diff --git a/Signal/config_test_2017.py b/Signal/config_test_2017.py index 1ca1a3a5..3afcb7ce 100644 --- a/Signal/config_test_2017.py +++ b/Signal/config_test_2017.py @@ -3,26 +3,24 @@ _year = '2017' signalScriptCfg = { - - # Setup - 'inputWSDir':'/vols/cms/jl2117/hgg/ws/UL/Sept20/MC_final/signal_%s'%_year, - 'procs':'auto', # if auto: inferred automatically from filenames - 'cats':'auto', # if auto: inferred automatically from (0) workspace - 'ext':'dcb_%s'%_year, - 'analysis':'STXS', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) - 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended - 'massPoints':'120,125,130', + # Setup + 'inputWSDir':'cards/signal_%s'%_year, + 'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', - #Photon shape systematics - 'scales':'HighR9EB,HighR9EE,LowR9EB,LowR9EE,Gain1EB,Gain6EB', # separate nuisance per year - 'scalesCorr':'MaterialCentralBarrel,MaterialOuterBarrel,MaterialForward,FNUFEE,FNUFEB,ShowerShapeHighR9EE,ShowerShapeHighR9EB,ShowerShapeLowR9EE,ShowerShapeLowR9EB', # correlated across years - 'scalesGlobal':'NonLinearity,Geant4', # affect all processes equally, correlated across years - 
'smears':'HighR9EBPhi,HighR9EBRho,HighR9EEPhi,HighR9EERho,LowR9EBPhi,LowR9EBRho,LowR9EEPhi,LowR9EERho', # separate nuisance per year - - # Job submission options - 'batch':'IC', # ['condor','SGE','IC','local'] - 'queue':'hep.q' - #'batch':'condor', # ['condor','SGE','IC','local'] - #'queue':'espresso', + #Photon shape systematics + 'scales':'HighR9EB,HighR9EE,LowR9EB,LowR9EE,Gain1EB,Gain6EB', # separate nuisance per year + 'scalesCorr':'MaterialCentralBarrel,MaterialOuterBarrel,MaterialForward,FNUFEE,FNUFEB,ShowerShapeHighR9EE,ShowerShapeHighR9EB,ShowerShapeLowR9EE,ShowerShapeLowR9EB', # correlated across years + 'scalesGlobal':'NonLinearity,Geant4', # affect all processes equally, correlated across years + 'smears':'HighR9EBPhi,HighR9EBRho,HighR9EEPhi,HighR9EERho,LowR9EBPhi,LowR9EBRho,LowR9EEPhi,LowR9EERho', # separate nuisance per year + # Job submission options + 'batch':'condor', # ['condor','SGE','IC','Rome','local'] + 'queue':'longlunch' } diff --git a/Signal/config_test_2017_NoSyst.py b/Signal/config_test_2017_NoSyst.py new file mode 100644 index 00000000..8b1d7c6f --- /dev/null +++ b/Signal/config_test_2017_NoSyst.py @@ -0,0 +1,27 @@ +# Config file: options for signal fitting + +_year = '2017' + +signalScriptCfg = { + # Setup + 'inputWSDir':'cards/signal_%s'%_year, + 'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', + + #Photon shape systematics + #Photon shape systematics + 'scales':'', # separate nuisance per year + 'scalesCorr':'', # correlated across years + 'scalesGlobal':'', # affect all processes equally, correlated across years + 'smears':'', # separate nuisance per year + + # Job submission options + 'batch':'condor', # ['condor','SGE','IC','Rome','local'] + 
'queue':'longlunch' +} diff --git a/Signal/config_test_2018.py b/Signal/config_test_2018.py index 8c393eab..e5a23508 100644 --- a/Signal/config_test_2018.py +++ b/Signal/config_test_2018.py @@ -3,26 +3,24 @@ _year = '2018' signalScriptCfg = { - - # Setup - 'inputWSDir':'/vols/cms/jl2117/hgg/ws/UL/Sept20/MC_final/signal_%s'%_year, - 'procs':'auto', # if auto: inferred automatically from filenames - 'cats':'auto', # if auto: inferred automatically from (0) workspace - 'ext':'dcb_%s'%_year, - 'analysis':'STXS', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) - 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended - 'massPoints':'120,125,130', + # Setup + 'inputWSDir':'cards/signal_%s/'%_year, + 'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', - #Photon shape systematics - 'scales':'HighR9EB,HighR9EE,LowR9EB,LowR9EE,Gain1EB,Gain6EB', # separate nuisance per year - 'scalesCorr':'MaterialCentralBarrel,MaterialOuterBarrel,MaterialForward,FNUFEE,FNUFEB,ShowerShapeHighR9EE,ShowerShapeHighR9EB,ShowerShapeLowR9EE,ShowerShapeLowR9EB', # correlated across years - 'scalesGlobal':'NonLinearity,Geant4', # affect all processes equally, correlated across years - 'smears':'HighR9EBPhi,HighR9EBRho,HighR9EEPhi,HighR9EERho,LowR9EBPhi,LowR9EBRho,LowR9EEPhi,LowR9EERho', # separate nuisance per year - - # Job submission options - 'batch':'IC', # ['condor','SGE','IC','local'] - 'queue':'hep.q' - #'batch':'condor', # ['condor','SGE','IC','local'] - #'queue':'espresso', + #Photon shape systematics + 'scales':'HighR9EB,HighR9EE,LowR9EB,LowR9EE,Gain1EB,Gain6EB', # separate nuisance per year + 
'scalesCorr':'MaterialCentralBarrel,MaterialOuterBarrel,MaterialForward,FNUFEE,FNUFEB,ShowerShapeHighR9EE,ShowerShapeHighR9EB,ShowerShapeLowR9EE,ShowerShapeLowR9EB', # correlated across years + 'scalesGlobal':'NonLinearity,Geant4', # affect all processes equally, correlated across years + 'smears':'HighR9EBPhi,HighR9EBRho,HighR9EEPhi,HighR9EERho,LowR9EBPhi,LowR9EBRho,LowR9EEPhi,LowR9EERho', # separate nuisance per year + # Job submission options + 'batch':'condor', # ['condor','SGE','IC','Rome','local'] + 'queue':'longlunch' } diff --git a/Signal/config_test_2018_NoSyst.py b/Signal/config_test_2018_NoSyst.py new file mode 100644 index 00000000..0657e9cf --- /dev/null +++ b/Signal/config_test_2018_NoSyst.py @@ -0,0 +1,27 @@ +# Config file: options for signal fitting + +_year = '2018' + +signalScriptCfg = { + # Setup + 'inputWSDir':'cards/signal_%s/'%_year, + 'procs':'auto', + 'cats':'auto', + 'ext':'2024-09-01_year%s'%_year, + 'analysis':'AC', # To specify which replacement dataset mapping (defined in ./python/replacementMap.py) + 'year':'%s'%_year, # Use 'combined' if merging all years: not recommended + 'massPoints':'120,125,130', + 'xvar': 'CMS_hgg_mass', + 'outdir': 'plots', + + #Photon shape systematics + #Photon shape systematics + 'scales':'', # separate nuisance per year + 'scalesCorr':'', # correlated across years + 'scalesGlobal':'', # affect all processes equally, correlated across years + 'smears':'', # separate nuisance per year + + # Job submission options + 'batch':'condor', # ['condor','SGE','IC','Rome','local'] + 'queue':'longlunch' +} diff --git a/Signal/plots b/Signal/plots new file mode 120000 index 00000000..3115b63a --- /dev/null +++ b/Signal/plots @@ -0,0 +1 @@ +/eos/home-f/fderiggi/www \ No newline at end of file diff --git a/Signal/run_sequence.sh b/Signal/run_sequence.sh new file mode 100755 index 00000000..62d8a686 --- /dev/null +++ b/Signal/run_sequence.sh @@ -0,0 +1,90 @@ +#!/bin/bash + +YEAR=all +STEP=calcPhotonSyst + +usage(){ + echo 
"The script runs background scripts:" + echo "options:" + + echo "-h|--help) " + echo "-y|--year) " + echo "-s|--step) " + echo "-d|--dryRun) " +} +# options may be followed by one colon to indicate they have a required argument +if ! options=$(getopt -u -o s:y:dh -l help,step:,year:,dryRun -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-y|--year) YEAR=$2; shift ;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +echo "Requested to run the step ${STEP} for the year: ${YEAR}" +if [[ $YEAR != "all" && ($YEAR < 2016 || $YEAR>2018) ]]; then + echo "Year $YEAR does not belong to Run2. Exiting." + exit +fi + +DROPT="" +if [[ $DR ]]; then + DROPT=" --printOnly " +fi + +years=("2016preVFP" "2016postVFP" "2017" "2018") + +if [[ $STEP == "fTest" ]] || [[ $STEP == "calcPhotonSyst" ]] || [[ $STEP == "getEffAcc" ]] || [[ $STEP == 'signalFit' ]]; then + for year in ${years[*]} + do + if [[ $year == $YEAR ]] || [[ $YEAR == "all" ]]; then + echo "====> Running $STEP for year $year" + if [[ $STEP == "fTest" ]]; then + python RunSignalScripts.py --inputConfig config_test_${year}.py --mode fTest --modeOpts "--doPlots --outdir plots --nProcsToFTest -1" ${DROPT} + elif [[ $STEP == "calcPhotonSyst" ]]; then + python RunSignalScripts.py --inputConfig config_test_${year}.py --mode calcPhotonSyst ${DROPT} + elif [[ $STEP == 'getEffAcc' ]]; then + python RunSignalScripts.py --inputConfig config_test_${year}.py --mode getEffAcc --modeOpts="--year ${year} --skipCOWCorr" ${DROPT} + elif [[ $STEP == 'signalFit' ]]; then + python RunSignalScripts.py --inputConfig config_test_${year}.py --mode signalFit --modeOpts="--doPlots --doYield --outdir plots --doEffAccFromJson" ${DROPT} + fi + fi + done +elif [[ $STEP == 
'packager' ]]; then + python RunPackager.py --cats "auto" --inputWSDir /afs/cern.ch/user/f/fderiggi/AC/CMSSW_10_2_13/src/flashggFinalFit/Signal/cards/signal_2017 --outputExt packaged --exts 2024-09-01_year2016preVFP,2024-09-01_year2016postVFP,2024-09-01_year2017,2024-09-01_year2018 --mergeYears --batch condor --queue espresso ${DROPT} +elif [[ $STEP == 'plotter' ]]; then + smprocs=("GG2H" "VBF" "TTH" "WMINUSH2HQQ" "WPLUSH2HQQ" "QQ2HLL") + smprocs=( "wh_ALT_0Mf05" "wh_ALT_0M" "VBF_ALT_0M" "VBF_ALT_0Mf05" "ZH_ALT0Mf05ph0" "ZH_ALT0M" ) + smprocs_csv=$(IFS=, ; echo "${smprocs[*]}") + # just plot all the (SM) processes, all the categories, all the years together. Can be split with --year ${YEAR}. Do not include BSM to maintain the expected total yield for SM + echo "Now plotting all categories for these SM processes: $smprocs_csv" + echo "RunPlotter.py --procs $smprocs_csv --cats "all" --year 2016preVFP,2016postVFP,2017,2018 --ext signal --outdir plots --translateCats ../Plots/cats_latex.json" + #python RunPlotter.py --procs $smprocs_csv --cats "all" --year 2016preVFP,2016postVFP,2017,2018 --ext signal --outdir plots --translateCats ../Plots/cats_latex.json + # split by category, all processes together + significantCats=("RECO_VBFTOPO_ACGGH_Tag0" "RECO_VBFTOPO_ACGGH_Tag1" "RECO_VBFTOPO_ACVBFBSM_Tag0" "RECO_VBFTOPO_ACVBFBSM_Tag1" "RECO_VBFTOPO_ACVBFSM_Tag0" "RECO_VBFTOPO_ACVHHADBSM_Tag0" "RECO_VBFTOPO_ACVHHADBSM_Tag1" "RECO_VBFTOPO_ACVHHADSM_Tag0" "RECO_VBFTOPO_ACVHHADSM_Tag1" "RECO_VBFTOPO_ACVHHADSM_Tag2") + significantCats=("RECO_VH_MET_Tag0" "RECO_VH_MET_Tag1" "RECO_VH_MET_Tag2" "RECO_VH_MET_Tag3" "RECO_WH_LEP_Tag0" "RECO_WH_LEP_Tag1" "RECO_WH_LEP_Tag2" "RECO_WH_LEP_Tag3" "RECO_ZH_LEP_Tag0" "RECO_ZH_LEP_Tag1") + significantCats_csv=$(IFS=, ; echo "${significantCats[*]}") + for cat in ${significantCats[*]} + do + echo "=> Now plotting all processes together for cat: $cat" + #python RunPlotter.py --procs $smprocs_csv --cats $cat --year 2016preVFP,2016postVFP,2017,2018
--outdir plots --ext packaged --outdir plots --translateCats ../Plots/cats_latex.json + done + # split by process, all the categories together (the SM + some alternatives) + for proc in ${smprocs[*]} + do + echo "=> Now plotting proc $proc for all categories" + python RunPlotter.py --procs $proc --cats "all" --year 2016preVFP,2016postVFP,2017,2018 --ext packaged --outdir plots --translateCats ../Plots/cats_latex.json + done +fi diff --git a/Signal/scripts/calcPhotonSyst.py b/Signal/scripts/calcPhotonSyst.py index d3efef40..b438b645 100644 --- a/Signal/scripts/calcPhotonSyst.py +++ b/Signal/scripts/calcPhotonSyst.py @@ -113,8 +113,9 @@ def getRateVar(_hists): # Loop over processes and add row to dataframe for _proc in opt.procs.split(","): # Glob M125 filename + print "%s/output*M125*%s.root"%(opt.inputWSDir,_proc) _WSFileName = glob.glob("%s/output*M125*%s.root"%(opt.inputWSDir,_proc))[0] - _nominalDataName = "%s_125_%s_%s"%(procToData(_proc.split("_")[0]),sqrts__,opt.cat) + _nominalDataName = "%s_125_%s_%s"%(procToData(_proc),sqrts__,opt.cat) data = data.append({'proc':_proc,'cat':opt.cat,'inputWSFile':_WSFileName,'nominalDataName':_nominalDataName}, ignore_index=True, sort=False) # Loop over rows in dataFrame and open ws diff --git a/Signal/scripts/checkSignalFits.py b/Signal/scripts/checkSignalFits.py new file mode 100644 index 00000000..74537c44 --- /dev/null +++ b/Signal/scripts/checkSignalFits.py @@ -0,0 +1,17 @@ +from os import access,F_OK + +year = "2016postVFP" +ext = "2023-02-13_year%s" % year + +base = "outdir_{ext}/signalFit/output/CMS-HGG_sigfit_{ext}".format(ext=ext) + +allcats = 
["RECO_0J_PTH_0_10_Tag0","RECO_0J_PTH_0_10_Tag1","RECO_0J_PTH_0_10_Tag2","RECO_0J_PTH_GT10_Tag0","RECO_0J_PTH_GT10_Tag1","RECO_0J_PTH_GT10_Tag2","RECO_1J_PTH_0_60_Tag0","RECO_1J_PTH_0_60_Tag1","RECO_1J_PTH_0_60_Tag2","RECO_1J_PTH_120_200_Tag0","RECO_1J_PTH_120_200_Tag1","RECO_1J_PTH_120_200_Tag2","RECO_1J_PTH_60_120_Tag0","RECO_1J_PTH_60_120_Tag1","RECO_1J_PTH_60_120_Tag2","RECO_GE2J_PTH_0_60_Tag0","RECO_GE2J_PTH_0_60_Tag1","RECO_GE2J_PTH_0_60_Tag2","RECO_GE2J_PTH_120_200_Tag0","RECO_GE2J_PTH_120_200_Tag1","RECO_GE2J_PTH_120_200_Tag2","RECO_GE2J_PTH_60_120_Tag0","RECO_GE2J_PTH_60_120_Tag1","RECO_GE2J_PTH_60_120_Tag2","RECO_PTH_200_300_Tag0","RECO_PTH_200_300_Tag1","RECO_PTH_300_450_Tag0","RECO_PTH_300_450_Tag1","RECO_PTH_450_650_Tag0","RECO_PTH_GT650_Tag0","RECO_THQ_LEP","RECO_TTH_HAD_PTH_0_60_Tag0","RECO_TTH_HAD_PTH_0_60_Tag1","RECO_TTH_HAD_PTH_0_60_Tag2","RECO_TTH_HAD_PTH_120_200_Tag0","RECO_TTH_HAD_PTH_120_200_Tag1","RECO_TTH_HAD_PTH_120_200_Tag2","RECO_TTH_HAD_PTH_120_200_Tag3","RECO_TTH_HAD_PTH_200_300_Tag0","RECO_TTH_HAD_PTH_200_300_Tag1","RECO_TTH_HAD_PTH_200_300_Tag2","RECO_TTH_HAD_PTH_60_120_Tag0","RECO_TTH_HAD_PTH_60_120_Tag1","RECO_TTH_HAD_PTH_60_120_Tag2","RECO_TTH_HAD_PTH_GT300_Tag0","RECO_TTH_HAD_PTH_GT300_Tag1","RECO_TTH_LEP_PTH_0_60_Tag0","RECO_TTH_LEP_PTH_0_60_Tag1","RECO_TTH_LEP_PTH_0_60_Tag2","RECO_TTH_LEP_PTH_120_200_Tag0","RECO_TTH_LEP_PTH_120_200_Tag1","RECO_TTH_LEP_PTH_200_300_Tag0","RECO_TTH_LEP_PTH_60_120_Tag0","RECO_TTH_LEP_PTH_60_120_Tag1","RECO_TTH_LEP_PTH_60_120_Tag2","RECO_TTH_LEP_PTH_GT300_Tag0","RECO_VBFLIKEGGH_Tag0","RECO_VBFLIKEGGH_Tag1","RECO_VBFTOPO_ACGGH_Tag0","RECO_VBFTOPO_ACGGH_Tag1","RECO_VBFTOPO_ACVBFBSM_Tag0","RECO_VBFTOPO_ACVBFBSM_Tag1","RECO_VBFTOPO_ACVBFSM_Tag0","RECO_VBFTOPO_VHHAD_Tag0","RECO_VBFTOPO_VHHAD_Tag1","RECO_VH_MET_Tag0","RECO_VH_MET_Tag1","RECO_VH_MET_Tag2","RECO_WH_LEP_PTV_0_75_Tag0","RECO_WH_LEP_PTV_0_75_Tag1","RECO_WH_LEP_PTV_75_150_Tag0","RECO_WH_LEP_PTV_75_150_Tag1","RECO_WH_LEP_PTV_GT150_Tag0","RECO_ZH_L
EP_Tag0","RECO_ZH_LEP_Tag1"] + +allprocs= ["GG2H","QQ2HLL","TTH","TTH_ALT0M","TTH_ALT0Mf05ph0","TTH_ALT0PM","VBF","VBF_ALT_0M","VBF_ALT_0Mf05","VBF_ALT_0PH","VBF_ALT_0PHf05","VBF_ALT_0PM","VBF_ALT_L1","VBF_ALT_L1Zg","VBF_ALT_L1Zgf05","VBF_ALT_L1f05","WH_ALT0L1f05ph0","WH_ALT0PH","WH_ALT0PHf05ph0","WH_ALT0PM","WMINUSH2HQQ","WPLUSH2HQQ","ZH_ALT0L1","ZH_ALT0L1Zg","ZH_ALT0L1Zgf05ph0","ZH_ALT0L1f05ph0","ZH_ALT0M","ZH_ALT0Mf05ph0","ZH_ALT0PH","ZH_ALT0PHf05ph0","ZH_ALT0PM"] + +for proc in allprocs: + for cat in allcats: + fname = "{base}_{proc}_{year}_{cat}.root".format(base=base,proc=proc,year=year,cat=cat) + if not access(fname,F_OK): + print "File ",fname," not present!" + diff --git a/Signal/scripts/fTest.py b/Signal/scripts/fTest.py index b5354edf..2e414550 100644 --- a/Signal/scripts/fTest.py +++ b/Signal/scripts/fTest.py @@ -29,6 +29,7 @@ def get_options(): parser = OptionParser() parser.add_option("--xvar", dest='xvar', default='CMS_hgg_mass', help="Observable to fit") parser.add_option("--inputWSDir", dest='inputWSDir', default='', help="Input flashgg WS directory") + parser.add_option("--outdir", dest='outdir', default=swd__, help="Output directory (default is the current one)") parser.add_option("--ext", dest='ext', default='', help="Extension") parser.add_option("--procs", dest='procs', default='', help="Signal processes") parser.add_option("--nProcsToFTest", dest='nProcsToFTest', default=5, type='int',help="Number of signal processes to fTest (ordered by sum entries), others are set to nRV=1,nWV=1. 
Set to -1 to run over all") @@ -48,7 +49,7 @@ def get_options(): ROOT.gStyle.SetOptStat(0) ROOT.gROOT.SetBatch(True) if opt.doPlots: - if not os.path.isdir("%s/outdir_%s/fTest/Plots"%(swd__,opt.ext)): os.system("mkdir %s/outdir_%s/fTest/Plots"%(swd__,opt.ext)) + if not os.path.isdir("%s/%s/fTest/Plots"%(opt.outdir,opt.ext)): os.system("mkdir -p %s/%s/fTest/Plots"%(opt.outdir,opt.ext)) # Load xvar to fit nominalWSFileName = glob.glob("%s/output*"%(opt.inputWSDir))[0] @@ -72,7 +73,10 @@ def get_options(): WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,opt.mass,proc))[0] f = ROOT.TFile(WSFileName,"read") inputWS = f.Get(inputWSName__) - d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(proc.split("_")[0]),opt.mass,sqrts__,opt.cat)),aset) + print WSFileName + print procToData(proc) + # print "===> In file ",WSFileName," look for roodataset = ","%s_%s_%s_%s"%(procToData(proc),opt.mass,sqrts__,opt.cat) + d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(proc),opt.mass,sqrts__,opt.cat)),aset) df.loc[len(df)] = [proc,d.sumEntries(),1,1] inputWS.Delete() f.Close() @@ -89,7 +93,7 @@ def get_options(): WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,opt.mass,proc))[0] f = ROOT.TFile(WSFileName,"read") inputWS = f.Get(inputWSName__) - d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(proc.split("_")[0]),opt.mass,sqrts__,opt.cat)),aset) + d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(proc),opt.mass,sqrts__,opt.cat)),aset) datasets_RV[opt.mass] = splitRVWV(d,aset,mode="RV") datasets_WV[opt.mass] = splitRVWV(d,aset,mode="WV") @@ -115,8 +119,10 @@ def get_options(): df.loc[df['proc']==proc,'nRV'] = nGauss_opt # Make plots if( opt.doPlots )&( len(ssfs.keys())!=0 ): - plotFTest(ssfs,_opt=nGauss_opt,_outdir="%s/outdir_%s/fTest/Plots"%(swd__,opt.ext),_extension="RV",_proc=proc,_cat=opt.cat,_mass=opt.mass) - 
plotFTestResults(ssfs,_opt=nGauss_opt,_outdir="%s/outdir_%s/fTest/Plots"%(swd__,opt.ext),_extension="RV",_proc=proc,_cat=opt.cat,_mass=opt.mass) + outdir="%s/%s/fTest/Plots"%(opt.outdir,opt.ext) + if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php "+outdir) + plotFTest(ssfs,_opt=nGauss_opt,_outdir=outdir,_extension="RV",_proc=proc,_cat=opt.cat,_mass=opt.mass) + plotFTestResults(ssfs,_opt=nGauss_opt,_outdir=outdir,_extension="RV",_proc=proc,_cat=opt.cat,_mass=opt.mass) # Run fTest: WV # If numEntries below threshold then keep as n = 1 @@ -140,15 +146,17 @@ def get_options(): df.loc[df['proc']==proc,'nWV'] = nGauss_opt # Make plots if( opt.doPlots )&( len(ssfs.keys())!=0 ): - plotFTest(ssfs,_opt=nGauss_opt,_outdir="%s/outdir_%s/fTest/Plots"%(swd__,opt.ext),_extension="WV",_proc=proc,_cat=opt.cat,_mass=opt.mass) - plotFTestResults(ssfs,_opt=nGauss_opt,_outdir="%s/outdir_%s/fTest/Plots"%(swd__,opt.ext),_extension="WV",_proc=proc,_cat=opt.cat,_mass=opt.mass) + outdir="%s/%s/fTest/Plots"%(opt.outdir,opt.ext) + if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php "+outdir) + plotFTest(ssfs,_opt=nGauss_opt,_outdir=outdir,_extension="WV",_proc=proc,_cat=opt.cat,_mass=opt.mass) + plotFTestResults(ssfs,_opt=nGauss_opt,_outdir=outdir,_extension="WV",_proc=proc,_cat=opt.cat,_mass=opt.mass) # Close ROOT file inputWS.Delete() f.Close() # Make output -if not os.path.isdir("%s/outdir_%s/fTest/json"%(swd__,opt.ext)): os.system("mkdir %s/outdir_%s/fTest/json"%(swd__,opt.ext)) +if not os.path.isdir("%s/outdir_%s/fTest/json"%(swd__,opt.ext)): os.system("mkdir -p %s/outdir_%s/fTest/json"%(swd__,opt.ext)) ff = open("%s/outdir_%s/fTest/json/nGauss_%s.json"%(swd__,opt.ext,opt.cat),"w") ff.write("{\n") # Iterate over rows in dataframe: sorted by sumEntries diff --git a/Signal/scripts/getEffAcc.py b/Signal/scripts/getEffAcc.py index 9c9f8862..6cccaeaa 100644 --- a/Signal/scripts/getEffAcc.py +++ 
b/Signal/scripts/getEffAcc.py @@ -25,6 +25,7 @@ def get_options(): parser.add_option('--inputWSDir', dest='inputWSDir', default='', help='Input WS directory') parser.add_option('--ext', default='test', help='Extension (to define analysis)') parser.add_option('--procs', dest='procs', default='', help='Signal processes') + parser.add_option('--year', dest='year', default='', help='year') parser.add_option('--massPoints', dest='massPoints', default='120,125,130', help='MH') parser.add_option('--skipCOWCorr', dest='skipCOWCorr', default=False, action="store_true", help="Skip centralObjectWeight correction for events in acceptance") parser.add_option('--doSTXSFractions', dest='doSTXSFractions', default=False, action="store_true", help="Fractional cross sections in each STXS bin (per stage0 process)") @@ -35,10 +36,10 @@ def get_options(): WSFileNames = extractWSFileNames(opt.inputWSDir) if not WSFileNames: leave() allCats = extractListOfCats(WSFileNames) -if containsNOTAG(WSFileNames): allCats += ",NOTAG" -else: - print " --> [ERROR] getEffAcc.py requires NOTAG dataset. Must use standard weights method in signalFit.py" - leave() +#if containsNOTAG(WSFileNames): allCats += ",NOTAG" +#else: + # print " --> [ERROR] getEffAcc.py requires NOTAG dataset. 
Must use standard weights method in signalFit.py" + #leave() # Define dataframe to store yields: cow = centralObjectWeight if opt.skipCOWCorr: columns_data = ['massPoint','proc','cat','granular_key','nominal_yield'] @@ -52,16 +53,21 @@ def get_options(): for _proc in opt.procs.split(","): print " * proc = %s"%_proc # Find corresponding file + if ("ALT" in _proc) and (_mp != '125'): continue + print("%s/output*M%s*%s.root"%(opt.inputWSDir,_mp,_proc)) _WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,_mp,_proc))[0] + f = ROOT.TFile(_WSFileName,'read') inputWS = f.Get(inputWSName__) # Loop over categories for _cat in allCats.split(","): - nominalDataName = "%s_%s_%s_%s"%(procToData(_proc.split("_")[0]),_mp,sqrts__,_cat) + nominalDataName = "%s_%s_%s_%s"%(procToData(_proc),_mp,sqrts__,_cat) _granular_key = "%s__%s"%(_proc,_cat) + #print(nominalDataName) nominalData = inputWS.data(nominalDataName) _nominal_yield = nominalData.sumEntries() + # Central Object Weight corrections (for events in acceptance) if not opt.skipCOWCorr: # Loop over events and sum w/ centralObjectWeight @@ -95,14 +101,23 @@ def get_options(): for ir,r in df.iterrows(): if r['cat'] == "NOTAG": continue if opt.skipCOWCorr: proc_yield = df[df['proc']==r['proc']].nominal_yield.sum() - else: proc_yield = df[df['proc']==r['proc']].nominal_yield_COWCorr.sum() - ea = r['nominal_yield']/proc_yield + else: + + proc_yield = df[df['proc']==r['proc']].nominal_yield_COWCorr.sum() + + env = os.environ['CMSSW_BASE'] + + with open(env+'/src/flashggFinalFit/Trees2WS/NOTAG_entries.json', 'r') as file: + p_yield_NOTAG = json.load(file) + #ea = r['nominal_yield']/proc_yield + ea = r['nominal_yield']/(proc_yield+p_yield_NOTAG[r['proc']+'_'+_mp+'_'+opt.year]) if ea < 0.: ea = 0. 
effAcc[r['granular_key']] = ea # Write to file if opt.skipCOWCorr: outfileName = "%s/outdir_%s/getEffAcc/json/effAcc_M%s_%s_skipCOWCorr.json"%(swd__,opt.ext,_mp,opt.ext) else: outfileName = "%s/outdir_%s/getEffAcc/json/effAcc_M%s_%s.json"%(swd__,opt.ext,_mp,opt.ext) + print(outfileName) with open(outfileName,'w') as jsonfile: json.dump(effAcc,jsonfile) # Calculate fractional cross section of each STXS bin (in terms of stage0 bin) for normalisation: output in txt file diff --git a/Signal/scripts/packageSignal.py b/Signal/scripts/packageSignal.py index d836fcd9..8f75e581 100644 --- a/Signal/scripts/packageSignal.py +++ b/Signal/scripts/packageSignal.py @@ -6,6 +6,8 @@ import ROOT from optparse import OptionParser +MHNominal = '125' + def get_options(): parser = OptionParser() parser.add_option("--cat", dest='cat', default='RECO_0J_PTH_0_10_Tag0', help="RECO category to package") @@ -27,7 +29,6 @@ def rooiter(x): # Extract all files to be merged fNames = {} for ext in opt.exts.split(","): fNames[ext] = glob.glob("outdir_%s/signalFit/output/CMS-HGG_sigfit_%s_*_%s.root"%(ext,ext,opt.cat)) - # Define ouput packaged workspace print " --> Packaging output workspaces" packagedWS = ROOT.RooWorkspace("wsig_13TeV","wsig_13TeV") @@ -37,12 +38,13 @@ def rooiter(x): data_merged = {} data_merged_names = [] for mp in opt.massPoints.split(","): - data_merged["m%s"%mp] = ROOT.TFile(fNames[opt.exts.split(",")[0]][0]).Get("wsig_13TeV").data("sig_mass_m%s_%s"%(mp,opt.cat)).emptyClone("sig_mass_m%s_%s"%(mp,opt.cat)) + data_merged["m%s"%mp] = ROOT.TFile(fNames[opt.exts.split(",")[0]][0]).Get("wsig_13TeV").data("sig_mass_m%s_%s"%(MHNominal,opt.cat)).emptyClone("sig_mass_m%s_%s"%(mp,opt.cat)) data_merged_names.append( data_merged["m%s"%mp].GetName() ) for ext, fNames_by_ext in fNames.iteritems(): for fName in fNames_by_ext: for mp in opt.massPoints.split(","): + if 'ALT' in fName and mp!=MHNominal: continue d = ROOT.TFile(fName).Get("wsig_13TeV").data("sig_mass_m%s_%s"%(mp,opt.cat)) for i 
in range(d.numEntries()): p = d.get(i) diff --git a/Signal/scripts/renameTrees.py b/Signal/scripts/renameTrees.py new file mode 100644 index 00000000..b91a9fc8 --- /dev/null +++ b/Signal/scripts/renameTrees.py @@ -0,0 +1,42 @@ +# script to append a simple name of the process to the ROOT files from flashgg + +import os, sys, re +from optparse import OptionParser +import glob + +def get_options(): + parser = OptionParser() + parser.add_option('--inputWSDir', dest='inputWSDir', default='', help='Input WS directory') + parser.add_option('--dryRun', dest='dryRun', action='store_true', default=False, help='Just print the commands, do not rename files') + return parser.parse_args() +(opt,args) = get_options() + +cmds = [] + +procmap = {"GluGluH":"GG2H", "VBFHToGG":"VBF", "VBFHiggs0": "VBF_ALT", "ttHJet":"TTH", "ttHiggs0":"TTH_ALT", "ZH_HToGG":"ZH", "ZHiggs0":"ZH_ALT", "WminusH":"WH_WM", "WplusH": "WH_WP", "WHiggs0":"WH_ALT"} + +for fname in glob.glob("%s/output*M*.root"%opt.inputWSDir): + #print "changing name to ",fname + basename = fname.split(".root")[0] + newbasename = basename.replace("-pythia8","_pythia8").replace("_M-","_M") + p = re.compile("\S+Higgs(\S+)ToGG\S+") + m = p.match(newbasename) + altmodel = m.group(1) if m else "" + #print newbasename, " altmodel = ",altmodel + proc = "" + for prefix,suffix in procmap.iteritems(): + if prefix in fname: + proc = suffix + break + newname = "%s_%s%s.root" % (newbasename,proc,altmodel) + cmds.append("mv -i %s %s" % (fname,newname)) + if opt.dryRun: + print cmds[-1] + +if not opt.dryRun: + for c in cmds: + os.system(c) + + + + diff --git a/Signal/scripts/signalFit.py b/Signal/scripts/signalFit.py index eda2a095..d4e05c52 100644 --- a/Signal/scripts/signalFit.py +++ b/Signal/scripts/signalFit.py @@ -31,6 +31,7 @@ def get_options(): parser = OptionParser() parser.add_option("--xvar", dest='xvar', default='CMS_hgg_mass', help="Observable to fit") parser.add_option("--inputWSDir", dest='inputWSDir', default='', help="Input 
flashgg WS directory") + parser.add_option("--outdir", dest='outdir', default=swd__, help="Output directory (default is the current one)") parser.add_option("--ext", dest='ext', default='', help="Extension") parser.add_option("--proc", dest='proc', default='', help="Signal process") parser.add_option("--cat", dest='cat', default='', help="RECO category") @@ -53,7 +54,7 @@ def get_options(): parser.add_option("--scalesGlobal", dest='scalesGlobal', default='', help='Photon shape systematics: scalesGlobal') parser.add_option("--smears", dest='smears', default='', help='Photon shape systematics: smears') # Parameter values - parser.add_option('--replacementThreshold', dest='replacementThreshold', default=100, type='int', help="Nevent threshold to trigger replacement dataset") + parser.add_option('--replacementThreshold', dest='replacementThreshold', default=45, type='int', help="Nevent threshold to trigger replacement dataset") parser.add_option('--beamspotWidthData', dest='beamspotWidthData', default=3.4, type='float', help="Width of beamspot in data [cm]") parser.add_option('--beamspotWidthMC', dest='beamspotWidthMC', default=5.14, type='float', help="Width of beamspot in MC [cm]") parser.add_option('--MHPolyOrder', dest='MHPolyOrder', default=1, type='int', help="Order of polynomial for MH dependence") @@ -61,15 +62,16 @@ def get_options(): # Minimizer options parser.add_option('--minimizerMethod', dest='minimizerMethod', default='TNC', help="(Scipy) Minimizer method") parser.add_option('--minimizerTolerance', dest='minimizerTolerance', default=1e-8, type='float', help="(Scipy) Minimizer toleranve") + parser.add_option('--doYield', dest='doYield', default=False, action="store_true", help="Produce Signal FittingYield txt files") return parser.parse_args() (opt,args) = get_options() ROOT.gStyle.SetOptStat(0) ROOT.gROOT.SetBatch(True) +print opt.massPoints # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # 
SETUP: signal fit -print " --> Running fit for (proc,cat) = (%s,%s)"%(opt.proc,opt.cat) if( len(opt.massPoints.split(",")) == 1 )&( opt.MHPolyOrder > 0 ): print " --> [WARNING] Attempting to fit polynomials of O(MH^%g) for single mass point. Setting order to 0"%opt.MHPolyOrder opt.MHPolyOrder=0 @@ -89,7 +91,7 @@ def get_options(): else: xsbrMap = globalXSBRMap[opt.analysis] # Load RooRealVars -nominalWSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,MHNominal,opt.proc))[0] +nominalWSFileName = glob.glob("%s/output*%s*.root"%(opt.inputWSDir,MHNominal))[0] f0 = ROOT.TFile(nominalWSFileName,"read") inputWS0 = f0.Get(inputWSName__) xvar = inputWS0.var(opt.xvar) @@ -105,10 +107,10 @@ def get_options(): if opt.skipZeroes: # Extract nominal mass dataset and see if entries == 0 - WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,MHNominal,opt.proc))[0] + WSFileName = glob.glob("%s/output*%s*.root"%(opt.inputWSDir,MHNominal))[0] f = ROOT.TFile(WSFileName,"read") inputWS = f.Get(inputWSName__) - d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(opt.proc.split("_")[0]),MHNominal,sqrts__,opt.cat)),aset) + d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(opt.proc),MHNominal,sqrts__,opt.cat)),aset) if( d.numEntries() == 0. )|( d.sumEntries <= 0. ): print " --> (%s,%s) has zero events. 
Will not construct signal model"%(opt.proc,opt.cat) exit() @@ -151,10 +153,12 @@ def get_options(): # For RV (or if skipping vertex scenario split) datasetRVForFit = od() for mp in opt.massPoints.split(","): - WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,mp,procRVFit))[0] + if 'ALT' in procRVFit and mp!=MHNominal: continue + print "##########",glob.glob("%s/output*%s*%s.root"%(opt.inputWSDir,mp,procRVFit)) + WSFileName = glob.glob("%s/output*%s*%s.root"%(opt.inputWSDir,mp,procRVFit))[0] f = ROOT.TFile(WSFileName,"read") inputWS = f.Get(inputWSName__) - d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procRVFit.split("_")[0]),mp,sqrts__,catRVFit)),aset) + d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procRVFit),mp,sqrts__,catRVFit)),aset) nominalDatasets[mp] = d.Clone() if opt.skipVertexScenarioSplit: datasetRVForFit[mp] = d else: datasetRVForFit[mp] = splitRVWV(d,aset,mode="RV") @@ -166,10 +170,12 @@ def get_options(): nominal_numEntries = datasetRVForFit[MHNominal].numEntries() procReplacementFit, catReplacementFit = rMap['procRVMap'][opt.cat], rMap['catRVMap'][opt.cat] for mp in opt.massPoints.split(","): - WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,mp,procReplacementFit))[0] + if 'ALT' in procRVFit and mp!=MHNominal: continue + print("%s/output*%s*%s.root"%(opt.inputWSDir,mp,procReplacementFit)) + WSFileName = glob.glob("%s/output*%s*%s.root"%(opt.inputWSDir,mp,procReplacementFit))[0] f = ROOT.TFile(WSFileName,"read") inputWS = f.Get(inputWSName__) - d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procReplacementFit.split("_")[0]),mp,sqrts__,catReplacementFit)),aset) + d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procReplacementFit),mp,sqrts__,catReplacementFit)),aset) if opt.skipVertexScenarioSplit: datasetRVForFit[mp] = d else: datasetRVForFit[mp] = splitRVWV(d,aset,mode="RV") inputWS.Delete() @@ -185,30 +191,35 @@ def get_options(): if opt.skipVertexScenarioSplit: print " --> Too 
few entries in nominal dataset (%g < %g). Using replacement (proc,cat) = (%s,%s) for extracting shape"%(nominal_numEntries,opt.replacementThreshold,procRVFit,catRVFit) for mp in opt.massPoints.split(","): + if 'ALT' in procRVFit and mp!=MHNominal: continue print " * MH = %s GeV: numEntries = %g, sumEntries = %.6f"%(mp,datasetRVForFit[mp].numEntries(),datasetRVForFit[mp].sumEntries()) else: print " --> RV: Too few entries in nominal dataset (%g < %g). Using replacement (proc,cat) = (%s,%s) for extracting shape"%(nominal_numEntries,opt.replacementThreshold,procRVFit,catRVFit) for mp in opt.massPoints.split(","): + if 'ALT' in procRVFit and mp!=MHNominal: continue print " * MH = %s: numEntries = %g, sumEntries = %.6f"%(mp,datasetRVForFit[mp].numEntries(),datasetRVForFit[mp].sumEntries()) else: if opt.skipVertexScenarioSplit: print " --> Using (proc,cat) = (%s,%s) for extracting shape"%(procRVFit,catRVFit) for mp in opt.massPoints.split(","): + if 'ALT' in procRVFit and mp!=MHNominal: continue print " * MH = %s: numEntries = %g, sumEntries = %.6f"%(mp,datasetRVForFit[mp].numEntries(),datasetRVForFit[mp].sumEntries()) else: print " --> RV: Using (proc,cat) = (%s,%s) for extracting shape"%(procRVFit,catRVFit) for mp in opt.massPoints.split(","): + if 'ALT' in procRVFit and mp!=MHNominal: continue print " * MH = %s: numEntries = %g, sumEntries = %.6f"%(mp,datasetRVForFit[mp].numEntries(),datasetRVForFit[mp].sumEntries()) # Repeat for WV scenario if not opt.skipVertexScenarioSplit: datasetWVForFit = od() for mp in opt.massPoints.split(","): - WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,mp,procWVFit))[0] + if 'ALT' in procWVFit and mp!=MHNominal: continue + WSFileName = glob.glob("%s/output*%s*%s.root"%(opt.inputWSDir,mp,procWVFit))[0] f = ROOT.TFile(WSFileName,"read") inputWS = f.Get(inputWSName__) - d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procWVFit.split("_")[0]),mp,sqrts__,catWVFit)),aset) + d = 
reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procWVFit),mp,sqrts__,catWVFit)),aset) datasetWVForFit[mp] = splitRVWV(d,aset,mode="WV") inputWS.Delete() f.Close() @@ -216,27 +227,34 @@ def get_options(): # Check nominal mass dataset if( datasetWVForFit[MHNominal].numEntries() < opt.replacementThreshold )|( datasetWVForFit[MHNominal].sumEntries() < 0. ): nominal_numEntries = datasetWVForFit[MHNominal].numEntries() + procReplacementFit, catReplacementFit = rMap['procWV'], rMap['catWV'] for mp in opt.massPoints.split(","): - WSFileName = glob.glob("%s/output*M%s*%s.root"%(opt.inputWSDir,mp,procReplacementFit))[0] + if 'ALT' in procReplacementFit and mp!=MHNominal: continue + WSFileName = glob.glob("%s/output*%s*%s.root"%(opt.inputWSDir,mp,procReplacementFit))[0] f = ROOT.TFile(WSFileName,"read") inputWS = f.Get(inputWSName__) - d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procReplacementFit.split("_")[0]),mp,sqrts__,catReplacementFit)),aset) + print WSFileName, inputWS + d = reduceDataset(inputWS.data("%s_%s_%s_%s"%(procToData(procReplacementFit),mp,sqrts__,catReplacementFit)),aset) datasetWVForFit[mp] = splitRVWV(d,aset,mode="WV") inputWS.Delete() f.Close() # Check if replacement dataset has too few entries: if so throw error if( datasetWVForFit[MHNominal].numEntries() < opt.replacementThreshold )|( datasetWVForFit[MHNominal].sumEntries() < 0. ): print " --> [ERROR] replacement dataset (%s,%s) has too few entries (%g < %g)"%(procReplacementFit,catReplacementFit,datasetWVForFit[MHNominal].numEntries,opt.replacementThreshold) + + sys.exit(1) else: procWVFit, catWVFit = procReplacementFit, catReplacementFit print " --> WV: Too few entries in nominal dataset (%g < %g). 
Using replacement (proc,cat) = (%s,%s) for extracting shape"%(nominal_numEntries,opt.replacementThreshold,procWVFit,catWVFit) for mp in opt.massPoints.split(","): + if 'ALT' in procWVFit and mp!=MHNominal: continue print " * MH = %s: numEntries = %g, sumEntries = %.6f"%(mp,datasetWVForFit[mp].numEntries(),datasetWVForFit[mp].sumEntries()) else: print " --> WV: Using (proc,cat) = (%s,%s) for extracting shape"%(procWVFit,catRVFit) for mp in opt.massPoints.split(","): + if 'ALT' in procWVFit and mp!=MHNominal: continue print " * MH = %s: numEntries = %g, sumEntries = %.6f"%(mp,datasetWVForFit[mp].numEntries(),datasetWVForFit[mp].sumEntries()) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -244,15 +262,19 @@ def get_options(): if not opt.skipBeamspotReweigh: # Datasets for fit for mp,d in datasetRVForFit.iteritems(): + if 'ALT' in d and mp!=MHNominal: continue drw = beamspotReweigh(datasetRVForFit[mp],opt.beamspotWidthData,opt.beamspotWidthMC,xvar,dZ,_x=opt.xvar) datasetRVForFit[mp] = drw if not opt.skipVertexScenarioSplit: for mp,d in datasetWVForFit.iteritems(): + if mp!=MHNominal: continue drw = beamspotReweigh(datasetWVForFit[mp],opt.beamspotWidthData,opt.beamspotWidthMC,xvar,dZ,_x=opt.xvar) datasetWVForFit[mp] = drw - print " --> Beamspot reweigh: RV(sumEntries) = %.6f, WV(sumEntries) = %.6f"%(datasetRVForFit[mp].sumEntries(),datasetWVForFit[mp].sumEntries()) + print " --> Beamspot reweigh: RV(sumEntries) = %.6f, WV(sumEntries) = %.6f"%(datasetRVForFit[mp].sumEntries(),datasetWVForFit[mp].sumEntries()) else: - print " --> Beamspot reweigh: sumEntries = %.6f"%datasetRVForFit[mp].sumEntries() + for mp,d in datasetRVForFit.iteritems(): + if mp!=MHNominal: continue + print " --> Beamspot reweigh: sumEntries = %.6f"%datasetRVForFit[mp].sumEntries() # Nominal datasets for saving to output Workspace: preserve norm for eff * acc calculation for mp,d in nominalDatasets.iteritems(): @@ -263,6 +285,7 @@ def 
get_options(): # If using nGaussian fit then extract nGaussians from fTest json file if not opt.useDCB: with open("%s/outdir_%s/fTest/json/nGauss_%s.json"%(swd__,opt.ext,catRVFit)) as jf: ngauss = json.load(jf) + print "%s/outdir_%s/fTest/json/nGauss_%s.json"%(swd__,opt.ext,catRVFit) nRV = int(ngauss["%s__%s"%(procRVFit,catRVFit)]['nRV']) if opt.skipVertexScenarioSplit: print " --> Fitting function: convolution of nGaussians (%g)"%nRV else: @@ -311,17 +334,43 @@ def get_options(): fm.save(outWS) outWS.Write() fout.Close() + # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # PLOTTING -if opt.doPlots: +if opt.doPlots: print "\n --> Making plots..." - if not os.path.isdir("%s/outdir_%s/signalFit/Plots"%(swd__,opt.ext)): os.system("mkdir %s/outdir_%s/signalFit/Plots"%(swd__,opt.ext)) + outdir="%s/%s/signalFit/Plots"%(opt.outdir,opt.ext) + if not os.path.isdir(outdir): os.system("mkdir -p %s"%outdir) + if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php "+outdir) if opt.skipVertexScenarioSplit: - plotPdfComponents(ssfRV,_outdir="%s/outdir_%s/signalFit/Plots"%(swd__,opt.ext),_extension="total_",_proc=procRVFit,_cat=catRVFit) + plotPdfComponents(ssfRV,_outdir=outdir,_extension="total_",_proc=procRVFit,_cat=catRVFit) if not opt.skipVertexScenarioSplit: - plotPdfComponents(ssfRV,_outdir="%s/outdir_%s/signalFit/Plots"%(swd__,opt.ext),_extension="RV_",_proc=procRVFit,_cat=catRVFit) - plotPdfComponents(ssfWV,_outdir="%s/outdir_%s/signalFit/Plots"%(swd__,opt.ext),_extension="WV_",_proc=procWVFit,_cat=catRVFit) + plotPdfComponents(ssfRV,_outdir=outdir,_extension="RV_",_proc=procRVFit,_cat=catRVFit) + plotPdfComponents(ssfWV,_outdir=outdir,_extension="WV_",_proc=procWVFit,_cat=catRVFit) # Plot interpolation - plotInterpolation(fm,_outdir="%s/outdir_%s/signalFit/Plots"%(swd__,opt.ext)) - plotSplines(fm,_outdir="%s/outdir_%s/signalFit/Plots"%(swd__,opt.ext),_nominalMass=MHNominal) 
+ + plotInterpolation(fm,_outdir=outdir) + plotSplines(fm,_outdir=outdir,_nominalMass=MHNominal) + + +if opt.doYield: + print "\n --> Making Yield..." + outdir="%s/%s/signalFit/Yield"%(opt.outdir,opt.ext) + if not os.path.isdir(outdir): os.system("mkdir -p %s"%outdir) + if os.path.exists("/afs/cern.ch"): os.system("cp /afs/cern.ch/user/g/gpetrucc/php/index.php "+outdir) + # Plot interpolation + fm.Functions['final_normThisLumi'].getVal() + print("Creating Yield File: %s/Yield_%s_%s"%(outdir,opt.proc, opt.cat)) + with open("%s/Yield_%s_%s"%(outdir,opt.proc, opt.cat), 'w') as file: + # Write header line + file.write("proc cat yield entries\n") + + # Format the output string and write it + fm.MH.setVal(125) + + fm.intLumi.setVal(lumiScaleFactor*float(lumiMap[fm.year])) + + + + file.write("%s %s %f %d \n" % (opt.proc, opt.cat, fm.Functions['final_normThisLumi'].getVal(), fm.datasets['125'].numEntries())) \ No newline at end of file diff --git a/Signal/tools/XSBRMap.py b/Signal/tools/XSBRMap.py index 548a93aa..3593ad1d 100644 --- a/Signal/tools/XSBRMap.py +++ b/Signal/tools/XSBRMap.py @@ -143,3 +143,48 @@ globalXSBRMap['STXS']['THQ'] = {'mode':'tHq','factor':0.9721} globalXSBRMap['STXS']['THW_FWDH'] = {'mode':'tHW','factor':0.0106} globalXSBRMap['STXS']['THW'] = {'mode':'tHW','factor':0.9894} + + + +# AC analysis: add factor for category composition +globalXSBRMap['AC'] = od() +globalXSBRMap['AC']['decay'] = {'mode':'hgg'} +globalXSBRMap['AC']['GG2H'] = {'mode':'ggH'} +globalXSBRMap['AC']['VBF'] = {'mode':'qqH'} +globalXSBRMap['AC']['TTH'] = {'mode':'ttH'} +globalXSBRMap['AC']['WH'] = {'mode':'WH'} +globalXSBRMap['AC']['wh'] = {'mode':'WH'} +globalXSBRMap['AC']['QQ2HLNU'] = {'mode':'WH'} +globalXSBRMap['AC']['WMINUSH2HQQ'] = {'mode':'WH','factor':0.5} +globalXSBRMap['AC']['WPLUSH2HQQ'] = {'mode':'WH','factor':0.5} +globalXSBRMap['AC']['ZH'] = {'mode':'qqZH'} +globalXSBRMap['AC']['QQ2HLL'] = {'mode':'qqZH'} +# not sure the following for the ALT modes is correct. 
From the spline plot it seems OK: takes the MH=125 GeV value from JHU sample and then extrapolate with the relative ratio of qqH SM. In any case the fit for the fai profiles mu +globalXSBRMap['AC']['VBF_ALT_0PM'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_0PH'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_0PHf05'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_0M'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_0Mf05'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_L1'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_L1f05'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_L1Zg'] = {'mode':'qqH'} +globalXSBRMap['AC']['VBF_ALT_L1Zgf05'] = {'mode':'qqH'} +globalXSBRMap['AC']['WH_ALT0L1f05ph0'] = {'mode':'WH'} +globalXSBRMap['AC']['WH_ALT0PHf05ph0'] = {'mode':'WH'} +globalXSBRMap['AC']['wh_ALT_0M'] = {'mode':'WH'} +globalXSBRMap['AC']['wh_ALT_L1'] = {'mode':'WH'} +globalXSBRMap['AC']['WH_ALT0PH'] = {'mode':'WH'} +globalXSBRMap['AC']['WH_ALT0PM'] = {'mode':'WH'} +globalXSBRMap['AC']['ZH_ALT0L1f05ph0'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0L1'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0L1Zgf05ph0'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0L1Zg'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0Mf05ph0'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0M'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0PHf05ph0'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0PH'] = {'mode':'qqZH'} +globalXSBRMap['AC']['ZH_ALT0PM'] = {'mode':'qqZH'} +globalXSBRMap['AC']['TTH_ALT0Mf05ph0'] = {'mode':'ttH'} +globalXSBRMap['AC']['TTH_ALT0M'] = {'mode':'ttH'} +globalXSBRMap['AC']['TTH_ALT0PM'] = {'mode':'ttH'} + diff --git a/Signal/tools/finalModel.py b/Signal/tools/finalModel.py index b9c33bba..3d1064b5 100644 --- a/Signal/tools/finalModel.py +++ b/Signal/tools/finalModel.py @@ -136,6 +136,9 @@ def __init__(self,_ssfMap,_proc,_cat,_ext,_year,_sqrts,_datasets,_xvar,_MH,_MHLo # Functions to get XS, BR and EA splines for given proc/decay from map def buildXSBRSplines(self): 
mh = np.linspace(120.,130.,101) + # the alternative models have only the nominal mass. Take the trends from the SM one + self.proc_full = self.proc + if "ALT" in self.proc: self.proc = self.proc.split("_")[0] # XS fp = self.xsbrMap[self.proc]['factor'] if 'factor' in self.xsbrMap[self.proc] else 1. mp = self.xsbrMap[self.proc]['mode'] @@ -154,17 +157,29 @@ def buildEffAccSpline(self): for mp in self.massPoints.split(","): mh.append(float(mp)) if self.doEffAccFromJson: - jfname = "%s/outdir_%s/getEffAcc/json/effAcc_M%s_%s.json"%(swd__,self.ext,mp,self.ext) + jfname = "%s/outdir_%s/getEffAcc/json/effAcc_M%s_%s_skipCOWCorr.json"%(swd__,self.ext,mp,self.ext) if not os.path.exists(jfname): print " --> [ERROR] effAcc json file (%s) does not exist for mass point = %s. Run getEffAcc first."%(jfname,mp) sys.exit(1) with open(jfname,'r') as jf: ea_data = json.load(jf) - ea.append(float(ea_data['%s__%s'%(self.proc,self.cat)])) + print(self.proc,self.cat) + #if ('ALT' in self.proc_full) and (mp != '125'): continue + print("%s/outdir_%s/getEffAcc/json/effAcc_M%s_%s_skipCOWCorr.json"%(swd__,self.ext,mp,self.ext)) + + + ea.append(float(ea_data['%s__%s'%(self.proc_full,self.cat)])) + print('json ea= ') + print(ea) else: sumw = self.datasets[mp].sumEntries() self.MH.setVal(float(mp)) + xs,br = self.Splines['xs'].getVal(), self.Splines['br'].getVal() + print(sumw) ea.append(sumw/(lumiScaleFactor*xs*br)) + print(lumiScaleFactor,xs,br,lumiScaleFactor*xs*br) + print('ea= ') + print(ea) # If single mass point then add MHLow and MHHigh dummy points for constant ea if len(ea) == 1: ea, mh = [ea[0],ea[0],ea[0]], [float(self.MHLow),mh[0],float(self.MHHigh)] # Convert to numpy arrays and make spline @@ -313,6 +328,7 @@ def buildPdf(self,ssf,ext='',useDCB=False,_recursive=True): self.buildMean('dm_g%g_%s'%(g,extStr),skipSystematics=self.skipSystematics) self.buildSigma('sigma_g%g_%s'%(g,extStr),skipSystematics=self.skipSystematics) # Build Gaussian + if self.doVoigtian: 
self.Pdfs['gaus_g%g_%s'%(g,extStr)] = ROOT.RooVoigtian("gaus_g%g_%s"%(g,extStr),"gaus_g%g_%s"%(g,extStr),self.xvar,self.Functions["mean_g%g_%s"%(g,extStr)],self.GammaH,self.Functions["sigma_g%g_%s"%(g,extStr)]) else: diff --git a/Signal/tools/plottingTools.py b/Signal/tools/plottingTools.py index 9af2ec94..4cb6fad0 100644 --- a/Signal/tools/plottingTools.py +++ b/Signal/tools/plottingTools.py @@ -90,6 +90,7 @@ def plotFTest(ssfs,_opt=1,_outdir='./',_extension='',_proc='',_cat='',_mass='125 # Extract data histogram hists['data'] = ssf.xvar.createHistogram("h_data%s"%_extension,ROOT.RooFit.Binning(ssf.nBins)) ssf.DataHists[_mass].fillHistogram(hists['data'],ROOT.RooArgList(ssf.xvar)) + hists['data'].Scale(1./hists['data'].Integral()) hists['data'].Scale(float(ssf.nBins)/1600) hists['data'].SetMarkerStyle(20) hists['data'].SetMarkerColor(1) @@ -307,7 +308,7 @@ def plotInterpolation(_finalModel,_outdir='./',_massPoints='120,121,122,123,124, dh[mp] = ROOT.RooDataHist("dh_%s"%mp,"dh_%s"%mp,ROOT.RooArgSet(_finalModel.xvar),_finalModel.Datasets[mp]) hists['data_%s'%mp] = _finalModel.xvar.createHistogram("h_data_%s"%mp,ROOT.RooFit.Binning(_finalModel.xvar.getBins())) dh[mp].fillHistogram(hists['data_%s'%mp],ROOT.RooArgList(_finalModel.xvar)) - if norm == 0.: hists['data_%s'%mp].Scale(0) + if norm == 0. 
or hists['data_%s'%mp].Integral()==0: hists['data_%s'%mp].Scale(0) else: hists['data_%s'%mp].Scale(norm/(hists['data_%s'%mp].Integral())) hists['data_%s'%mp].SetMarkerStyle(20) hists['data_%s'%mp].SetMarkerColor(colorMap[mp]) @@ -357,7 +358,7 @@ def plotSplines(_finalModel,_outdir="./",_nominalMass='125',splinesToPlot=['xs', xnom = od() _finalModel.MH.setVal(float(_nominalMass)) for sp in splinesToPlot: xnom[sp] = _finalModel.Splines[sp].getVal() - _finalModel.intLumi.setVal(float(lumiMap[_finalModel.year])) + _finalModel.intLumi.setVal(lumiScaleFactor*float(lumiMap[_finalModel.year])) xnom['norm'] = _finalModel.Functions['final_normThisLumi'].getVal() # Loop over mass points p = 0 @@ -424,7 +425,7 @@ def plotSplines(_finalModel,_outdir="./",_nominalMass='125',splinesToPlot=['xs', # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Function for plotting final signal model: neat def plotSignalModel(_hists,_opt,_outdir=".",offset=0.02): - colorMap = {'2016':38,'2017':30,'2018':46} + colorMap = {'2016':38,'2016preVFP': 38, '2016postVFP': 35,'2017':30,'2018':46} canv = ROOT.TCanvas("c","c",650,600) canv.SetBottomMargin(0.12) canv.SetLeftMargin(0.15) @@ -556,7 +557,7 @@ def plotSignalModel(_hists,_opt,_outdir=".",offset=0.02): if _opt.cats == 'all': catStr, catExt = "All categories", "all" elif _opt.cats == 'wall': catStr, catExt = "#splitline{All categories}{S/(S+B) weighted}", "wall" - elif len(_opt.cats.split(","))>1: procStr, procExt = "Multiple categories", "multipleCats" + elif len(_opt.cats.split(","))>1: catStr, catExt = "Multiple categories", "multipleCats" else: catStr, catExt = Translate(_opt.cats,translateCats), _opt.cats lat1.DrawLatex(0.85,0.86,"%s"%catStr) diff --git a/Signal/tools/replacementMap.py b/Signal/tools/replacementMap.py index d8a21832..7cd27f1d 100644 --- a/Signal/tools/replacementMap.py +++ b/Signal/tools/replacementMap.py @@ -113,11 +113,11 @@ 
globalReplacementMap["STXS"]["procRVMap"]["RECO_VH_MET_Tag0"] = "QQ2HLL_PTV_150_250_0J" globalReplacementMap["STXS"]["procRVMap"]["RECO_VH_MET_Tag1"] = "QQ2HLL_PTV_75_150" globalReplacementMap["STXS"]["procRVMap"]["RECO_VH_MET_Tag2"] = "QQ2HLL_PTV_75_150" -globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_0_75_Tag0"] = "QQ2HLNU_PTV_0_75" -globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_0_75_Tag1"] = "QQ2HLNU_PTV_0_75" -globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_75_150_Tag0"] = "QQ2HLNU_PTV_75_150" -globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_75_150_Tag1"] = "QQ2HLNU_PTV_75_150" -globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_GT150_Tag0"] = "QQ2HLNU_PTV_150_250_0J" +globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_0_75_Tag0"] = "WMINUSH2HQQ_PTV_0_75" +globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_0_75_Tag1"] = "WMINUSH2HQQ_PTV_0_75" +globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_75_150_Tag0"] = "WMINUSH2HQQ_PTV_75_150" +globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_75_150_Tag1"] = "WMINUSH2HQQ_PTV_75_150" +globalReplacementMap["STXS"]["procRVMap"]["RECO_WH_LEP_PTV_GT150_Tag0"] = "WMINUSH2HQQ_PTV_150_250_0J" globalReplacementMap["STXS"]["procRVMap"]["RECO_ZH_LEP_Tag0"] = "QQ2HLL_PTV_0_75" globalReplacementMap["STXS"]["procRVMap"]["RECO_ZH_LEP_Tag1"] = "QQ2HLL_PTV_0_75" # Replacement category for RV fit @@ -206,3 +206,181 @@ globalReplacementMap["STXS"]["catRVMap"]["RECO_WH_LEP_PTV_GT150_Tag0"] = "RECO_WH_LEP_PTV_GT150_Tag0" globalReplacementMap["STXS"]["catRVMap"]["RECO_ZH_LEP_Tag0"] = "RECO_ZH_LEP_Tag0" globalReplacementMap["STXS"]["catRVMap"]["RECO_ZH_LEP_Tag1"] = "RECO_ZH_LEP_Tag1" + + + + +# AC analysis +globalReplacementMap['AC'] = od() +# For WRONG VERTEX SCENARIO: +# * single proc x cat for wrong vertex since for dZ > 1cm shape independent of proc x cat +# * use proc x cat with highest number of WV events +globalReplacementMap['AC']['procWV'] = 
"GG2H" +globalReplacementMap['AC']['catWV'] = "RECO_0J_PTH_GT10_Tag1" +# For RIGHT VERTEX SCENARIO: +# * default mapping is to use diagonal process from given category +# * if few events in diagonal process then may need to change the category aswell (see catRVMap) +# * map must contain entry for all cats being processed (for replacement proc and cat) +globalReplacementMap['AC']['procRVMap'] = od() +globalReplacementMap["AC"]["procRVMap"]["RECO_0J_PTH_0_10_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_0J_PTH_0_10_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_0J_PTH_0_10_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_0J_PTH_GT10_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_0J_PTH_GT10_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_0J_PTH_GT10_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_0_60_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_0_60_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_0_60_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_120_200_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_120_200_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_120_200_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_60_120_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_60_120_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_1J_PTH_60_120_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_0_60_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_0_60_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_0_60_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_120_200_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_120_200_Tag1"] = "GG2H" 
+globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_120_200_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_60_120_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_60_120_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_GE2J_PTH_60_120_Tag2"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_PTH_200_300_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_PTH_200_300_Tag1"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_PTH_300_450_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_PTH_300_450_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_PTH_450_650_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_PTH_GT650_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_THQ_LEP"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_0_60_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_0_60_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_0_60_Tag2"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag2"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag3"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_200_300_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_200_300_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_200_300_Tag2"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_60_120_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_60_120_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_60_120_Tag2"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_GT300_Tag0"] = "TTH" 
+globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_HAD_PTH_GT300_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_0_60_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_0_60_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_0_60_Tag2"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_120_200_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_120_200_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_200_300_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_60_120_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_60_120_Tag1"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_60_120_Tag2"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_TTH_LEP_PTH_GT300_Tag0"] = "TTH" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFLIKEGGH_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFLIKEGGH_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACGGH_Tag0"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACGGH_Tag1"] = "GG2H" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVBFBSM_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVBFBSM_Tag1"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVBFSM_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVHHADBSM_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVHHADBSM_Tag1"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVHHADSM_Tag0"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVHHADSM_Tag1"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VBFTOPO_ACVHHADSM_Tag2"] = "VBF" +globalReplacementMap["AC"]["procRVMap"]["RECO_VH_MET_Tag0"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_VH_MET_Tag1"] = "WMINUSH2HQQ" 
+globalReplacementMap["AC"]["procRVMap"]["RECO_VH_MET_Tag2"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_VH_MET_Tag3"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_VH_MET_Tag4"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_WH_LEP_Tag0"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_WH_LEP_Tag1"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_WH_LEP_Tag2"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_WH_LEP_Tag3"] = "WMINUSH2HQQ" +globalReplacementMap["AC"]["procRVMap"]["RECO_ZH_LEP_Tag0"] = "QQ2HLL" +globalReplacementMap["AC"]["procRVMap"]["RECO_ZH_LEP_Tag1"] = "QQ2HLL" + + +# Replacement category for RV fit +globalReplacementMap['AC']["catRVMap"] = od() +globalReplacementMap["AC"]["catRVMap"]["RECO_0J_PTH_0_10_Tag0"] = "RECO_0J_PTH_0_10_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_0J_PTH_0_10_Tag1"] = "RECO_0J_PTH_0_10_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_0J_PTH_0_10_Tag2"] = "RECO_0J_PTH_0_10_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_0J_PTH_GT10_Tag0"] = "RECO_0J_PTH_GT10_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_0J_PTH_GT10_Tag1"] = "RECO_0J_PTH_GT10_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_0J_PTH_GT10_Tag2"] = "RECO_0J_PTH_GT10_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_0_60_Tag0"] = "RECO_1J_PTH_0_60_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_0_60_Tag1"] = "RECO_1J_PTH_0_60_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_0_60_Tag2"] = "RECO_1J_PTH_0_60_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_120_200_Tag0"] = "RECO_1J_PTH_120_200_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_120_200_Tag1"] = "RECO_1J_PTH_120_200_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_120_200_Tag2"] = "RECO_1J_PTH_120_200_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_60_120_Tag0"] = "RECO_1J_PTH_60_120_Tag0" 
+globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_60_120_Tag1"] = "RECO_1J_PTH_60_120_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_1J_PTH_60_120_Tag2"] = "RECO_1J_PTH_60_120_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_0_60_Tag0"] = "RECO_GE2J_PTH_0_60_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_0_60_Tag1"] = "RECO_GE2J_PTH_0_60_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_0_60_Tag2"] = "RECO_GE2J_PTH_0_60_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_120_200_Tag0"] = "RECO_GE2J_PTH_120_200_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_120_200_Tag1"] = "RECO_GE2J_PTH_120_200_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_120_200_Tag2"] = "RECO_GE2J_PTH_120_200_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_60_120_Tag0"] = "RECO_GE2J_PTH_60_120_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_60_120_Tag1"] = "RECO_GE2J_PTH_60_120_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_GE2J_PTH_60_120_Tag2"] = "RECO_GE2J_PTH_60_120_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_PTH_200_300_Tag0"] = "RECO_PTH_200_300_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_PTH_200_300_Tag1"] = "RECO_PTH_200_300_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_PTH_300_450_Tag0"] = "RECO_PTH_300_450_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_PTH_300_450_Tag1"] = "RECO_PTH_300_450_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_PTH_450_650_Tag0"] = "RECO_PTH_450_650_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_PTH_GT650_Tag0"] = "RECO_PTH_GT650_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_THQ_LEP"] = "RECO_THQ_LEP" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_0_60_Tag0"] = "RECO_TTH_HAD_PTH_0_60_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_0_60_Tag1"] = "RECO_TTH_HAD_PTH_0_60_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_0_60_Tag2"] = "RECO_TTH_HAD_PTH_0_60_Tag2" 
+globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag0"] = "RECO_TTH_HAD_PTH_120_200_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag1"] = "RECO_TTH_HAD_PTH_120_200_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag2"] = "RECO_TTH_HAD_PTH_120_200_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_120_200_Tag3"] = "RECO_TTH_HAD_PTH_120_200_Tag3" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_200_300_Tag0"] = "RECO_TTH_HAD_PTH_200_300_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_200_300_Tag1"] = "RECO_TTH_HAD_PTH_200_300_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_200_300_Tag2"] = "RECO_TTH_HAD_PTH_200_300_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_60_120_Tag0"] = "RECO_TTH_HAD_PTH_60_120_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_60_120_Tag1"] = "RECO_TTH_HAD_PTH_60_120_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_60_120_Tag2"] = "RECO_TTH_HAD_PTH_60_120_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_GT300_Tag0"] = "RECO_TTH_HAD_PTH_GT300_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_HAD_PTH_GT300_Tag1"] = "RECO_TTH_HAD_PTH_GT300_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_0_60_Tag0"] = "RECO_TTH_LEP_PTH_0_60_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_0_60_Tag1"] = "RECO_TTH_LEP_PTH_0_60_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_0_60_Tag2"] = "RECO_TTH_LEP_PTH_0_60_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_120_200_Tag0"] = "RECO_TTH_LEP_PTH_120_200_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_120_200_Tag1"] = "RECO_TTH_LEP_PTH_120_200_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_200_300_Tag0"] = "RECO_TTH_LEP_PTH_200_300_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_60_120_Tag0"] = "RECO_TTH_LEP_PTH_60_120_Tag0" 
+globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_60_120_Tag1"] = "RECO_TTH_LEP_PTH_60_120_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_60_120_Tag2"] = "RECO_TTH_LEP_PTH_60_120_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_TTH_LEP_PTH_GT300_Tag0"] = "RECO_TTH_LEP_PTH_GT300_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFLIKEGGH_Tag0"] = "RECO_VBFTOPO_ACGGH_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFLIKEGGH_Tag1"] = "RECO_VBFTOPO_ACGGH_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACGGH_Tag0"] = "RECO_VBFTOPO_ACGGH_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACGGH_Tag1"] = "RECO_VBFTOPO_ACGGH_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVBFBSM_Tag0"] = "RECO_VBFTOPO_ACVBFBSM_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVBFBSM_Tag1"] = "RECO_VBFTOPO_ACVBFBSM_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVBFSM_Tag0"] = "RECO_VBFTOPO_ACVBFSM_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVHHADBSM_Tag0"] = "RECO_VBFTOPO_ACVHHADBSM_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVHHADBSM_Tag1"] = "RECO_VBFTOPO_ACVHHADBSM_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVHHADSM_Tag0"] = "RECO_VBFTOPO_ACVHHADSM_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVHHADSM_Tag1"] = "RECO_VBFTOPO_ACVHHADSM_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_VBFTOPO_ACVHHADSM_Tag2"] = "RECO_VBFTOPO_ACVHHADSM_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_VH_MET_Tag0"] = "RECO_VH_MET_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_VH_MET_Tag1"] = "RECO_VH_MET_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_VH_MET_Tag2"] = "RECO_VH_MET_Tag2" +globalReplacementMap["AC"]["catRVMap"]["RECO_VH_MET_Tag3"] = "RECO_VH_MET_Tag3" +globalReplacementMap["AC"]["catRVMap"]["RECO_VH_MET_Tag4"] = "RECO_VH_MET_Tag4" +globalReplacementMap["AC"]["catRVMap"]["RECO_WH_LEP_Tag0"] = "RECO_WH_LEP_Tag0" 
+globalReplacementMap["AC"]["catRVMap"]["RECO_WH_LEP_Tag1"] = "RECO_WH_LEP_Tag1" +globalReplacementMap["AC"]["catRVMap"]["RECO_WH_LEP_Tag2"] = "RECO_WH_LEP_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_WH_LEP_Tag3"] = "RECO_WH_LEP_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_ZH_LEP_Tag0"] = "RECO_ZH_LEP_Tag0" +globalReplacementMap["AC"]["catRVMap"]["RECO_ZH_LEP_Tag1"] = "RECO_ZH_LEP_Tag1" diff --git a/Signal/tools/simultaneousFit.py b/Signal/tools/simultaneousFit.py index 411b2079..26b59416 100644 --- a/Signal/tools/simultaneousFit.py +++ b/Signal/tools/simultaneousFit.py @@ -216,7 +216,7 @@ def prepareDataHists(self): self.Vars['weight'] = ROOT.RooRealVar("weight","weight",-10000,10000) for i in range(0,d.numEntries()): self.xvar.setVal(d.get(i).getRealValue(self.xvar.GetName())) - self.Vars['weight'].setVal((1/sumw)*d.weight()) + self.Vars['weight'].setVal(0 if sumw==0 else (1/sumw)*d.weight()) drw.add(ROOT.RooArgSet(self.xvar,self.Vars['weight']),self.Vars['weight'].getVal()) # Convert to RooDataHist self.DataHists[k] = ROOT.RooDataHist("%s_hist"%d.GetName(),"%s_hist"%d.GetName(),ROOT.RooArgSet(self.xvar),drw) diff --git a/Signal/tools/submissionTools.py b/Signal/tools/submissionTools.py index b761a068..78e66615 100644 --- a/Signal/tools/submissionTools.py +++ b/Signal/tools/submissionTools.py @@ -3,6 +3,8 @@ import re from commonObjects import * +MHNominal = '125' + def run(cmd): print "%s\n\n"%cmd os.system(cmd) @@ -61,8 +63,9 @@ def writeSubFiles(_opts): for cidx in range(_opts['nCats']): pcidx = pidx*_opts['nCats']+cidx p,c = _opts['procs'].split(",")[pidx], _opts['cats'].split(",")[cidx] + mps = MHNominal if 'ALT' in p else _opts['massPoints'] _f.write("if [ $1 -eq %g ]; then\n"%pcidx) - _f.write(" python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' 
%s\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],_opts['massPoints'],_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) + _f.write(" python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' %s\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],mps,_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) _f.write("fi\n") # For looping over categories @@ -72,7 +75,8 @@ def writeSubFiles(_opts): _f.write("if [ $1 -eq %g ]; then\n"%cidx) for pidx in range(_opts['nProcs']): p = _opts['procs'].split(",")[pidx] - _f.write(" python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' %s\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],_opts['massPoints'],_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) + mps = MHNominal if 'ALT' in p else _opts['massPoints'] + _f.write(" python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' %s\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],mps,_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) _f.write("fi\n") elif _opts['mode'] == "calcPhotonSyst": @@ -86,7 +90,7 @@ def writeSubFiles(_opts): for cidx in range(_opts['nCats']): c = _opts['cats'].split(",")[cidx] _f.write("if [ $1 -eq %g ]; then\n"%cidx) - _f.write(" python %s/scripts/fTest.py --cat %s --procs %s --ext %s --inputWSDir %s 
%s\n"%(swd__,c,_opts['procs'],_opts['ext'],_opts['inputWSDir'],_opts['modeOpts'])) + _f.write(" python %s/scripts/fTest.py --cat %s --procs %s --xvar %s --ext %s --inputWSDir %s --outdir %s %s\n"%(swd__,c,_opts['procs'],_opts['xvar'],_opts['ext'],_opts['inputWSDir'],_opts['outdir'],_opts['modeOpts'])) _f.write("fi\n") elif _opts['mode'] == "packageSignal": @@ -116,7 +120,7 @@ def writeSubFiles(_opts): _fsub.close() # SGE... - if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "local" ): + if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "Rome")|(_opts['batch'] == "local" ): _executable = "sub_%s_%s"%(_opts['mode'],_opts['ext']) # Write details depending on mode @@ -129,7 +133,8 @@ def writeSubFiles(_opts): p,c = _opts['procs'].split(",")[pidx], _opts['cats'].split(",")[cidx] _f = open("%s/%s_%g.sh"%(_jobdir,_executable,pcidx),"w") writePreamble(_f) - _f.write("python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' %s\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],_opts['massPoints'],_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) + mps = MHNominal if 'ALT' in p else _opts['massPoints'] + _f.write("python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' %s\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],mps,_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) _f.close() os.system("chmod 775 %s/%s_%g.sh"%(_jobdir,_executable,pcidx)) @@ -139,9 +144,10 @@ def writeSubFiles(_opts): c = _opts['cats'].split(",")[cidx] _f = open("%s/%s_%s.sh"%(_jobdir,_executable,c),"w") writePreamble(_f) + mps = MHNominal if 'ALT' in 
p else _opts['massPoints'] for pidx in range(_opts['nProcs']): p = _opts['procs'].split(",")[pidx] - _f.write("python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' %s\n\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],_opts['massPoints'],_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) + _f.write("python %s/scripts/signalFit.py --inputWSDir %s --ext %s --proc %s --cat %s --year %s --analysis %s --massPoints %s --scales \'%s\' --scalesCorr \'%s\' --scalesGlobal \'%s\' --smears \'%s\' %s\n\n"%(swd__,_opts['inputWSDir'],_opts['ext'],p,c,_opts['year'],_opts['analysis'],mps,_opts['scales'],_opts['scalesCorr'],_opts['scalesGlobal'],_opts['smears'],_opts['modeOpts'])) _f.close() os.system("chmod 775 %s/%s_%s.sh"%(_jobdir,_executable,c)) @@ -199,8 +205,9 @@ def submitFiles(_opts): print " --> Finished submitting files" # SGE - elif _opts['batch'] in ['IC','SGE']: + elif _opts['batch'] in ['IC','SGE','Rome']: _executable = "sub_%s_%s"%(_opts['mode'],_opts['ext']) + _subcmd = 'bsub' if _opts['batch']=='Rome' else 'qsub' # Extract job opts jobOptsStr = _opts['jobOpts'] @@ -211,19 +218,19 @@ def submitFiles(_opts): for cidx in range(_opts['nCats']): pcidx = pidx*_opts['nCats']+cidx _subfile = "%s/%s_%g"%(_jobdir,_executable,pcidx) - cmdLine = "qsub -q hep.q %s -o %s.log -e %s.err %s.sh"%(jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) # Separate submission per category elif( _opts['mode'] == "packageSignal" )|( _opts['mode'] == "fTest" )|( _opts['mode'] == "calcPhotonSyst" )|(( _opts['mode'] == "signalFit" )&( _opts['groupSignalFitJobsByCat'] )): for cidx in range(_opts['nCats']): c = _opts['cats'].split(",")[cidx] _subfile = 
"%s/%s_%s"%(_jobdir,_executable,c) - cmdLine = "qsub -q hep.q %s -o %s.log -e %s.err %s.sh"%(jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) # Single submission elif(_opts['mode'] == "getEffAcc")|(_opts['mode'] == "getDiagProc"): _subfile = "%s/%s"%(_jobdir,_executable) - cmdLine = "qsub -q hep.q %s -o %s.log -e %s.err %s.sh"%(jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) print " --> Finished submitting files" diff --git a/Trees2WS/CardsReplacement.csv b/Trees2WS/CardsReplacement.csv new file mode 100644 index 00000000..a6ba7a73 --- /dev/null +++ b/Trees2WS/CardsReplacement.csv @@ -0,0 +1,11 @@ +proc,from,to,mass +VBF,2016postVFP,2017,125 +VBF,2016postVFP,2018,125 +wh_ALT_L1,2016preVFP,2016postVFP,125 +wh_ALT_0M,2016preVFP,2016postVFP,125 +wh_ALT_0Mf05,2016preVFP,2016postVFP,125 +GG2H,2016preVFP,2017,125 +VBF_ALT_L1f05,2018,2017,125 +VBF_ALT_L1Zg,2018,2017,125 +TTH,2017,2018,130 +TTH,2016postVFP,2016preVFP,130 \ No newline at end of file diff --git a/Trees2WS/CardsReplacement.py b/Trees2WS/CardsReplacement.py new file mode 100644 index 00000000..25ffac40 --- /dev/null +++ b/Trees2WS/CardsReplacement.py @@ -0,0 +1,14 @@ +import os +import pandas as pd +# Non tutte le produzioni sono andate a buon fine, quindi sostituiremo alcuni campioni con quelli di altri anni + +df = pd.read_csv('CardsReplacement.csv') + +cards_dir = 'cards_NOTAG' +for index, row in df.iterrows(): + print(f'ln -s {cards_dir}/{row['from']}/*{row['mass']}{row['proc']}.* {cards_dir}/{row['to']}/') + os.system(f'ln -s {cards_dir}/{row['from']}/*{row['mass']}{row['proc']}.* {cards_dir}/{row['to']}/') + + + + diff --git a/Trees2WS/ChangeName.py b/Trees2WS/ChangeName.py new file mode 100644 index 00000000..08fd3cb9 --- /dev/null +++ b/Trees2WS/ChangeName.py @@ -0,0 
+1,18 @@ +import os +import glob + +cards_dir = 'cards_NOTAG' +for y in ['2018','2017','2016preVFP','2016postVFP']: + + fs = glob.glob(cards_dir+'/'+y+'/*minus*') + + for f in fs: + string = f.split('QQ2HLNU.root')[0] + print('mv '+f+' '+string+'WMINUSH2HQQ.root') + + fs = glob.glob(cards_dir+'/'+y+'/*plus*') + for f in fs: + string = f.split('QQ2HLNU.root')[0] + print('mv '+f+' '+string+'WPLUSH2HQQ.root') + + diff --git a/Trees2WS/NOTAG_Yield.py b/Trees2WS/NOTAG_Yield.py new file mode 100644 index 00000000..02ac83ac --- /dev/null +++ b/Trees2WS/NOTAG_Yield.py @@ -0,0 +1,84 @@ + + +#import commonObjects +import os + + + +import uproot +import glob +import os +import ROOT +print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tree NOTAG Yields to compute Efficienties ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ " +import os, sys +import re +from optparse import OptionParser +import ROOT +import pandas as pd +import glob +import pickle + +import json +from commonTools import * + + + + +def get_weighted_entries(file_path, cat): + # Open the ROOT file + with uproot.open(file_path) as file: + # Navigate to the tree + if massFromFileName(file_path) == 130 : + mass_string = '_130_' + elif massFromFileName(file_path) == 120: + mass_string = '_120_' + elif massFromFileName(file_path) == 125: + mass_string = '_125_' + + tree = file["tagsDumper/trees/"+signalFromFileName(file_path)[0]+mass_string+"13TeV_"+ cat] + combined_weights = tree["weight"].array() + + return combined_weights.sum() + +def get_options(): + parser = OptionParser() + parser.add_option('--tree_dir', dest='tree_dir', default='Merged', help='Directiory of the trees') + return parser.parse_args() +(opt,args) = get_options() + +r = {} + +year = ["2016preVFP" , "2016postVFP", "2017", "2018" ] + + + +df = pd.read_csv('CardsReplacement.csv') + + +for y in year: + + root_files = glob.glob(opt.tree_dir+'/'+y+'/*output_*.root') + for root_file in root_files: + weighted_entries = get_weighted_entries(root_file,'NOTAG') + proc = 
extFromFileName(root_file)[0] + mass = str(massFromFileName(root_file)) + r[proc+'_'+mass+'_'+y]=float(weighted_entries) + + for index, row in df[(df['proc']==proc) & (df['mass']==eval(mass)) & (df['from']==y) ].iterrows(): + print("SONO QUI ",proc,eval(mass),y) + r[proc+'_'+mass+'_'+row['to']]=float(weighted_entries) + print(proc+'_'+str(massFromFileName(root_file))+'_'+y,float(weighted_entries)) + +print('creating the json file NOTAG_entries.json') +with open("NOTAG_entries.json", "w") as file: + json.dump(r,file,indent=4) + + + + + + + + + + diff --git a/Trees2WS/NOTAG_entries.json b/Trees2WS/NOTAG_entries.json new file mode 100644 index 00000000..67273845 --- /dev/null +++ b/Trees2WS/NOTAG_entries.json @@ -0,0 +1,162 @@ +{ + "ZH_ALT0L1Zgf05ph0_125_2018": 1.1989097595214844, + "ZH_ALT0M_125_2017": 1.0435991287231445, + "wh_ALT_0M_125_2018": 1.8507248163223267, + "WH_ALT0PH_125_2017": 1.9123972654342651, + "wh_ALT_0M_125_2016postVFP": 1.763871669769287, + "VBF_ALT_L1_125_2016preVFP": 3.7570157051086426, + "WH_ALT0PH_125_2018": 1.9309591054916382, + "ZH_ALT0PH_125_2016preVFP": 1.3017263412475586, + "VBF_120_2016preVFP": 5.53654670715332, + "QQ2HLL_125_2017": 1.160643219947815, + "VBF_125_2016postVFP": 5.257660865783691, + "WMINUSH2HQQ_125_2017": 1.0400376319885254, + "VBF_ALT_0PHf05_125_2016preVFP": 5.289504051208496, + "VBF_ALT_0Mf05_125_2018": 4.4438371658325195, + "VBF_ALT_L1Zgf05_125_2016postVFP": 4.689101696014404, + "ZH_ALT0L1Zg_125_2018": 0.8740636110305786, + "VBF_120_2016postVFP": 5.2456374168396, + "wh_ALT_0Mf05_125_2018": 1.9303120374679565, + "QQ2HLL_120_2018": 1.4105761051177979, + "WPLUSH2HQQ_125_2016postVFP": 0.9292153120040894, + "VBF_125_2016preVFP": 5.616977214813232, + "QQ2HLL_130_2016preVFP": 1.3961467742919922, + "wh_ALT_0Mf05_125_2017": 1.9605364799499512, + "QQ2HLL_120_2016preVFP": 1.3909958600997925, + "VBF_ALT_0M_125_2016postVFP": 3.3905715942382812, + "TTH_125_2016preVFP": 0.7012138366699219, + "VBF_ALT_L1Zgf05_125_2018": 
4.340205669403076, + "ZH_ALT0PHf05ph0_125_2018": 1.1562162637710571, + "TTH_120_2018": 0.7118246555328369, + "WMINUSH2HQQ_130_2016postVFP": 1.0295274257659912, + "wh_ALT_0Mf05_125_2016postVFP": 1.91534423828125, + "TTH_120_2017": 0.738832414150238, + "VBF_ALT_0PH_125_2017": 4.570183277130127, + "VBF_ALT_L1Zg_125_2017": 3.7132153511047363, + "QQ2HLL_120_2017": 1.3851238489151, + "WPLUSH2HQQ_125_2018": 1.1250399351119995, + "WMINUSH2HQQ_120_2016preVFP": 1.0412663221359253, + "ZH_ALT0PHf05ph0_125_2017": 1.1000185012817383, + "VBF_ALT_0PH_125_2018": 4.394056797027588, + "WPLUSH2HQQ_125_2017": 1.102858066558838, + "WPLUSH2HQQ_120_2016preVFP": 1.0991027355194092, + "VBF_ALT_L1Zgf05_125_2017": 4.583035945892334, + "ZH_ALT0L1Zgf05ph0_125_2016preVFP": 1.116413950920105, + "WH_ALT0PHf05ph0_125_2018": 1.8904783725738525, + "VBF_ALT_L1Zg_125_2018": 3.7132153511047363, + "QQ2HLL_120_2016postVFP": 0.9669924378395081, + "GG2H_125_2018": 40.9776611328125, + "ZH_ALT0M_125_2016preVFP": 1.3489832878112793, + "WMINUSH2HQQ_120_2016postVFP": 0.025082401931285858, + "WH_ALT0PH_125_2016preVFP": 2.034543991088867, + "VBF_ALT_L1_125_2016postVFP": 3.3063652515411377, + "WH_ALT0L1f05ph0_125_2016postVFP": 1.3991436958312988, + "VBF_ALT_L1f05_125_2016preVFP": 4.805684566497803, + "GG2H_130_2016postVFP": 57.70191192626953, + "VBF_130_2017": 5.092952251434326, + "WMINUSH2HQQ_125_2016preVFP": 1.0664012432098389, + "ZH_ALT0L1f05ph0_125_2016postVFP": 0.8501898050308228, + "VBF_ALT_0Mf05_125_2016preVFP": 4.918069362640381, + "VBF_ALT_0M_125_2017": 4.236538410186768, + "WMINUSH2HQQ_120_2018": 1.0289227962493896, + "ZH_ALT0M_125_2018": 1.186974048614502, + "GG2H_120_2016preVFP": 75.80400848388672, + "ZH_ALT0L1_125_2018": 0.9695231914520264, + "wh_ALT_0Mf05_125_2016preVFP": 1.91534423828125, + "QQ2HLL_125_2016postVFP": 1.3691455125808716, + "ZH_ALT0Mf05ph0_125_2018": 1.3183611631393433, + "ZH_ALT0L1_125_2017": 0.9102658033370972, + "VBF_ALT_L1f05_125_2018": 4.214972019195557, + "TTH_120_2016postVFP": 
0.7083141207695007, + "VBF_ALT_0PHf05_125_2018": 4.778690814971924, + "VBF_ALT_0PHf05_125_2017": 4.55863094329834, + "VBF_ALT_0Mf05_125_2017": 4.397775173187256, + "QQ2HLL_130_2016postVFP": 1.359933614730835, + "VBF_ALT_L1f05_125_2017": 4.214972019195557, + "ZH_ALT0PH_125_2017": 1.179024338722229, + "QQ2HLL_125_2016preVFP": 1.3832674026489258, + "TTH_125_2018": 0.6254857182502747, + "WH_ALT0PHf05ph0_125_2016preVFP": 1.6441869735717773, + "WMINUSH2HQQ_125_2018": 1.0683207511901855, + "ZH_ALT0Mf05ph0_125_2016preVFP": 1.2525018453598022, + "WPLUSH2HQQ_120_2018": 1.1304855346679688, + "GG2H_130_2018": 59.57413864135742, + "VBF_120_2018": 5.176122188568115, + "VBF_120_2017": 5.114029407501221, + "WPLUSH2HQQ_120_2017": 1.1038929224014282, + "WH_ALT0L1f05ph0_125_2016preVFP": 1.5370222330093384, + "TTH_130_2016preVFP": 0.5637041926383972, + "VBF_ALT_L1_125_2018": 3.8837289810180664, + "WPLUSH2HQQ_120_2016postVFP": 0.7396562099456787, + "VBF_ALT_L1Zgf05_125_2016preVFP": 4.43612813949585, + "GG2H_125_2016postVFP": 64.8388442993164, + "VBF_130_2016preVFP": 5.0064377784729, + "wh_ALT_L1_125_2016postVFP": 1.459783673286438, + "WH_ALT0PHf05ph0_125_2017": 1.7229928970336914, + "ZH_ALT0M_125_2016postVFP": 1.1783111095428467, + "WH_ALT0PH_125_2016postVFP": 1.9622939825057983, + "WH_ALT0PHf05ph0_125_2016postVFP": 1.713988184928894, + "ZH_ALT0Mf05ph0_125_2016postVFP": 1.1920608282089233, + "ZH_ALT0L1Zg_125_2016preVFP": 0.8777278065681458, + "GG2H_120_2017": 69.56082153320312, + "GG2H_120_2018": 75.45105743408203, + "ZH_ALT0PH_125_2016postVFP": 1.2513394355773926, + "VBF_ALT_0M_125_2016preVFP": 3.82059907913208, + "QQ2HLL_125_2018": 1.3792426586151123, + "VBF_ALT_0PHf05_125_2016postVFP": 4.176018238067627, + "wh_ALT_0M_125_2016preVFP": 1.763871669769287, + "GG2H_125_2017": 69.89240264892578, + "ZH_ALT0L1Zgf05ph0_125_2017": 0.976284921169281, + "GG2H_120_2016postVFP": 63.098838806152344, + "ZH_ALT0L1Zg_125_2017": 0.9236503839492798, + "GG2H_125_2016preVFP": 69.89240264892578, + 
"VBF_ALT_0M_125_2018": 4.625398635864258, + "GG2H_130_2016preVFP": 64.27923583984375, + "ZH_ALT0L1f05ph0_125_2016preVFP": 0.9946238994598389, + "ZH_ALT0L1Zgf05ph0_125_2016postVFP": 1.160673975944519, + "WPLUSH2HQQ_130_2016postVFP": 0.9054625630378723, + "TTH_130_2017": 0.6188207268714905, + "TTH_130_2016postVFP": 0.5637041926383972, + "QQ2HLL_130_2017": 1.3656038045883179, + "QQ2HLL_130_2018": 1.2726795673370361, + "VBF_ALT_L1_125_2017": 3.913494825363159, + "TTH_130_2018": 0.6188207268714905, + "GG2H_130_2017": 61.662357330322266, + "wh_ALT_L1_125_2016preVFP": 1.459783673286438, + "ZH_ALT0L1f05ph0_125_2018": 0.8950009942054749, + "ZH_ALT0L1f05ph0_125_2017": 0.8552485704421997, + "ZH_ALT0PH_125_2018": 1.245112657546997, + "WPLUSH2HQQ_130_2018": 1.1194560527801514, + "ZH_ALT0PHf05ph0_125_2016preVFP": 1.035818338394165, + "VBF_ALT_0PH_125_2016preVFP": 3.8884999752044678, + "WPLUSH2HQQ_130_2017": 1.0922507047653198, + "ZH_ALT0L1Zg_125_2016postVFP": 0.9148343801498413, + "wh_ALT_L1_125_2018": 1.3744580745697021, + "VBF_ALT_L1f05_125_2016postVFP": 4.348669052124023, + "WMINUSH2HQQ_120_2017": 1.0489352941513062, + "wh_ALT_L1_125_2017": 1.4123969078063965, + "WMINUSH2HQQ_130_2018": 0.8915627598762512, + "WPLUSH2HQQ_125_2016preVFP": 1.0881699323654175, + "VBF_ALT_L1Zg_125_2016preVFP": 4.077937126159668, + "VBF_ALT_0PH_125_2016postVFP": 4.117892742156982, + "VBF_ALT_L1Zg_125_2016postVFP": 3.6402816772460938, + "ZH_ALT0PHf05ph0_125_2016postVFP": 1.1144261360168457, + "WMINUSH2HQQ_130_2017": 1.0346907377243042, + "WH_ALT0L1f05ph0_125_2017": 1.31551992893219, + "ZH_ALT0L1_125_2016preVFP": 0.9621473550796509, + "WH_ALT0L1f05ph0_125_2018": 1.5759902000427246, + "TTH_120_2016preVFP": 0.7795042991638184, + "VBF_125_2017": 5.257660865783691, + "ZH_ALT0L1_125_2016postVFP": 0.9334370493888855, + "WMINUSH2HQQ_130_2016preVFP": 0.7694205045700073, + "TTH_125_2017": 0.6987448930740356, + "VBF_130_2018": 4.993570327758789, + "VBF_125_2018": 1.6449321508407593, + "ZH_ALT0Mf05ph0_125_2017": 
1.107650876045227, + "wh_ALT_0M_125_2017": 1.8232421875, + "VBF_130_2016postVFP": 5.134899139404297, + "VBF_ALT_0Mf05_125_2016postVFP": 3.890061616897583, + "WPLUSH2HQQ_130_2016preVFP": 1.117303729057312, + "WMINUSH2HQQ_125_2016postVFP": 1.0200529098510742, + "TTH_125_2016postVFP": 0.7082273364067078 +} \ No newline at end of file diff --git a/Trees2WS/RunWSScripts.py b/Trees2WS/RunWSScripts.py index 735a4fa3..38d64a6d 100644 --- a/Trees2WS/RunWSScripts.py +++ b/Trees2WS/RunWSScripts.py @@ -19,6 +19,7 @@ def get_options(): parser.add_option('--year', dest='year', default='2016', help="Year of trees to process") parser.add_option('--mode', dest='mode', default='', help="Which mode to run. Options: ['trees2ws','trees2ws_data','haddMC','haddData','mass_shift']") parser.add_option('--modeOpts', dest='modeOpts', default='', help="Additional options to add to command line when running different scripts (specify all within quotes e.g. \"--XYZ ABC\")") + parser.add_option('--selectProcess', dest='selectProcess', action="append", default=[], help="select only these processes to be converted in workspaces (can be specified multiple times)") # Specifically for hadding parser.add_option('--flashggPath', dest='flashggPath', default='', help="Path to flashgg area required for hadding") parser.add_option('--outputWSDir', dest='outputWSDir', default='', help="Location to store output workspaces of hadding script") @@ -26,8 +27,8 @@ def get_options(): parser.add_option('--inputMass', dest='inputMass', default='125', help="Input mass of workspace") parser.add_option('--targetMasses', dest='targetMasses', default='120,130', help="Comma separated list of target masses") # Job submission options - parser.add_option('--batch', dest='batch', default='IC', help='Batch') - parser.add_option('--queue', dest='queue', default='hep.q', help='Queue: can take a while if including all systematics for many categories') + parser.add_option('--batch', dest='batch', default='condor', help='Batch') + 
parser.add_option('--queue', dest='queue', default='longlunch', help='Queue: can take a while if including all systematics for many categories') parser.add_option('--jobOpts', dest='jobOpts', default='', help="Additional options to add to job submission. For Condor separate individual options with a colon (specify all within quotes e.g. \"option_xyz = abc+option_123 = 456\")") parser.add_option('--printOnly', dest='printOnly', default=False, action="store_true", help="Dry run: print submission files only") return parser.parse_args() @@ -47,6 +48,7 @@ def leave(): options['year'] = opt.year options['mode'] = opt.mode options['modeOpts'] = opt.modeOpts +options['selectProcess'] = opt.selectProcess options['flashggPath'] = opt.flashggPath options['outputWSDir'] = opt.outputWSDir if opt.outputWSDir != '' else "%s/outdir_%s/%s"%(twd__,opt.ext,opt.mode) options['inputMass'] = opt.inputMass @@ -75,6 +77,8 @@ def leave(): elif options['mode'] == "haddMC": print " --> Hadd MC workspaces..." elif options['mode'] == "haddData": print " --> Hadd data workspaces..." elif options['mode'] == "mass_shift": print " --> Ad-hoc shifting of mass in RooWorkspaces..." 
+ +if options['mode'] == "trees2ws" and len(options['selectProcess']): print " ==> Chosen to convert only the processes in this list: ",options['selectProcess'] print " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/Trees2WS/WSRenamer.py b/Trees2WS/WSRenamer.py index bcff4a66..162820f0 100644 --- a/Trees2WS/WSRenamer.py +++ b/Trees2WS/WSRenamer.py @@ -20,11 +20,26 @@ def get_options(): fnew = re.sub("QQ2HQQ","VBF",f) os.system("mv %s %s"%(f,fnew)) +vbf = glob.glob("%s/*_VBFH*_vbf*"%opt.inputDir) +for f in vbf: + fnew = re.sub("_vbf","_VBF",f) + os.system("mv %s %s"%(f,fnew)) + wh = glob.glob("%s/*_WHToGG_*_QQ2HQQ_*"%opt.inputDir) for f in wh: fnew = re.sub("QQ2HQQ","WH2HQQ",f) os.system("mv %s %s"%(f,fnew)) +wplush = glob.glob("%s/*_WplusH_HToGG_*_QQ2HLNU*"%opt.inputDir) +for f in wplush: + fnew = re.sub("QQ2HLNU","WPLUSH2HQQ",f) + os.system("mv %s %s"%(f,fnew)) + +wminush = glob.glob("%s/*_WminusH_HToGG_*_QQ2HLNU*"%opt.inputDir) +for f in wminush: + fnew = re.sub("QQ2HLNU","WMINUSH2HQQ",f) + os.system("mv %s %s"%(f,fnew)) + zh = glob.glob("%s/*_ZHToGG_*_QQ2HQQ_*"%opt.inputDir) for f in zh: fnew = re.sub("QQ2HQQ","ZH2HQQ",f) diff --git a/Trees2WS/config.py b/Trees2WS/config.py new file mode 100644 index 00000000..b4b462c9 --- /dev/null +++ b/Trees2WS/config.py @@ -0,0 +1,23 @@ +# Input config file for running trees2ws + +trees2wsCfg = { + + # Name of RooDirectory storing input tree + 'inputTreeDir':'tagsDumper/trees', + + # Variables to be added to dataframe: use wildcard * for common strings + 'mainVars':["CMS_hgg_mass","weight","vbfNLOweight","vhhadNLOweight","dZ","*sigma","*Weight"], # Vars to add to nominal RooDatasets + 'dataVars':["CMS_hgg_mass","weight"], # Vars for data workspace (trees2ws_data.py script) + 'stxsVar':'stage1p2bin', # Var for STXS splitting: if using option doSTXSSplitting + 'notagVars':["weight","*sigma"], # Vars to add to NOTAG RooDataset + 
'systematicsVars':["CMS_hgg_mass","weight"], # Variables to add to systematic RooDataHists + 'theoryWeightContainers':{'alphaSWeights':2,'scaleWeights':9,'pdfWeights':60}, # Theory weights to add to nominal + NOTAG RooDatasets, value corresponds to number of weights (0-N) + + # List of systematics: use string YEAR for year-dependent systematics + 'systematics':['FNUFEB', 'FNUFEE', 'JEC', 'JER', 'MCScaleGain1EB', 'MCScaleGain6EB', 'MCScaleHighR9EB', 'MCScaleHighR9EE', 'MCScaleLowR9EB', 'MCScaleLowR9EE', 'MCSmearHighR9EBPhi', 'MCSmearHighR9EBRho', 'MCSmearHighR9EEPhi', 'MCSmearHighR9EERho', 'MCSmearLowR9EBPhi', 'MCSmearLowR9EBRho', 'MCSmearLowR9EEPhi', 'MCSmearLowR9EERho', 'MaterialCentralBarrel', 'MaterialForward', 'MaterialOuterBarrel', 'MvaShift', 'PUJIDShift', 'ShowerShapeHighR9EB', 'ShowerShapeHighR9EE', 'ShowerShapeLowR9EB', 'ShowerShapeLowR9EE', 'SigmaEOverEShift'], + + # Analysis categories: python list of cats or use 'auto' to extract from input tree + 'cats':['RECO_0J_PTH_0_10_Tag0','RECO_0J_PTH_0_10_Tag1','RECO_0J_PTH_0_10_Tag2','RECO_0J_PTH_GT10_Tag0','RECO_0J_PTH_GT10_Tag1','RECO_0J_PTH_GT10_Tag2','RECO_1J_PTH_0_60_Tag0','RECO_1J_PTH_0_60_Tag1','RECO_1J_PTH_0_60_Tag2','RECO_1J_PTH_120_200_Tag0','RECO_1J_PTH_120_200_Tag1','RECO_1J_PTH_120_200_Tag2','RECO_1J_PTH_60_120_Tag0','RECO_1J_PTH_60_120_Tag1','RECO_1J_PTH_60_120_Tag2','RECO_GE2J_PTH_0_60_Tag0','RECO_GE2J_PTH_0_60_Tag1','RECO_GE2J_PTH_0_60_Tag2','RECO_GE2J_PTH_120_200_Tag0','RECO_GE2J_PTH_120_200_Tag1','RECO_GE2J_PTH_120_200_Tag2','RECO_GE2J_PTH_60_120_Tag0','RECO_GE2J_PTH_60_120_Tag1','RECO_GE2J_PTH_60_120_Tag2','RECO_PTH_200_300_Tag0','RECO_PTH_200_300_Tag1','RECO_PTH_300_450_Tag0','RECO_PTH_300_450_Tag1','RECO_PTH_450_650_Tag0','RECO_PTH_GT650_Tag0','RECO_THQ_LEP','RECO_TTH_HAD_PTH_0_60_Tag0','RECO_TTH_HAD_PTH_0_60_Tag1','RECO_TTH_HAD_PTH_0_60_Tag2','RECO_TTH_HAD_PTH_120_200_Tag0','RECO_TTH_HAD_PTH_120_200_Tag1','RECO_TTH_HAD_PTH_120_200_Tag2','RECO_TTH_HAD_PTH_120_200_Tag3','RECO_TTH_HAD_PTH_200_3
00_Tag0','RECO_TTH_HAD_PTH_200_300_Tag1','RECO_TTH_HAD_PTH_200_300_Tag2','RECO_TTH_HAD_PTH_60_120_Tag0','RECO_TTH_HAD_PTH_60_120_Tag1','RECO_TTH_HAD_PTH_60_120_Tag2','RECO_TTH_HAD_PTH_GT300_Tag0','RECO_TTH_HAD_PTH_GT300_Tag1','RECO_TTH_LEP_PTH_0_60_Tag0','RECO_TTH_LEP_PTH_0_60_Tag1','RECO_TTH_LEP_PTH_0_60_Tag2','RECO_TTH_LEP_PTH_120_200_Tag0','RECO_TTH_LEP_PTH_120_200_Tag1','RECO_TTH_LEP_PTH_200_300_Tag0','RECO_TTH_LEP_PTH_60_120_Tag0','RECO_TTH_LEP_PTH_60_120_Tag1','RECO_TTH_LEP_PTH_60_120_Tag2','RECO_TTH_LEP_PTH_GT300_Tag0','RECO_VBFLIKEGGH_Tag0','RECO_VBFLIKEGGH_Tag1','RECO_VBFTOPO_ACGGH_Tag0','RECO_VBFTOPO_ACGGH_Tag1','RECO_VBFTOPO_ACVBFBSM_Tag0','RECO_VBFTOPO_ACVBFBSM_Tag1','RECO_VBFTOPO_ACVBFSM_Tag0','RECO_VBFTOPO_ACVHHADBSM_Tag0','RECO_VBFTOPO_ACVHHADBSM_Tag1','RECO_VBFTOPO_ACVHHADSM_Tag0','RECO_VBFTOPO_ACVHHADSM_Tag1','RECO_VBFTOPO_ACVHHADSM_Tag2','RECO_VH_MET_Tag0','RECO_VH_MET_Tag1','RECO_VH_MET_Tag2','RECO_VH_MET_Tag3','RECO_WH_LEP_Tag0','RECO_WH_LEP_Tag1','RECO_WH_LEP_Tag2','RECO_WH_LEP_Tag3','RECO_ZH_LEP_Tag0','RECO_ZH_LEP_Tag1'] + +} + diff --git a/Trees2WS/run_sequence.sh b/Trees2WS/run_sequence.sh new file mode 100755 index 00000000..63cfd34c --- /dev/null +++ b/Trees2WS/run_sequence.sh @@ -0,0 +1,87 @@ +#!/bin/bash + +YEAR=-753 +STEP=0 + +usage(){ + echo "The script runs background scripts:" + echo "options:" + + echo "-h|--help) " + echo "-y|--year): can be the yearId or all" + echo "-s|--step) " + echo "-d|--dryRun) " + echo "-i|--interactive) " +} +# options may be followed by one colon to indicate they have a required argument +if ! 
options=$(getopt -u -o s:y:dih -l help,step:,year:,dryRun,interactive -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-y|--year) YEAR=$2; shift ;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +-i|--interactive) I=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +fggDir="/eos/home-f/fderiggi/CMSSW_10_6_29/src/" + +DROPT="" +if [[ $DR ]]; then + DROPT=" --printOnly " +fi + +QUEUE="" +if [[ $I ]]; then + QUEUE=" --batch local " +else + QUEUE=" --batch condor --queue longlunch " +fi + +years=("2016preVFP" "2016postVFP" "2017" "2018") + +for year in ${years[*]} +do + if [[ $year == $YEAR ]] || [[ $YEAR == "all" ]]; then + if [[ $STEP == "t2ws-mc" ]]; then + python RunWSScripts.py --inputConfig config.py --inputDir Merged/${year} --mode trees2ws --modeOpts " --doSystematics" --year ${year} --ext ${year} ${QUEUE} ${DROPT} + elif [[ $STEP == "t2ws-mc-ggh" ]]; then + python RunWSScripts.py --inputConfig config.py --inputDir MergeGGH --mode trees2ws --modeOpts " --doSystematics" --year ${year} --ext ggh_${year} ${QUEUE} ${DROPT} + elif [[ $STEP == "t2ws-mc-vbf" ]]; then + python RunWSScripts.py --inputConfig config.py --inputDir MergeVBF --mode trees2ws --modeOpts " --doSystematics" --year ${year} --ext vbf_${year} ${QUEUE} ${DROPT} + + elif [[ $STEP == "mkdir" ]]; then + mkdir -p ~/eos/AC/cards + mkdir -p ~/eos/AC/cards/${year} + elif [[ $STEP == "clear" ]]; then + python3 ChangeName.py + python3 CardsReplacement.py + python3 NOTAG_Yield.py --tree_dir Merged + ls -l cards_NOTAG/*/*.root | wc -l + echo "it should be 160" + echo "if not there's an error" + elif [[ $STEP == "t2ws-data" ]]; then + python RunWSScripts.py --inputConfig config.py --inputDir trees/data_${year} --mode trees2ws_data --year ${year} --ext ${year} 
${QUEUE} ${DROPT} + elif [[ $STEP == "hadd-mc-vbf" ]]; then + python RunWSScripts.py --inputDir MergeVBF --mode haddMC --year ${year} --ext vbf_${year} --flashggPath ${fggDir} ${QUEUE} ${DROPT} + elif [[ $STEP == "hadd-mc-ggh" ]]; then + python RunWSScripts.py --inputDir MergeGGH --mode haddMC --year ${year} --ext ggh_${year} --flashggPath ${fggDir} ${QUEUE} ${DROPT} + elif [[ $STEP == "hadd-data" ]]; then + python RunWSScripts.py --inputDir trees/data_${year} --mode haddData --year ${year} --ext ${year} --flashggPath ${fggDir} ${QUEUE} ${DROPT} + else + echo "Step $STEP is not one among mc, data. Exiting." + fi + fi +done + diff --git a/Trees2WS/tools/submissionTools.py b/Trees2WS/tools/submissionTools.py index efd9a833..d6c812c0 100644 --- a/Trees2WS/tools/submissionTools.py +++ b/Trees2WS/tools/submissionTools.py @@ -36,6 +36,7 @@ def writeCondorSub(_file,_exec,_queue,_nJobs,_jobOpts,doHoldOnFailure=True,doPer _file.write("# Periodically retry the jobs every 10 minutes, up to a maximum of 5 retries.\n") _file.write("periodic_release = (NumJobStarts < 3) && ((CurrentTime - EnteredCurrentStatus) > 600)\n\n") _file.write("+JobFlavour = \"%s\"\n"%_queue) + _file.write('+SingularityImage = "/cvmfs/unpacked.cern.ch/registry.hub.docker.com/cmssw/el7:x86_64"\n\n') _file.write("queue %g"%_nJobs) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -53,17 +54,25 @@ def writeSubFiles(_opts): if _opts['batch'] == "condor": _executable = "condor_%s_%s"%(_opts['mode'],_opts['ext']) _f = open("%s/%s.sh"%(_jobdir,_executable),"w") # single .sh script split into separate jobs - writePreamble(_f) + # Write details depending on mode if( _opts['mode'] == "trees2ws" ): # Extract list of files + writePreamble(_f) tfiles = glob.glob("%s/*.root"%_opts['inputDir']) # Run separate command per file for tfidx,tf in enumerate(tfiles): + if len(_opts['selectProcess']): + skipMe = True + for p0 in _opts['selectProcess']: + for p in 
p0.split(","): + if os.path.basename(tf).split('.root')[0].replace('output_','') in p: skipMe = False + if skipMe: continue # Extract production mode (and decay extension if required) p, d = signalFromFileName(tf) - _cmd = "python %s/trees2ws.py --inputConfig %s --inputTreeFile %s --productionMode %s --year %s"%(twd__,_opts['inputConfig'],tf,p,_opts['year']) + m = massFromFileName(tf) + _cmd = "python %s/trees2ws.py --inputConfig %s --inputTreeFile %s --productionMode %s --year %s --inputMass %d"%(twd__,_opts['inputConfig'],tf,p,_opts['year'],m) if d is not None: _cmd += " --decayExt %s"%d if _opts['modeOpts'] != '': _cmd += " %s"%_opts['modeOpts'] _f.write("if [ $1 -eq %g ]; then\n"%tfidx) @@ -71,6 +80,7 @@ def writeSubFiles(_opts): _f.write("fi\n") elif( _opts['mode'] == "trees2ws_data" ): + writePreamble(_f) # Extract list of files tfiles = glob.glob("%s/*.root"%_opts['inputDir']) # Run separate command per file @@ -82,20 +92,23 @@ def writeSubFiles(_opts): _f.write("fi\n") elif( _opts['mode'] == "haddMC" ): + writePreamble(_f,_otherBase=_opts['flashggPath']) # Extract list of ws folders: one for each process wsdirs = glob.glob("%s/ws_*"%_opts['inputDir']) # Run separate command per process for widx,wsdir in enumerate(wsdirs): + print(widx,wsdir) # Extract process name p = "_".join(wsdir.split("/")[-1].split("_")[1:]) # Define output file name outf = "_".join(re.sub("_%s.root"%p,"",glob.glob("%s/*.root"%wsdir)[0].split("/")[-1]).split("_")[0:-1])+"_%s.root"%p outfFullName = "%s/%s"%(_opts['outputWSDir'],outf) - _f.write("if [ $1 -eq %g ]; then\n"%tfidx) - _f.write(" hadd_workspaces %s %s/*.root\n"%(outfFullName,wsdir)) + _f.write("if [ $1 -eq %g ]; then\n"%widx) + _f.write(" hadd_workspaces %s %s/%s/*.root\n"%(outfFullName,twd__,wsdir)) _f.write("fi\n") elif( _opts['mode'] == "haddData" ): + writePreamble(_f) # Extract folder wsdir = "%s/ws"%_opts['inputDir'] # Define output file name @@ -126,7 +139,7 @@ def writeSubFiles(_opts): _fsub.close() # SGE... 
- if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "local" ): + if (_opts['batch'] == "IC")|(_opts['batch'] == "SGE")|(_opts['batch'] == "Rome")|(_opts['batch'] == "local" ): _executable = "sub_%s_%s"%(_opts['mode'],_opts['ext']) # Write details depending on mode @@ -135,11 +148,18 @@ def writeSubFiles(_opts): tfiles = glob.glob("%s/*.root"%_opts['inputDir']) # Create separate submission file per script for tfidx,tf in enumerate(tfiles): + if len(_opts['selectProcess']): + skipMe = True + for p0 in _opts['selectProcess']: + for p in p0.split(","): + if os.path.basename(tf).split('.root')[0].replace('output_','') in p: skipMe = False + if skipMe: continue _f = open("%s/%s_%g.sh"%(_jobdir,_executable,tfidx),"w") writePreamble(_f) # Extract production mode (and decay extension if required) p, d = signalFromFileName(tf) - _cmd = "python %s/trees2ws.py --inputConfig %s --inputTreeFile %s --productionMode %s --year %s"%(twd__,_opts['inputConfig'],tf,p,_opts['year']) + m = massFromFileName(tf) + _cmd = "python %s/trees2ws.py --inputConfig %s --inputTreeFile %s --productionMode %s --year %s --inputMass %d"%(twd__,_opts['inputConfig'],tf,p,_opts['year'],m) if d is not None: _cmd += " --decayExt %s"%d if _opts['modeOpts'] != '': _cmd += " %s"%_opts['modeOpts'] _f.write("%s\n"%_cmd) @@ -160,18 +180,20 @@ def writeSubFiles(_opts): os.system("chmod 775 %s/%s_%g.sh"%(_jobdir,_executable,tfidx)) elif( _opts['mode'] == "haddMC" ): + # Extract list of ws folders: one for each process wsdirs = glob.glob("%s/ws_*"%_opts['inputDir']) # Separate submission file per process for widx,wsdir in enumerate(wsdirs): _f = open("%s/%s_%g.sh"%(_jobdir,_executable,widx),"w") + writePreamble(_f,_otherBase=_opts['flashggPath']) # Extract process name p = "_".join(wsdir.split("/")[-1].split("_")[1:]) # Define output file name: remove number from files outf = "_".join(re.sub("_%s.root"%p,"",glob.glob("%s/*.root"%wsdir)[0].split("/")[-1]).split("_")[0:-1])+"_%s.root"%p 
outfFullName = "%s/%s"%(_opts['outputWSDir'],outf) - _f.write("hadd_workspaces %s %s/*.root\n"%(outfFullName,wsdir)) + _f.write("hadd_workspaces %s %s/%s/*.root\n"%(outfFullName,twd__,wsdir)) _f.close() os.system("chmod 775 %s/%s_%g.sh"%(_jobdir,_executable,widx)) @@ -211,8 +233,9 @@ def submitFiles(_opts): print " --> Finished submitting files" # SGE - elif _opts['batch'] in ['IC','SGE']: + elif _opts['batch'] in ['IC','SGE','Rome']: _executable = "sub_%s_%s"%(_opts['mode'],_opts['ext']) + _subcmd = 'bsub' if _opts['batch']=='Rome' else 'qsub' # Extract job opts jobOptsStr = _opts['jobOpts'] @@ -220,27 +243,34 @@ def submitFiles(_opts): if( _opts['mode'] == "trees2ws" )|( _opts['mode'] == 'trees2ws_data' ): tfiles = glob.glob("%s/*.root"%_opts['inputDir']) for tfidx in range(len(tfiles)): + if len(_opts['selectProcess']): + skipMe = True + for p0 in _opts['selectProcess']: + for p in p0.split(","): + if os.path.basename(tfiles[tfidx]).split('.root')[0].replace('output_','') in p: skipMe = False + if skipMe: continue _subfile = "%s/%s_%g"%(_jobdir,_executable,tfidx) - cmdLine = "qsub -q %s %s -o %s.log -e %s.err %s.sh"%(_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) + print " -----------------> "+ cmdLine run(cmdLine) elif( _opts['mode'] == 'haddMC' ): wsdirs = glob.glob("%s/ws_*"%_opts['inputDir']) for widx in range(len(wsdirs)): _subfile = "%s/%s_%g"%(_jobdir,_executable,widx) - cmdLine = "qsub -q %s %s -o %s.log -e %s.err %s.sh"%(_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) elif( _opts['mode'] == 'haddData' ): _subfile = "%s/%s"%(_jobdir,_executable) - cmdLine = "qsub -q %s %s -o %s.log -e %s.err %s.sh"%(_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err 
%s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) elif( _opts['mode'] == 'mass_shift' ): wsfiles = glob.glob("%s/*.root"%_opts['inputDir']) for fidx in range(len(wsfiles)): _subfile = "%s/%s_%g"%(_jobdir,_executable,fidx) - cmdLine = "qsub -q %s %s -o %s.log -e %s.err %s.sh"%(_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) + cmdLine = "%s -q %s %s -o %s.log -e %s.err %s.sh"%(_subcmd,_opts['queue'],jobOptsStr,_subfile,_subfile,_subfile) run(cmdLine) print " --> Finished submitting files" diff --git a/Trees2WS/trees2ws.py b/Trees2WS/trees2ws.py index 015a08fc..defb7012 100644 --- a/Trees2WS/trees2ws.py +++ b/Trees2WS/trees2ws.py @@ -8,6 +8,7 @@ import os, sys import re from optparse import OptionParser +from datetime import datetime def get_options(): parser = OptionParser() @@ -31,7 +32,7 @@ def get_options(): import pandas import numpy as np import uproot -from root_numpy import array2tree +from root_numpy import array2tree, tree2array from commonTools import * from commonObjects import * @@ -107,7 +108,7 @@ def make_argset(_ws=None,_varNames=None): theoryWeightColumns = {} for ts, nWeights in theoryWeightContainers.iteritems(): theoryWeightColumns[ts] = ["%s_%g"%(ts[:-1],i) for i in range(0,nWeights)] # drop final s from container name -# If year == 2018, add HET +# If year == 2018, add HEM if opt.year == '2018': systematics.append("JetHEM") @@ -141,7 +142,9 @@ def make_argset(_ws=None,_varNames=None): if opt.doSystematics: sdata = pandas.DataFrame() # Loop over categories: fill dataframe +tot = 0 for cat in cats: +#for cat in cats: print " --> Extracting events from category: %s"%cat if inputTreeDir == '': treeName = "%s_%s_%s_%s"%(opt.productionMode,opt.inputMass,sqrts__,cat) else: treeName = "%s/%s_%s_%s_%s"%(inputTreeDir,opt.productionMode,opt.inputMass,sqrts__,cat) @@ -151,23 +154,38 @@ def make_argset(_ws=None,_varNames=None): if len(t) == 0: continue # Convert tree to pandas dataframe + dfs = {} # Theory weights for ts, 
tsColumns in theoryWeightColumns.iteritems(): - if opt.productionMode in modesToSkipTheoryWeights: + if opt.productionMode in modesToSkipTheoryWeights or 'ALT' in opt.productionMode: dfs[ts] = pandas.DataFrame(np.ones(shape=(len(t),theoryWeightContainers[ts]))) else: #dfs[ts] = t.pandas.df(ts) dfs[ts] = pandas.DataFrame(np.reshape(np.array(t[ts].array()),(len(t),len(tsColumns)))) dfs[ts].columns = tsColumns - # Main variables to add to nominal RooDataSets dfs['main'] = t.pandas.df(mainVars) if cat!='NOTAG' else t.pandas.df(notagVars) - + + # Concatenate current dataframes df = pandas.concat(dfs.values(), axis=1) - + #print list(df.columns ) + # Add NLO scale factor + if "vbf" in opt.productionMode and "ALT" in opt.productionMode and ("ACGGH" in cat): + df['weightNLO'] = df['weight']* df['vbfNLOweight'] * 66.85/76.72 + elif "vbf" in opt.productionMode and "ALT" in opt.productionMode and ("VBFTOPO_ACVBF" in cat): + df['weightNLO'] = df['weight']* df['vbfNLOweight'] * 67.82/57.96 + elif "wh" in opt.productionMode and "ALT" in opt.productionMode: + df['weightNLO'] = df['weight']* df['vhhadNLOweight'] + elif "zh" in opt.productionMode and "ALT" in opt.productionMode: + df['weightNLO'] = df['weight']* df['vhhadNLOweight'] + else : df['weightNLO'] = df['weight'] + print('-------------------') + tot = tot + df['weight'].sum() + print(df['weight'].sum()) + # Add STXS splitting var if splitting necessary if opt.doSTXSSplitting: df[stxsVar] = t.pandas.df(stxsVar) @@ -205,6 +223,7 @@ def make_argset(_ws=None,_varNames=None): if opt.doSystematics: if cat == "NOTAG": continue sdf = pandas.DataFrame() + print systematics for s in systematics: print " --> Systematic: %s"%re.sub("YEAR",opt.year,s) for direction in ['Up','Down']: @@ -212,6 +231,7 @@ def make_argset(_ws=None,_varNames=None): # If year in streeName then replace by year being processed streeName = re.sub("YEAR",opt.year,streeName) st = f[streeName] + if len(st)==0: continue sdf = st.pandas.df(systematicsVars) 
sdf['type'] = "%s%s"%(s,direction) @@ -267,10 +287,12 @@ def make_argset(_ws=None,_varNames=None): # Open file and initiate workspace fout = ROOT.TFile(outputWSFile,"RECREATE") + print "-------> file is created %s"%outputWSFile foutdir = fout.mkdir(inputWSName__.split("/")[0]) + print "----> Creating Directory %s"%inputWSName__.split("/")[0] foutdir.cd() + print "----> Creating Directory cd" ws = ROOT.RooWorkspace(inputWSName__.split("/")[1],inputWSName__.split("/")[1]) - # Add variables to workspace varNames = add_vars_to_workspace(ws,df,stxsVar) @@ -280,6 +302,7 @@ def make_argset(_ws=None,_varNames=None): # a) make RooDataSets: type = nominal/notag mask = (df['cat']==cat) # Convert dataframe to structured array, then to ROOT tree + sa = df[mask].to_records() t = array2tree(sa) @@ -290,7 +313,7 @@ def make_argset(_ws=None,_varNames=None): aset = make_argset(ws,varNames) # Convert tree to RooDataset and add to workspace - d = ROOT.RooDataSet(dName,dName,t,aset,'','weight') + d= ROOT.RooDataSet(dName,dName,t,aset,'','weight') getattr(ws,'import')(d) # Delete trees and RooDataSet from heap @@ -315,13 +338,14 @@ def make_argset(_ws=None,_varNames=None): # Make argset systematicsVarsDropWeight = [] for var in systematicsVars: - if var != "weight": systematicsVarsDropWeight.append(var) + if "weight" not in var: systematicsVarsDropWeight.append(var) + # ORIGINAL if var != "weight": systematicsVarsDropWeight.append(var) aset = make_argset(ws,systematicsVarsDropWeight) - + print aset h = ROOT.RooDataHist(hName,hName,aset) for ev in t: for v in systematicsVars: - if v == "weight": continue + if "weight" in v: continue else: ws.var(v).setVal(getattr(ev,v)) h.add(aset,getattr(ev,'weight')) @@ -340,3 +364,9 @@ def make_argset(_ws=None,_varNames=None): fout.Close() ws.Delete() fout.Delete() + current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + filename = 'Total_Yields.txt' + with open(filename, "a") as file: + file.write("{} -Year: {} - Prod mode: {} - Mass: {} - 
Total Weight: {}\n".format(current_date,opt.year,opt.inputMass, opt.productionMode, tot)) + + print('Total Weight = '+str(tot)) diff --git a/Yield/ALL__BSM.tex b/Yield/ALL__BSM.tex new file mode 100644 index 00000000..2b3d18ce --- /dev/null +++ b/Yield/ALL__BSM.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrrrrrrrrrrrrrrrr} +\toprule +proc & vbf $f_{L1Zg}=0.5$ & vbf $f_{L1Zg}=1$ & vbf $f_{L1}=0.5$ & vbf $f_{L1}=1$ & vbf $f_{a2}=0.5$ & vbf $f_{a2}=1$ & vbf $f_{a3}=0.5$ & vbf $f_{a3}=1$ & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ +Category & & & & & & & & & & & & & & & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 105.525 & 190.166 & 83.011 & 203.523 & 58.178 & 126.053 & 75.023 & 150.183 & 6.713 & 7.330 & 2.115 & 1.167 & 0.799 & 1.556 & 1.568 & 3.685 & 3.522 & 3.273 & 1.071 & 0.549 & 0.368 & 0.735 \\ +qqH BSM like Tag1 & 57.140 & 95.488 & 70.703 & 106.787 & 42.726 & 79.388 & 70.167 & 122.648 & 0.691 & 0.554 & 0.816 & 0.531 & 0.369 & 0.682 & 0.127 & 0.255 & 0.163 & 0.243 & 0.426 & 0.258 & 0.165 & 0.359 \\ +qqH SM like & 43.169 & 29.718 & 49.296 & 25.370 & 35.321 & 25.626 & 37.073 & 14.812 & 0.076 & 0.138 & 0.437 & 0.368 & 0.285 & 0.490 & 0.061 & 0.095 & 0.042 & 0.067 & 0.223 & 0.179 & 0.134 & 0.231 \\ +qqH ggH like Tag0 & 34.373 & 18.688 & 38.794 & 15.608 & 50.298 & 27.627 & 33.015 & 12.565 & 0.419 & 0.403 & 1.200 & 1.316 & 1.323 & 1.374 & 0.468 & 0.200 & 0.239 & 0.255 & 0.586 & 0.720 & 0.659 & 0.787 \\ +qqH ggH like Tag1 & 26.233 & 40.522 & 35.399 & 47.495 & 57.303 & 84.720 & 38.631 & 63.393 & 0.180 & 0.303 & 1.115 & 0.780 & 0.478 & 0.718 & 0.149 & 0.171 & 0.095 & 0.196 & 0.690 & 0.445 & 0.233 & 0.359 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git 
a/Yield/ALL__BSM_err.tex b/Yield/ALL__BSM_err.tex new file mode 100644 index 00000000..22561e3a --- /dev/null +++ b/Yield/ALL__BSM_err.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrrrrrrrrrrrrrrrr} +\toprule +proc & vbf $f_{L1Zg}=0.5$ & vbf $f_{L1Zg}=1$ & vbf $f_{L1}=0.5$ & vbf $f_{L1}=1$ & vbf $f_{a2}=0.5$ & vbf $f_{a2}=1$ & vbf $f_{a3}=0.5$ & vbf $f_{a3}=1$ & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ +Category & & & & & & & & & & & & & & & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 0.002 & 0.002 & 0.003 & 0.001 & 0.003 & 0.002 & 0.003 & 0.002 & 0.004 & 0.004 & 0.009 & 0.012 & 0.013 & 0.010 & 0.007 & 0.005 & 0.005 & 0.005 & 0.009 & 0.013 & 0.016 & 0.012 \\ +qqH BSM like Tag1 & 0.003 & 0.002 & 0.003 & 0.002 & 0.003 & 0.002 & 0.003 & 0.002 & 0.023 & 0.019 & 0.017 & 0.022 & 0.025 & 0.018 & 0.032 & 0.028 & 0.028 & 0.023 & 0.017 & 0.023 & 0.029 & 0.022 \\ +qqH SM like & 0.004 & 0.005 & 0.003 & 0.004 & 0.004 & 0.005 & 0.004 & 0.006 & 0.059 & 0.040 & 0.026 & 0.029 & 0.030 & 0.024 & 0.051 & 0.053 & 0.064 & 0.047 & 0.027 & 0.030 & 0.036 & 0.029 \\ +qqH ggH like Tag0 & 0.003 & 0.005 & 0.003 & 0.005 & 0.003 & 0.004 & 0.003 & 0.006 & 0.026 & 0.024 & 0.016 & 0.015 & 0.014 & 0.014 & 0.019 & 0.033 & 0.027 & 0.024 & 0.016 & 0.015 & 0.016 & 0.017 \\ +qqH ggH like Tag1 & 0.004 & 0.003 & 0.004 & 0.003 & 0.002 & 0.002 & 0.003 & 0.002 & 0.036 & 0.026 & 0.015 & 0.019 & 0.023 & 0.019 & 0.031 & 0.034 & 0.038 & 0.027 & 0.014 & 0.018 & 0.027 & 0.022 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/ALL_vbf_BSM.tex b/Yield/ALL_vbf_BSM.tex new file mode 100644 index 00000000..6ff46736 --- /dev/null +++ b/Yield/ALL_vbf_BSM.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + 
\makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & vbf $f_{L1Zg}=0.5$ & vbf $f_{L1Zg}=1$ & vbf $f_{L1}=0.5$ & vbf $f_{L1}=1$ & vbf $f_{a2}=0.5$ & vbf $f_{a2}=1$ & vbf $f_{a3}=0.5$ & vbf $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 105.525 & 190.166 & 83.011 & 203.523 & 58.178 & 126.053 & 75.023 & 150.183 \\ +qqH BSM like Tag1 & 57.140 & 95.488 & 70.703 & 106.787 & 42.726 & 79.388 & 70.167 & 122.648 \\ +qqH SM like & 43.169 & 29.718 & 49.296 & 25.370 & 35.321 & 25.626 & 37.073 & 14.812 \\ +qqH ggH like Tag0 & 34.373 & 18.688 & 38.794 & 15.608 & 50.298 & 27.627 & 33.015 & 12.565 \\ +qqH ggH like Tag1 & 26.233 & 40.522 & 35.399 & 47.495 & 57.303 & 84.720 & 38.631 & 63.393 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/ALL_vbf_BSM_err.tex b/Yield/ALL_vbf_BSM_err.tex new file mode 100644 index 00000000..d43a9f08 --- /dev/null +++ b/Yield/ALL_vbf_BSM_err.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & vbf $f_{L1Zg}=0.5$ & vbf $f_{L1Zg}=1$ & vbf $f_{L1}=0.5$ & vbf $f_{L1}=1$ & vbf $f_{a2}=0.5$ & vbf $f_{a2}=1$ & vbf $f_{a3}=0.5$ & vbf $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 0.002 & 0.002 & 0.003 & 0.001 & 0.003 & 0.002 & 0.003 & 0.002 \\ +qqH BSM like Tag1 & 0.003 & 0.002 & 0.003 & 0.002 & 0.003 & 0.002 & 0.003 & 0.002 \\ +qqH SM like & 0.004 & 0.005 & 0.003 & 0.004 & 0.004 & 0.005 & 0.004 & 0.006 \\ +qqH ggH like Tag0 & 0.003 & 0.005 & 0.003 & 0.005 & 0.003 & 0.004 & 0.003 & 0.006 \\ +qqH ggH like Tag1 & 0.004 & 0.003 & 0.004 & 0.003 & 0.002 & 0.002 & 0.003 & 0.002 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/ALL_wh_BSM.tex b/Yield/ALL_wh_BSM.tex new file mode 100644 index 00000000..066be655 --- /dev/null +++ b/Yield/ALL_wh_BSM.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% 
+\begin{tabular}{lrrrrrr} +\toprule +proc & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ \\ +Category & & & & & & \\ +\midrule +qqH BSM like Tag0 & 6.713 & 7.330 & 2.115 & 1.167 & 0.799 & 1.556 \\ +qqH BSM like Tag1 & 0.691 & 0.554 & 0.816 & 0.531 & 0.369 & 0.682 \\ +qqH SM like & 0.076 & 0.138 & 0.437 & 0.368 & 0.285 & 0.490 \\ +qqH ggH like Tag0 & 0.419 & 0.403 & 1.200 & 1.316 & 1.323 & 1.374 \\ +qqH ggH like Tag1 & 0.180 & 0.303 & 1.115 & 0.780 & 0.478 & 0.718 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/ALL_wh_BSM_err.tex b/Yield/ALL_wh_BSM_err.tex new file mode 100644 index 00000000..882aacc0 --- /dev/null +++ b/Yield/ALL_wh_BSM_err.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrr} +\toprule +proc & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ \\ +Category & & & & & & \\ +\midrule +qqH BSM like Tag0 & 0.004 & 0.004 & 0.009 & 0.012 & 0.013 & 0.010 \\ +qqH BSM like Tag1 & 0.023 & 0.019 & 0.017 & 0.022 & 0.025 & 0.018 \\ +qqH SM like & 0.059 & 0.040 & 0.026 & 0.029 & 0.030 & 0.024 \\ +qqH ggH like Tag0 & 0.026 & 0.024 & 0.016 & 0.015 & 0.014 & 0.014 \\ +qqH ggH like Tag1 & 0.036 & 0.026 & 0.015 & 0.019 & 0.023 & 0.019 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/ALL_zh_BSM.tex b/Yield/ALL_zh_BSM.tex new file mode 100644 index 00000000..35922069 --- /dev/null +++ b/Yield/ALL_zh_BSM.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 1.568 & 3.685 & 3.522 & 3.273 & 1.071 & 0.549 & 0.368 & 0.735 \\ +qqH BSM like Tag1 & 0.127 & 0.255 
& 0.163 & 0.243 & 0.426 & 0.258 & 0.165 & 0.359 \\ +qqH SM like & 0.061 & 0.095 & 0.042 & 0.067 & 0.223 & 0.179 & 0.134 & 0.231 \\ +qqH ggH like Tag0 & 0.468 & 0.200 & 0.239 & 0.255 & 0.586 & 0.720 & 0.659 & 0.787 \\ +qqH ggH like Tag1 & 0.149 & 0.171 & 0.095 & 0.196 & 0.690 & 0.445 & 0.233 & 0.359 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/ALL_zh_BSM_err.tex b/Yield/ALL_zh_BSM_err.tex new file mode 100644 index 00000000..4de1e6fb --- /dev/null +++ b/Yield/ALL_zh_BSM_err.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 0.007 & 0.005 & 0.005 & 0.005 & 0.009 & 0.013 & 0.016 & 0.012 \\ +qqH BSM like Tag1 & 0.032 & 0.028 & 0.028 & 0.023 & 0.017 & 0.023 & 0.029 & 0.022 \\ +qqH SM like & 0.051 & 0.053 & 0.064 & 0.047 & 0.027 & 0.030 & 0.036 & 0.029 \\ +qqH ggH like Tag0 & 0.019 & 0.033 & 0.027 & 0.024 & 0.016 & 0.015 & 0.016 & 0.017 \\ +qqH ggH like Tag1 & 0.031 & 0.034 & 0.038 & 0.027 & 0.014 & 0.018 & 0.027 & 0.022 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/Category_MAP.tex b/Yield/Category_MAP.tex new file mode 100644 index 00000000..7aa7e439 --- /dev/null +++ b/Yield/Category_MAP.tex @@ -0,0 +1,28 @@ +\begin{tabular}{ll} +\toprule + Cat & Category \\ +\midrule + RECO_VBFLIKEGGH_Tag0 & RECO_VBFLIKEGGH_Tag0 \\ + RECO_VBFLIKEGGH_Tag1 & RECO_VBFLIKEGGH_Tag1 \\ + $D_{bkg}$ $<$0.05,D0−$>$0.6,$D_{bsm}$ $<$0.97 & qqH SM like \\ + $D_{bkg}$$>$0.05,D0− $<$0.6,$D_{bsm}$ $<$0.97 & qqH ggH like Tag1 \\ + 0.229$>$STXS$>$-0.135\&1.00$>$ANOM$>$-0.16 & ZH-lep Tag0 \\ + 1.000$>$STXS$>$0.385\&1.00$>$ANOM$>$0.79 & WH-lep Tag3 \\ + 1.000$>$STXS$>$0.385\&0.79$>$ANOM$>$-0.68 & WH-lep Tag2 \\ + 
0.385$>$STXS$>$0.125\&1.00$>$ANOM$>$0.89 & WH-lep Tag1 \\ + 0.385$>$STXS$>$0.125\&0.89$>$ANOM$>$-0.68 & WH-lep Tag0 \\ + 0.798$>$STXS$>$0.619\&0.92$>$ANOM$>$-1.00 & VH-MET Tag3 \\ + 0.798$>$STXS$>$0.619\&1.00$>$ANOM$>$0.92 & VH-MET Tag2 \\ + 1.000$>$STXS$>$0.798\&0.86$>$ANOM$>$-1.00 & VH-MET Tag1 \\ + 1.000$>$STXS$>$0.798\&1.00$>$ANOM$>$0.86 & VH-MET Tag0 \\ + 1.000$>$STXS$>$0.229\&1.00$>$ANOM$>$-0.68 & ZH-lep Tag1 \\ + $DNN_{bsm}$$<$0.45\&−2.50 $<$log($DNN_{bkg}$)$... & VH had SM like Tag1 \\ + $DNN_{bsm}$$<$0.56\&log($DNN_{bkg}$) $<$−2.50 & VH had SM like Tag0 \\ + $DNN_{bsm}$$>$0.75\¬ ($DNN_{bsm}$$>$0.89\&l... & VH had BSM like Tag1 \\ + $DNN_{bsm}$$>$0.89\&log($DNN_{bkg}$) $<$−2.50 & VH had BSM like Tag0 \\ + $D_{bkg}$ $<$0.05,D0− $<$0.6,$D_{bsm}$ $<$0.97 & qqH BSM like Tag1 \\ + $D_{bkg}$ $<$0.05,D0− $<$0.6,$D_{bsm}$$>$0.97 & qqH BSM like Tag0 \\ + $D_{bkg}$$>$0.05,D0−$>$0.6,$D_{bsm}$$<$0.97 & qqH ggH like Tag0 \\ + $DNN_{bsm}$$<$0.45\&−1.38 $<$log($DNN_{bkg}$)$... & VH had SM like Tag2 \\ +\bottomrule +\end{tabular} diff --git a/Yield/ComputeYieldTable.py b/Yield/ComputeYieldTable.py new file mode 100644 index 00000000..29b0e4a7 --- /dev/null +++ b/Yield/ComputeYieldTable.py @@ -0,0 +1,96 @@ + + +import ROOT +import pandas as pd +from procToLatex import * +from catToLatex import * +from catToLatex2 import * +from math import floor +import codecs + +def err(x): + if x == 0: return 0 + else: return (1/(x)**0.5) +def truncate_to_two_decimals(x): + if isinstance(x, (int, float)): + return math.floor(x * 1000) / 1000 + return x + + +lumiMap = {"2016preVFP":19.51,"2016postVFP":16.8,"2017":41.48,"2018":59.83 } +year = ["2016preVFP" , "2016postVFP", "2017", "2018" ] + +df_combined = pd.DataFrame() + +for y in year: + df= pd.read_csv('yield_%s.txt'%(y), sep=r'\s+',header=0) + df_combined = pd.concat([df_combined, df], ignore_index=True) + +df_combined["proc"] = df_combined["proc"].apply(procToLatex) +df_combined["Category"] = df_combined["cat"].apply(catToLatex2) 
+df_combined["Cat"] = df_combined["cat"].apply(catToLatex) + + +df_combined["yield"] = df_combined["yield"].apply(truncate_to_two_decimals) +df_combined = df_combined.groupby(['proc', 'cat',"Category","Cat"], as_index=False).sum() +df_combined["err"] = df_combined["entries"].apply(err) +df_combined["err"] = df_combined["err"].apply(truncate_to_two_decimals) +df_combined = df_combined.sort_values(by=['proc'], ascending=[True]) +#################################Category_MAP.tex############################ +df = df_combined[(df_combined["proc"]=='vbf') & +((df_combined["cat"].str.contains('VBF') | df_combined["Category"].str.contains('H-') ))][['Cat',"Category"]] +latex_table = df.to_latex(index=False, escape=False ) +with codecs.open('Category_MAP.tex', 'w', 'utf-8') as f: + f.write(latex_table) +######################################################################################### +##############################Standard Model ######################################### +df_combined_proc = df_combined[df_combined["Category"].str.contains(" ")] +df_combined_proc = df_combined_proc[(~df_combined_proc["proc"].str.contains('f_'))] +pivot_table = df_combined_proc.pivot(index="Category", columns="proc", values="yield") +latex_table = pivot_table.to_latex(index=True, escape=False) +with codecs.open('SM.tex', 'w', 'utf-8') as f: + f.write(latex_table) +pivot_table = df_combined_proc.pivot(index="Category", columns="proc", values="err") +latex_table = pivot_table.to_latex(index=True, escape=False) + # Save the table to a .tex file (optional) +with codecs.open('SM_err.tex', 'w', 'utf-8') as f: + f.write(latex_table) + + + +######################################################################################### +############################################ BSM ######################################### + + + + + +Analysis = [['VH_LEP', '-','wh'],['VH_LEP', '-','zh'], + ['VHHAD', 'had','wh'],['VHHAD', 'had','zh'], + ['VBF', 'qqH','vbf']] + +Analysis = [['ALL', 
'qqH','vbf'],['ALL', 'qqH','wh'],['ALL', 'qqH','zh']] +for a in Analysis: + # BSM + df_combined_proc = df_combined[df_combined["Category"].str.contains(a[1])] + df_combined_proc = df_combined_proc[(df_combined_proc["proc"].str.contains('f_')) + & (df_combined_proc["proc"].str.contains(a[2]))] + + pivot_table = df_combined_proc.pivot(index="Category", columns="proc", values="yield") + latex_table = pivot_table.to_latex(index=True, escape=False) + with codecs.open('%s_%s_BSM.tex'%(a[0],a[2]), 'w', 'utf-8') as f: + f.write('\\begin{table}[H] \n \\tiny \n \\right \n \\makebox[\\textwidth][c]{% \n') + f.write(latex_table) + f.write('}\\end{table}') + + pivot_table = df_combined_proc.pivot(index="Category", columns="proc", values="err") + latex_table = pivot_table.to_latex(index=True, escape=False) + + with codecs.open('%s_%s_BSM_err.tex'%(a[0],a[2]), 'w', 'utf-8') as f: + + f.write('\\begin{table}[H] \n \\tiny \n \\right \n \\makebox[\\textwidth][c]{% \n') + f.write(latex_table) + f.write('}\\end{table}') + + + diff --git a/Yield/SM.tex b/Yield/SM.tex new file mode 100644 index 00000000..724d37fa --- /dev/null +++ b/Yield/SM.tex @@ -0,0 +1,27 @@ +\begin{tabular}{lrrrrrr} +\toprule +proc & ggh & tth & vbf & wh_{minus} & wh_{plus} & zh \\ +Category & & & & & & \\ +\midrule +VH had BSM like Tag0 & 3.431 & 0.049 & 0.340 & 0.191 & 0.144 & 0.257 \\ +VH had BSM like Tag1 & 8.684 & 0.141 & 0.612 & 0.836 & 0.706 & 0.825 \\ +VH had SM like Tag0 & 7.588 & 0.603 & 0.461 & 2.309 & 2.448 & 3.195 \\ +VH had SM like Tag1 & 22.480 & 1.423 & 1.027 & 3.607 & 4.210 & 4.881 \\ +VH had SM like Tag2 & 74.291 & 4.031 & 6.056 & 4.610 & 5.291 & 6.185 \\ +VH-MET Tag0 & 0.015 & 0.003 & 0.025 & 0.150 & 0.123 & 0.746 \\ +VH-MET Tag1 & 0.054 & 0.012 & 0.021 & 0.385 & 0.419 & 1.341 \\ +VH-MET Tag2 & 0.444 & 0.046 & 0.187 & 0.158 & 0.128 & 0.274 \\ +VH-MET Tag3 & 0.798 & 0.184 & 0.355 & 1.108 & 1.248 & 2.988 \\ +WH-lep Tag0 & 0.041 & 0.042 & 0.012 & 0.672 & 0.560 & 0.053 \\ +WH-lep Tag1 & 0.036 & 0.076 
& 0.013 & 2.642 & 2.779 & 0.286 \\ +WH-lep Tag2 & 0.071 & 0.047 & 0.026 & 0.112 & 0.109 & 0.031 \\ +WH-lep Tag3 & 0.135 & 0.306 & 0.025 & 1.328 & 1.455 & 0.379 \\ +ZH-lep Tag0 & 0.000 & 0.007 & 0.000 & 0.000 & 0.000 & 1.173 \\ +ZH-lep Tag1 & 0.000 & 0.031 & 0.000 & 0.000 & 0.000 & 0.137 \\ +qqH BSM like Tag0 & 9.398 & 0.076 & 1.411 & 0.176 & 0.124 & 0.154 \\ +qqH BSM like Tag1 & 12.019 & 0.129 & 18.229 & 0.125 & 0.109 & 0.115 \\ +qqH SM like & 19.410 & 0.024 & 59.601 & 0.037 & 0.036 & 0.043 \\ +qqH ggH like Tag0 & 64.884 & 1.023 & 50.199 & 0.799 & 0.971 & 1.010 \\ +qqH ggH like Tag1 & 41.707 & 3.615 & 14.818 & 1.304 & 1.307 & 1.491 \\ +\bottomrule +\end{tabular} diff --git a/Yield/SM_err.tex b/Yield/SM_err.tex new file mode 100644 index 00000000..62394364 --- /dev/null +++ b/Yield/SM_err.tex @@ -0,0 +1,27 @@ +\begin{tabular}{lrrrrrr} +\toprule +proc & ggh & tth & vbf & wh_{minus} & wh_{plus} & zh \\ +Category & & & & & & \\ +\midrule +VH had BSM like Tag0 & 0.022 & 0.042 & 0.029 & 0.034 & 0.043 & 0.034 \\ +VH had BSM like Tag1 & 0.013 & 0.025 & 0.020 & 0.018 & 0.020 & 0.019 \\ +VH had SM like Tag0 & 0.015 & 0.012 & 0.023 & 0.010 & 0.011 & 0.009 \\ +VH had SM like Tag1 & 0.007 & 0.008 & 0.012 & 0.008 & 0.008 & 0.007 \\ +VH had SM like Tag2 & 0.004 & 0.004 & 0.005 & 0.007 & 0.007 & 0.007 \\ +VH-MET Tag0 & 0.213 & 0.130 & 0.093 & 0.040 & 0.046 & 0.020 \\ +VH-MET Tag1 & 0.229 & 0.094 & 0.104 & 0.023 & 0.024 & 0.014 \\ +VH-MET Tag2 & 0.062 & 0.043 & 0.030 & 0.038 & 0.048 & 0.033 \\ +VH-MET Tag3 & 0.043 & 0.023 & 0.019 & 0.014 & 0.015 & 0.009 \\ +WH-lep Tag0 & 0.277 & 0.051 & 0.145 & 0.018 & 0.022 & 0.071 \\ +WH-lep Tag1 & 0.204 & 0.034 & 0.102 & 0.009 & 0.010 & 0.031 \\ +WH-lep Tag2 & 0.158 & 0.041 & 0.088 & 0.045 & 0.051 & 0.093 \\ +WH-lep Tag3 & 0.107 & 0.017 & 0.061 & 0.013 & 0.014 & 0.027 \\ +ZH-lep Tag0 & 0.000 & 0.090 & 0.000 & 0.000 & 1.000 & 0.015 \\ +ZH-lep Tag1 & 0.000 & 0.050 & 0.000 & 0.000 & 0.000 & 0.047 \\ +qqH BSM like Tag0 & 0.012 & 0.028 & 0.011 & 
0.036 & 0.049 & 0.047 \\ +qqH BSM like Tag1 & 0.010 & 0.025 & 0.003 & 0.044 & 0.054 & 0.052 \\ +qqH SM like & 0.007 & 0.054 & 0.001 & 0.077 & 0.089 & 0.080 \\ +qqH ggH like Tag0 & 0.004 & 0.008 & 0.001 & 0.017 & 0.018 & 0.017 \\ +qqH ggH like Tag1 & 0.005 & 0.005 & 0.003 & 0.013 & 0.015 & 0.014 \\ +\bottomrule +\end{tabular} diff --git a/Yield/VBF_vbf_BSM.tex b/Yield/VBF_vbf_BSM.tex new file mode 100644 index 00000000..6ff46736 --- /dev/null +++ b/Yield/VBF_vbf_BSM.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & vbf $f_{L1Zg}=0.5$ & vbf $f_{L1Zg}=1$ & vbf $f_{L1}=0.5$ & vbf $f_{L1}=1$ & vbf $f_{a2}=0.5$ & vbf $f_{a2}=1$ & vbf $f_{a3}=0.5$ & vbf $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 105.525 & 190.166 & 83.011 & 203.523 & 58.178 & 126.053 & 75.023 & 150.183 \\ +qqH BSM like Tag1 & 57.140 & 95.488 & 70.703 & 106.787 & 42.726 & 79.388 & 70.167 & 122.648 \\ +qqH SM like & 43.169 & 29.718 & 49.296 & 25.370 & 35.321 & 25.626 & 37.073 & 14.812 \\ +qqH ggH like Tag0 & 34.373 & 18.688 & 38.794 & 15.608 & 50.298 & 27.627 & 33.015 & 12.565 \\ +qqH ggH like Tag1 & 26.233 & 40.522 & 35.399 & 47.495 & 57.303 & 84.720 & 38.631 & 63.393 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VBF_vbf_BSM_err.tex b/Yield/VBF_vbf_BSM_err.tex new file mode 100644 index 00000000..d43a9f08 --- /dev/null +++ b/Yield/VBF_vbf_BSM_err.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & vbf $f_{L1Zg}=0.5$ & vbf $f_{L1Zg}=1$ & vbf $f_{L1}=0.5$ & vbf $f_{L1}=1$ & vbf $f_{a2}=0.5$ & vbf $f_{a2}=1$ & vbf $f_{a3}=0.5$ & vbf $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +qqH BSM like Tag0 & 0.002 & 0.002 & 0.003 & 0.001 & 0.003 & 0.002 & 0.003 & 0.002 \\ +qqH BSM like Tag1 & 0.003 & 0.002 & 0.003 & 0.002 & 0.003 & 0.002 & 0.003 & 0.002 \\ +qqH SM like & 0.004 & 
0.005 & 0.003 & 0.004 & 0.004 & 0.005 & 0.004 & 0.006 \\ +qqH ggH like Tag0 & 0.003 & 0.005 & 0.003 & 0.005 & 0.003 & 0.004 & 0.003 & 0.006 \\ +qqH ggH like Tag1 & 0.004 & 0.003 & 0.004 & 0.003 & 0.002 & 0.002 & 0.003 & 0.002 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VHHAD_wh_BSM.tex b/Yield/VHHAD_wh_BSM.tex new file mode 100644 index 00000000..9134e3fa --- /dev/null +++ b/Yield/VHHAD_wh_BSM.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrr} +\toprule +proc & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ \\ +Category & & & & & & \\ +\midrule +VH had BSM like Tag0 & 5.771 & 8.159 & 5.454 & 3.267 & 2.805 & 5.306 \\ +VH had BSM like Tag1 & 4.091 & 9.038 & 6.456 & 4.293 & 3.804 & 6.547 \\ +VH had SM like Tag0 & 0.623 & 4.026 & 4.306 & 3.395 & 3.462 & 3.186 \\ +VH had SM like Tag1 & 2.026 & 2.363 & 3.971 & 4.015 & 4.869 & 3.002 \\ +VH had SM like Tag2 & 2.813 & 1.852 & 3.648 & 4.658 & 5.035 & 3.166 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VHHAD_wh_BSM_err.tex b/Yield/VHHAD_wh_BSM_err.tex new file mode 100644 index 00000000..90014186 --- /dev/null +++ b/Yield/VHHAD_wh_BSM_err.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrr} +\toprule +proc & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ \\ +Category & & & & & & \\ +\midrule +VH had BSM like Tag0 & 0.004 & 0.004 & 0.005 & 0.006 & 0.006 & 0.004 \\ +VH had BSM like Tag1 & 0.005 & 0.004 & 0.004 & 0.006 & 0.005 & 0.004 \\ +VH had SM like Tag0 & 0.015 & 0.006 & 0.006 & 0.007 & 0.006 & 0.006 \\ +VH had SM like Tag1 & 0.009 & 0.008 & 0.006 & 0.006 & 0.005 & 0.007 \\ +VH had SM like Tag2 & 0.009 & 0.011 & 0.008 & 0.007 & 0.006 & 0.008 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file 
diff --git a/Yield/VHHAD_zh_BSM.tex b/Yield/VHHAD_zh_BSM.tex new file mode 100644 index 00000000..74b1f7da --- /dev/null +++ b/Yield/VHHAD_zh_BSM.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +VH had BSM like Tag0 & 2.744 & 5.862 & 4.919 & 5.612 & 3.907 & 2.136 & 1.858 & 3.750 \\ +VH had BSM like Tag1 & 2.861 & 5.167 & 3.058 & 5.265 & 3.965 & 2.434 & 2.269 & 4.203 \\ +VH had SM like Tag0 & 2.704 & 2.289 & 0.604 & 2.546 & 2.693 & 2.283 & 2.335 & 2.083 \\ +VH had SM like Tag1 & 2.922 & 1.212 & 1.278 & 1.639 & 2.325 & 2.590 & 3.005 & 1.860 \\ +VH had SM like Tag2 & 2.996 & 0.945 & 1.985 & 1.302 & 2.341 & 3.225 & 3.287 & 1.967 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VHHAD_zh_BSM_err.tex b/Yield/VHHAD_zh_BSM_err.tex new file mode 100644 index 00000000..71c6032b --- /dev/null +++ b/Yield/VHHAD_zh_BSM_err.tex @@ -0,0 +1,17 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +VH had BSM like Tag0 & 0.005 & 0.004 & 0.004 & 0.003 & 0.004 & 0.006 & 0.006 & 0.005 \\ +VH had BSM like Tag1 & 0.005 & 0.004 & 0.005 & 0.003 & 0.004 & 0.005 & 0.006 & 0.005 \\ +VH had SM like Tag0 & 0.005 & 0.007 & 0.012 & 0.005 & 0.005 & 0.006 & 0.006 & 0.007 \\ +VH had SM like Tag1 & 0.005 & 0.011 & 0.009 & 0.007 & 0.006 & 0.006 & 0.006 & 0.008 \\ +VH had SM like Tag2 & 0.007 & 0.015 & 0.009 & 0.010 & 0.008 & 0.006 & 0.007 & 0.009 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VH_LEP_wh_BSM.tex 
b/Yield/VH_LEP_wh_BSM.tex new file mode 100644 index 00000000..035b8310 --- /dev/null +++ b/Yield/VH_LEP_wh_BSM.tex @@ -0,0 +1,22 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrr} +\toprule +proc & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ \\ +Category & & & & & & \\ +\midrule +VH-MET Tag0 & 3.519 & 4.578 & 4.842 & 2.886 & 2.318 & 4.096 \\ +VH-MET Tag1 & 0.227 & 1.288 & 2.705 & 2.244 & 1.883 & 2.849 \\ +VH-MET Tag2 & 9.848 & 10.616 & 3.917 & 2.383 & 1.806 & 3.735 \\ +VH-MET Tag3 & 0.788 & 2.412 & 4.575 & 4.276 & 3.727 & 4.985 \\ +WH-lep Tag0 & 28.155 & 31.037 & 14.610 & 9.198 & 7.258 & 13.420 \\ +WH-lep Tag1 & 1.648 & 5.928 & 7.234 & 7.123 & 7.186 & 7.792 \\ +WH-lep Tag2 & 1.405 & 1.561 & 1.525 & 1.069 & 0.785 & 1.681 \\ +WH-lep Tag3 & 0.732 & 1.076 & 1.978 & 2.714 & 2.479 & 2.558 \\ +ZH-lep Tag0 & 0.002 & 0.001 & 0.000 & 0.000 & 0.000 & 0.000 \\ +ZH-lep Tag1 & 0.001 & 0.000 & 0.001 & 0.000 & 0.000 & 0.002 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VH_LEP_wh_BSM_err.tex b/Yield/VH_LEP_wh_BSM_err.tex new file mode 100644 index 00000000..cd34e90d --- /dev/null +++ b/Yield/VH_LEP_wh_BSM_err.tex @@ -0,0 +1,22 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrr} +\toprule +proc & wh $f_{L1}=0.5$ & wh $f_{L1}=1$ & wh $f_{a2}=0.5$ & wh $f_{a2}=1$ & wh $f_{a3}=0.5$ & wh $f_{a3}=1$ \\ +Category & & & & & & \\ +\midrule +VH-MET Tag0 & 0.007 & 0.006 & 0.006 & 0.008 & 0.009 & 0.006 \\ +VH-MET Tag1 & 0.028 & 0.011 & 0.008 & 0.009 & 0.009 & 0.008 \\ +VH-MET Tag2 & 0.004 & 0.004 & 0.007 & 0.010 & 0.010 & 0.007 \\ +VH-MET Tag3 & 0.015 & 0.008 & 0.006 & 0.007 & 0.007 & 0.006 \\ +WH-lep Tag0 & 0.002 & 0.002 & 0.003 & 0.004 & 0.005 & 0.003 \\ +WH-lep Tag1 & 0.011 & 0.005 & 0.005 & 0.005 & 0.005 & 0.005 \\ +WH-lep Tag2 & 0.012 & 0.011 & 0.011 & 0.014 & 0.015 & 0.011 \\ +WH-lep Tag3 & 0.016 & 0.013 & 
0.010 & 0.009 & 0.008 & 0.008 \\ +ZH-lep Tag0 & 0.316 & 0.229 & 0.408 & 0.353 & 0.500 & 0.301 \\ +ZH-lep Tag1 & 0.250 & 0.377 & 0.301 & 0.408 & 0.577 & 0.235 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VH_LEP_zh_BSM.tex b/Yield/VH_LEP_zh_BSM.tex new file mode 100644 index 00000000..c7864b0d --- /dev/null +++ b/Yield/VH_LEP_zh_BSM.tex @@ -0,0 +1,22 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ +Category & & & & & & & & \\ +\midrule +VH-MET Tag0 & 7.552 & 15.436 & 12.790 & 14.617 & 8.664 & 5.067 & 4.161 & 7.459 \\ +VH-MET Tag1 & 1.933 & 1.744 & 0.306 & 1.983 & 3.218 & 2.930 & 2.591 & 3.540 \\ +VH-MET Tag2 & 2.913 & 6.315 & 5.573 & 5.694 & 2.514 & 1.371 & 1.600 & 2.024 \\ +VH-MET Tag3 & 2.983 & 1.983 & 0.798 & 2.305 & 3.852 & 4.085 & 3.938 & 4.323 \\ +WH-lep Tag0 & 0.719 & 1.549 & 1.320 & 1.443 & 0.924 & 0.525 & 0.457 & 0.844 \\ +WH-lep Tag1 & 0.360 & 0.312 & 0.090 & 0.361 & 0.460 & 0.475 & 0.401 & 0.476 \\ +WH-lep Tag2 & 0.288 & 0.656 & 0.615 & 0.597 & 0.465 & 0.270 & 0.213 & 0.394 \\ +WH-lep Tag3 & 0.362 & 0.291 & 0.128 & 0.317 & 0.443 & 0.471 & 0.413 & 0.477 \\ +ZH-lep Tag0 & 3.219 & 5.510 & 4.492 & 5.305 & 3.004 & 2.372 & 2.995 & 3.139 \\ +ZH-lep Tag1 & 0.160 & 0.251 & 0.201 & 0.247 & 0.208 & 0.179 & 0.159 & 0.245 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/VH_LEP_zh_BSM_err.tex b/Yield/VH_LEP_zh_BSM_err.tex new file mode 100644 index 00000000..dd8abf3c --- /dev/null +++ b/Yield/VH_LEP_zh_BSM_err.tex @@ -0,0 +1,22 @@ +\begin{table}[H] + \tiny + \right + \makebox[\textwidth][c]{% +\begin{tabular}{lrrrrrrrr} +\toprule +proc & zh $f_{L1Zg}=0.5$ & zh $f_{L1Zg}=1$ & zh $f_{L1}=0.5$ & zh $f_{L1}=1$ & zh $f_{a2}=0.5$ & zh $f_{a2}=1$ & zh $f_{a3}=0.5$ & zh $f_{a3}=1$ \\ 
+Category & & & & & & & & \\ +\midrule +VH-MET Tag0 & 0.003 & 0.003 & 0.003 & 0.002 & 0.003 & 0.004 & 0.005 & 0.004 \\ +VH-MET Tag1 & 0.007 & 0.009 & 0.019 & 0.007 & 0.005 & 0.006 & 0.007 & 0.006 \\ +VH-MET Tag2 & 0.006 & 0.005 & 0.004 & 0.004 & 0.007 & 0.009 & 0.011 & 0.008 \\ +VH-MET Tag3 & 0.006 & 0.009 & 0.012 & 0.007 & 0.005 & 0.005 & 0.005 & 0.005 \\ +WH-lep Tag0 & 0.012 & 0.010 & 0.009 & 0.009 & 0.011 & 0.015 & 0.017 & 0.014 \\ +WH-lep Tag1 & 0.017 & 0.023 & 0.037 & 0.018 & 0.015 & 0.015 & 0.017 & 0.017 \\ +WH-lep Tag2 & 0.020 & 0.016 & 0.015 & 0.014 & 0.015 & 0.021 & 0.024 & 0.019 \\ +WH-lep Tag3 & 0.018 & 0.024 & 0.031 & 0.019 & 0.016 & 0.015 & 0.017 & 0.017 \\ +ZH-lep Tag0 & 0.006 & 0.005 & 0.005 & 0.004 & 0.006 & 0.007 & 0.007 & 0.006 \\ +ZH-lep Tag1 & 0.027 & 0.026 & 0.024 & 0.022 & 0.023 & 0.025 & 0.028 & 0.024 \\ +\bottomrule +\end{tabular} +}\end{table} \ No newline at end of file diff --git a/Yield/catToLatex.py b/Yield/catToLatex.py new file mode 100644 index 00000000..c20a4039 --- /dev/null +++ b/Yield/catToLatex.py @@ -0,0 +1,40 @@ + +# -*- coding: utf-8 -*- +import os, sys +import glob +import re +import ROOT +import math +from collections import OrderedDict as od + +catToLatexMap = od() +catToLatexMap['RECO_VBFTOPO_ACGGH_Tag0'] = '$D_{bkg}$$>$0.05,D0−$>$0.6,$D_{bsm}$$<$0.97' +catToLatexMap['RECO_VBFTOPO_ACGGH_Tag1'] = '$D_{bkg}$$>$0.05,D0− $<$0.6,$D_{bsm}$ $<$0.97' +catToLatexMap['RECO_VBFTOPO_ACVBFBSM_Tag0'] = '$D_{bkg}$ $<$0.05,D0− $<$0.6,$D_{bsm}$$>$0.97' +catToLatexMap['RECO_VBFTOPO_ACVBFBSM_Tag1'] = '$D_{bkg}$ $<$0.05,D0− $<$0.6,$D_{bsm}$ $<$0.97' +catToLatexMap['RECO_VBFTOPO_ACVBFSM_Tag0'] = '$D_{bkg}$ $<$0.05,D0−$>$0.6,$D_{bsm}$ $<$0.97' + +catToLatexMap['RECO_VBFTOPO_ACVHHADBSM_Tag0'] = '$DNN_{bsm}$$>$0.89\&log($DNN_{bkg}$) $<$−2.50' +catToLatexMap['RECO_VBFTOPO_ACVHHADBSM_Tag1'] = '$DNN_{bsm}$$>$0.75\¬ ($DNN_{bsm}$$>$0.89\&log($DNN_{bkg}$)$>$−2.50)' +catToLatexMap['RECO_VBFTOPO_ACVHHADSM_Tag0'] = '$DNN_{bsm}$$<$0.56\&log($DNN_{bkg}$) 
$<$−2.50' +catToLatexMap['RECO_VBFTOPO_ACVHHADSM_Tag1'] = '$DNN_{bsm}$$<$0.45\&−2.50 $<$log($DNN_{bkg}$)$<$−1.38' +catToLatexMap['RECO_VBFTOPO_ACVHHADSM_Tag2'] = '$DNN_{bsm}$$<$0.45\&−1.38 $<$log($DNN_{bkg}$)$<$−0.72' + + +catToLatexMap['RECO_VH_MET_Tag0'] = '1.000$>$STXS$>$0.798\&1.00$>$ANOM$>$0.86' +catToLatexMap['RECO_VH_MET_Tag1'] = '1.000$>$STXS$>$0.798\&0.86$>$ANOM$>$-1.00' +catToLatexMap['RECO_VH_MET_Tag2'] = '0.798$>$STXS$>$0.619\&1.00$>$ANOM$>$0.92' +catToLatexMap['RECO_VH_MET_Tag3'] = '0.798$>$STXS$>$0.619\&0.92$>$ANOM$>$-1.00' + +catToLatexMap['RECO_WH_LEP_Tag0'] = '0.385$>$STXS$>$0.125\&0.89$>$ANOM$>$-0.68' +catToLatexMap['RECO_WH_LEP_Tag1'] = '0.385$>$STXS$>$0.125\&1.00$>$ANOM$>$0.89' +catToLatexMap['RECO_WH_LEP_Tag2'] = '1.000$>$STXS$>$0.385\&0.79$>$ANOM$>$-0.68' +catToLatexMap['RECO_WH_LEP_Tag3'] = '1.000$>$STXS$>$0.385\&1.00$>$ANOM$>$0.79' + +catToLatexMap['RECO_ZH_LEP_Tag0'] = '0.229$>$STXS$>$-0.135\&1.00$>$ANOM$>$-0.16' +catToLatexMap['RECO_ZH_LEP_Tag1'] = '1.000$>$STXS$>$0.229\&1.00$>$ANOM$>$-0.68' + +def catToLatex( _proc ): + k = _proc + if k in catToLatexMap: _proc = re.sub( k, catToLatexMap[k], _proc ) + return _proc \ No newline at end of file diff --git a/Yield/catToLatex2.py b/Yield/catToLatex2.py new file mode 100644 index 00000000..d4e71728 --- /dev/null +++ b/Yield/catToLatex2.py @@ -0,0 +1,40 @@ + +# -*- coding: utf-8 -*- +import os, sys +import glob +import re +import ROOT +import math +from collections import OrderedDict as od + +catToLatex2Map = od() +catToLatex2Map['RECO_VBFTOPO_ACGGH_Tag0'] = 'qqH ggH like Tag0' +catToLatex2Map['RECO_VBFTOPO_ACGGH_Tag1'] = 'qqH ggH like Tag1' +catToLatex2Map['RECO_VBFTOPO_ACVBFBSM_Tag0'] = 'qqH BSM like Tag0' +catToLatex2Map['RECO_VBFTOPO_ACVBFBSM_Tag1'] = 'qqH BSM like Tag1' +catToLatex2Map['RECO_VBFTOPO_ACVBFSM_Tag0'] = 'qqH SM like' + +catToLatex2Map['RECO_VBFTOPO_ACVHHADBSM_Tag0'] = "VH had BSM like Tag0" +catToLatex2Map['RECO_VBFTOPO_ACVHHADBSM_Tag1'] = "VH had BSM like Tag1" 
+catToLatex2Map['RECO_VBFTOPO_ACVHHADSM_Tag0'] = "VH had SM like Tag0" +catToLatex2Map['RECO_VBFTOPO_ACVHHADSM_Tag1'] = "VH had SM like Tag1" +catToLatex2Map['RECO_VBFTOPO_ACVHHADSM_Tag2'] = "VH had SM like Tag2" + + +catToLatex2Map['RECO_VH_MET_Tag0'] = 'VH-MET Tag0' +catToLatex2Map['RECO_VH_MET_Tag1'] = 'VH-MET Tag1' +catToLatex2Map['RECO_VH_MET_Tag2'] = 'VH-MET Tag2' +catToLatex2Map['RECO_VH_MET_Tag3'] = 'VH-MET Tag3' + +catToLatex2Map['RECO_WH_LEP_Tag0'] = 'WH-lep Tag0' +catToLatex2Map['RECO_WH_LEP_Tag1'] = 'WH-lep Tag1' +catToLatex2Map['RECO_WH_LEP_Tag2'] = 'WH-lep Tag2' +catToLatex2Map['RECO_WH_LEP_Tag3'] = 'WH-lep Tag3' + +catToLatex2Map['RECO_ZH_LEP_Tag0'] = 'ZH-lep Tag0' +catToLatex2Map['RECO_ZH_LEP_Tag1'] = 'ZH-lep Tag1' + +def catToLatex2( _proc ): + k = _proc + if k in catToLatex2Map: _proc = re.sub( k, catToLatex2Map[k], _proc ) + return _proc \ No newline at end of file diff --git a/Yield/procToLatex.py b/Yield/procToLatex.py new file mode 100644 index 00000000..16ecbca6 --- /dev/null +++ b/Yield/procToLatex.py @@ -0,0 +1,47 @@ +import os, sys +import glob +import re +import ROOT +import math +from collections import OrderedDict as od + +procToLatexMap = od() +procToLatexMap['GG2H'] = 'ggh' +procToLatexMap['VBF'] = 'vbf' +procToLatexMap['VBF_ALT_0PM'] = 'vbf_ALT_0PM' +procToLatexMap['VBF_ALT_0PH'] = 'vbf $f_{a2}=1$' +procToLatexMap['VBF_ALT_0PHf05'] = 'vbf $f_{a2}=0.5$' +procToLatexMap['VBF_ALT_0M'] = 'vbf $f_{a3}=1$' +procToLatexMap['VBF_ALT_0Mf05'] = 'vbf $f_{a3}=0.5$' +procToLatexMap['VBF_ALT_L1'] = 'vbf $f_{L1}=1$' +procToLatexMap['VBF_ALT_L1f05'] = 'vbf $f_{L1}=0.5$' +procToLatexMap['VBF_ALT_L1Zg'] = 'vbf $f_{L1Zg}=1$' +procToLatexMap['VBF_ALT_L1Zgf05'] = 'vbf $f_{L1Zg}=0.5$' +#procToLatexMap['WMINUSH2HQQ'] = 'wh' +#procToLatexMap['WPLUSH2HQQ'] = 'wh' + +procToLatexMap['WH_ALT0L1f05ph0'] = 'wh $f_{L1}=0.5$' +procToLatexMap['wh_ALT_L1'] = 'wh $f_{L1}=1$' +procToLatexMap['WH_ALT0PHf05ph0'] = 'wh $f_{a2}=0.5$' +procToLatexMap['WH_ALT0PH'] = 'wh 
$f_{a2}=1$' +procToLatexMap['wh_ALT_0M'] = 'wh $f_{a3}=1$' +procToLatexMap['wh_ALT_0Mf05'] = 'wh $f_{a3}=0.5$' + +procToLatexMap['ZH_ALT0L1f05ph0'] = 'zh $f_{L1}=0.5$' +procToLatexMap['ZH_ALT0L1'] = 'zh $f_{L1}=1$' +procToLatexMap['ZH_ALT0L1Zgf05ph0'] = 'zh $f_{L1Zg}=0.5$' +procToLatexMap['ZH_ALT0L1Zg'] = 'zh $f_{L1Zg}=1$' +procToLatexMap['ZH_ALT0Mf05ph0'] = 'zh $f_{a3}=0.5$' +procToLatexMap['ZH_ALT0M'] = 'zh $f_{a3}=1$' +procToLatexMap['ZH_ALT0PHf05ph0'] = 'zh $f_{a2}=0.5$' +procToLatexMap['ZH_ALT0PH'] = 'zh $f_{a2}=1$' +procToLatexMap['QQ2HLL'] = 'zh' +procToLatexMap['TTH'] = 'tth' + +procToLatexMap['WMINUSH2HQQ'] = 'wh_{plus}' +procToLatexMap['WPLUSH2HQQ'] = 'wh_{minus}' + +def procToLatex( _proc ): + k = _proc + if k in procToLatexMap: _proc = re.sub( k, procToLatexMap[k], _proc ) + return _proc \ No newline at end of file diff --git a/Yield/run_sequence.sh b/Yield/run_sequence.sh new file mode 100644 index 00000000..d48ccf69 --- /dev/null +++ b/Yield/run_sequence.sh @@ -0,0 +1,102 @@ +outdate=`date +%F` + +STEP=0 +usage(){ + echo "Script to run fits and plots of fit output. dryRun option is for the fitting only, that can be run in batch." + echo "options:" + + echo "-h|--help) " + echo "-s|--step) " + echo "-d|--dryRun) " +} +# options may be followed by one colon to indicate they have a required argument +if ! 
options=$(getopt -u -o s:hd -l help,step:,dryRun -- "$@") +then +# something went wrong, getopt will put out an error message for us +exit 1 +fi +set -- $options +while [ $# -gt 0 ] +do +case $1 in +-h|--help) usage; exit 0;; +-s|--step) STEP=$2; shift ;; +-d|--dryRun) DR=$2; shift ;; +(--) shift; break;; +(-*) usage; echo "$0: error - unrecognized option $1" 1>&2; usage >> /dev/stderr; exit 1;; +(*) break;; +esac +shift +done + +DROPT="" +if [[ $DR ]]; then + DROPT=" --dryRun " +fi + +ext='2024-09-01' + +if [[ $STEP == "append" ]];then + +# for year in "2017" "2018" "2016preVFP" "2016postVFP" + for year in "2017" "2018" "2016preVFP" "2016postVFP" + do + awk 'FNR==1 && NR!=1 {next} {print}' ../Signal/plots/${ext}_year${year}/signalFit/Yield/* > yield_${year}.txt + awk '/GG2H/ {found=1} found' yield_${year}.txt > yield_${year}.txt + sed -i '1i proc cat yield entries' yield_${year}.txt + wc -l yield_${year}.txt + + done +elif [[ $STEP == "t2w" ]]; then + for fit in ${fits[*]} + do + for ext in ${ext2[*]} + do + echo python RunText2Workspace.py --ext ${fit}_${ext} --mode ${fit} + python RunText2Workspace.py --ext ${fit}_${ext} --mode ${fit} + done + done + +elif [[ $STEP == "fit" ]]; then + +for fit in ${fits[*]} + do + for obs in " " + # " --doObserved " + do + for ext in ${ext2[*]} + do + echo python RunFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode $fit ${DROPT} $obs + python RunFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode ${fit} ${DROPT} $obs + done + done + done +elif [[ $STEP == "collect" ]]; then + for obs in " " + # " --doObserved " + do + for fit in ${fits[*]} + do + for ext in ${ext2[*]} + do + python CollectFits.py --inputJson inputs.json --ext ${fit}_${ext} --mode $fit $obs + done + done + done +elif [[ $STEP == "plot" ]]; then + for obs in " " + #" --doObserved " + do + for fit in ${fits[*]} + do + # string="runFits${fit}_TTH_${fit}/profile1D_syst_${fit}_TTH_CMS_zz4l_fai1.root:TTH:2 
runFits${fit}_VBF_${fit}/profile1D_syst_${fit}_VBF_CMS_zz4l_fai1.root:VBF:3 runFits${fit}_VHHAD_${fit}/profile1D_syst_${fit}_VHHAD_CMS_zz4l_fai1.root:VHHAD:4 runFits${fit}_VHMET_${fit}/profile1D_syst_${fit}_VHMET_CMS_zz4l_fai1.root:VH-MET:9 runFits${fit}_VHLEP_${fit}/profile1D_syst_${fit}_VHLEP_CMS_zz4l_fai1.root:VH-LEP:46" + # plot1DScan.py runFits${fit}_GGH_${fit}/profile1D_syst_${fit}_GGH_CMS_zz4l_fai1.root --y-cut 4 --y-max 4 -o plots/Breakdown_${fit} --POI CMS_zz4l_fai1 --main-label GGH --translate ../Plots/pois_fa3.json --others $string + + string="runFits${fit}_NoSyst_${fit}/profile1D_syst_${fit}_NoSyst_CMS_zz4l_fai1.root:NoSyst:2 " + plot1DScan.py runFits${fit}_${fit}/profile1D_syst_${fit}_CMS_zz4l_fai1.root --y-cut 15 --y-max 15 -o plots/Breakdown_Syst_${fit} --POI CMS_zz4l_fai1 --main-label Syst --translate ../Plots/pois_fa3.json --others $string + done + done +else + echo "Step $STEP is not one among append,fit,plot. Exiting." +fi + diff --git a/setup.csh b/setup.csh new file mode 100755 index 00000000..f52d50b2 --- /dev/null +++ b/setup.csh @@ -0,0 +1,6 @@ +# Add tools dir to PYTHONPATH +eval `scramv1 runtime -csh` + +setenv PYTHONPATH "${PYTHON27PATH}" +setenv PYTHONPATH "${PYTHONPATH}:${CMSSW_BASE}/src/flashggFinalFit/tools" +setenv PYTHONPATH "${PYTHONPATH}:${CMSSW_BASE}/src/flashggFinalFit/Signal/tools" diff --git a/setup.sh b/setup.sh index f7f61192..647b2658 100755 --- a/setup.sh +++ b/setup.sh @@ -1,4 +1,7 @@ # Add tools dir to PYTHONPATH eval `scramv1 runtime -sh` +export PYTHONPATH=${PYTHON27PATH} export PYTHONPATH=$PYTHONPATH:${CMSSW_BASE}/src/flashggFinalFit/tools +export PYTHONPATH=$PYTHONPATH:${CMSSW_BASE}/src/flashggFinalFit/Signal/tools +export PYTHONPATH=$PYTHONPATH:${CMSSW_BASE}/src/flashggFinalFit/Background/tools diff --git a/tools/commonObjects.py b/tools/commonObjects.py index a3d38f8f..1ed77c3f 100644 --- a/tools/commonObjects.py +++ b/tools/commonObjects.py @@ -13,10 +13,8 @@ # Centre of mass energy string sqrts__ = "13TeV" -# 
Luminosity map in fb^-1: for using UL 2018 -lumiMap = {'2016':36.33, '2017':41.48, '2018':59.83, 'combined':137.65, 'merged':137.65} -# If using ReReco samples then switch to lumiMap below (missing data in 2018 EGamma data set) -#lumiMap = {'2016':36.33, '2017':41.48, '2018':59.35, 'combined':137.17, 'merged':137.17} +# Luminosity map in fb^-1 +lumiMap = {'2016':36.33, '2016preVFP': 19.51, '2016postVFP': 16.80, '2017':41.48, '2018':59.83, 'combined':137.62, 'merged':137.62} lumiScaleFactor = 1000. # Converting from pb to fb # Constants diff --git a/tools/commonTools.py b/tools/commonTools.py index a7f10ed8..3cf98cef 100644 --- a/tools/commonTools.py +++ b/tools/commonTools.py @@ -45,6 +45,7 @@ def extractListOfCats( _listOfWSFileNames ): def extractListOfCatsFromData( _fileName ): f = ROOT.TFile(_fileName) + print inputWSName__ ws = f.Get(inputWSName__) allData = ws.allData() cats = [] @@ -75,6 +76,8 @@ def signalFromFileName(_fileName): elif "GluGlu" in _fileName: p = "ggh" elif "VBF" in _fileName: p = "vbf" elif "WH" in _fileName: p = "wh" + elif "Wminus" in _fileName: p = "wh" + elif "Wplus" in _fileName: p = "wh" elif "ZH" in _fileName: p = "zh" elif "ttH" in _fileName: p = "tth" elif "THQ" in _fileName: p = "thq" @@ -83,25 +86,128 @@ def signalFromFileName(_fileName): else: print " --> [ERROR]: cannot extract production mode from input file name. Please update tools.commonTools.signalFromFileName" exit(1) + if "JHUGen" in _fileName: + if "0L1Zg" in _fileName: + p += "_ALT_L1Zg" + elif "0L1" in _fileName: + p += "_ALT_L1" + elif "0M" in _fileName: + p += "_ALT_0M" + elif "0PH" in _fileName: + p += "_ALT_0PH" + elif "0PM" in _fileName: + p += "_ALT_0PM" + else: + print " --> [ERROR]: cannot extract production mode from input file name. 
Please update tools.commonTools.signalFromFileName" + exit(1) + if "f05ph0" in _fileName: + p += "f05" return p,d +def extFromFileName(_fileName): + p, d = None, None + + if "GluGlu" in _fileName: p = "GG2H" + elif "VBF" in _fileName: p = "VBF" + elif "Wminus" in _fileName: p = "WMINUSH2HQQ" + elif "Wplus" in _fileName: p = "WPLUSH2HQQ" + elif "WHiggs" in _fileName: p = "-" + elif "ZH_HToGG" in _fileName: p = "QQ2HLL" + elif "ZHiggs" in _fileName: p = "ZH" + elif "ttH" in _fileName: p = "TTH" + else: + print " --> [ERROR]: cannot extract production mode from input file name. Please update tools.commonTools.signalFromFileName" + exit(1) + print(_fileName) + if "JHUGen" in _fileName and "WHiggs0M" in _fileName : p = 'wh_ALT_0M' + elif "JHUGen" in _fileName and "WHiggs0L1f" in _fileName : p = 'WH_ALT0L1' + elif "JHUGen" in _fileName and "WHiggs0PHf05" in _fileName : p = 'WH_ALT0PH' + elif "JHUGen" in _fileName and "WHiggs0L1ToGG" in _fileName : p = 'wh_ALT_L1' + elif "JHUGen" in _fileName and "WHiggs0PH" in _fileName : p = 'WH_ALT0PH' + + elif "JHUGen" in _fileName and "VBFHiggs" in _fileName : + if "0L1Zg" in _fileName: + p += "_ALT_L1Zg" + elif "0L1" in _fileName: + p += "_ALT_L1" + elif "0M" in _fileName: + p += "_ALT_0M" + elif "0PH" in _fileName: + p += "_ALT_0PH" + elif "0PM" in _fileName: + p += "_ALT_0PM" + + + elif "JHUGen" in _fileName and "ZHiggs" in _fileName : + if "0L1Zg" in _fileName: + p += "_ALT0L1Zg" + elif "0L1" in _fileName: + p += "_ALT0L1" + elif "0M" in _fileName: + p += "_ALT0M" + elif "0PH" in _fileName: + p += "_ALT0PH" + + if "f05ph0" in _fileName and (( "VBFHiggs" in _fileName ) or ( "WHiggs0Mf" in _fileName )) : + p += "f05" + elif "f05ph0" in _fileName : + p += "f05ph0" + if p == None: + print " --> [ERROR]: cannot extract production mode from input file name. 
Please update tools.commonTools.signalFromFileName" + exit(1) + return p,d + +# Function to return mass from input file name +def massFromFileName(_fileName): + m = None + # to be done with regexp + if "_M120_" in _fileName: m = 120 + elif "_M125_" in _fileName: m = 125 + elif "_M130_" in _fileName: m = 130 + else: + print " ---> [ERROR]: cannot extract mass from input file name. Please update tools.commonTools.massFromFileName" + return m + # Function for converting STXS process to production mode in dataset name procToDataMap = od() procToDataMap['GG2H'] = 'ggh' procToDataMap['VBF'] = 'vbf' -procToDataMap['WH2HQQ'] = 'wh' -procToDataMap['ZH2HQQ'] = 'zh' +procToDataMap['VBF_ALT_0PM'] = 'vbf_ALT_0PM' +procToDataMap['VBF_ALT_0PH'] = 'vbf_ALT_0PH' +procToDataMap['VBF_ALT_0PHf05'] = 'vbf_ALT_0PHf05' +procToDataMap['VBF_ALT_0M'] = 'vbf_ALT_0M' +procToDataMap['VBF_ALT_0Mf05'] = 'vbf_ALT_0Mf05' +procToDataMap['VBF_ALT_L1'] = 'vbf_ALT_L1' +procToDataMap['VBF_ALT_L1f05'] = 'vbf_ALT_L1f05' +procToDataMap['VBF_ALT_L1Zg'] = 'vbf_ALT_L1Zg' +procToDataMap['VBF_ALT_L1Zgf05'] = 'vbf_ALT_L1Zgf05' +procToDataMap['VH'] = 'wzh' +procToDataMap['WMINUSH2HQQ'] = 'wh' +procToDataMap['WPLUSH2HQQ'] = 'wh' + +procToDataMap['WH_ALT0L1f05ph0'] = 'wh_ALT_L1f05' +procToDataMap['wh_ALT_L1'] = 'wh_ALT_L1' +procToDataMap['WH_ALT0PHf05ph0'] = 'wh_ALT_0PHf05' +procToDataMap['WH_ALT0PH'] = 'wh_ALT_0PH' +procToDataMap['wh_ALT_0M'] = 'wh_ALT_0M' + +procToDataMap['ZH_ALT0L1f05ph0'] = 'zh_ALT_L1f05' +procToDataMap['ZH_ALT0L1'] = 'zh_ALT_L1' +procToDataMap['ZH_ALT0L1Zgf05ph0'] = 'zh_ALT_L1Zgf05' +procToDataMap['ZH_ALT0L1Zg'] = 'zh_ALT_L1Zg' +procToDataMap['ZH_ALT0Mf05ph0'] = 'zh_ALT_0Mf05' +procToDataMap['ZH_ALT0M'] = 'zh_ALT_0M' +procToDataMap['ZH_ALT0PHf05ph0'] = 'zh_ALT_0PHf05' +procToDataMap['ZH_ALT0PH'] = 'zh_ALT_0PH' procToDataMap['QQ2HLNU'] = 'wh' procToDataMap['QQ2HLL'] = 'zh' procToDataMap['TTH'] = 'tth' -procToDataMap['BBH'] = 'bbh' -procToDataMap['THQ'] = 'thq' -procToDataMap['THW'] = 'thw' 
-procToDataMap['GG2HQQ'] = 'ggzh' -procToDataMap['GG2HLL'] = 'ggzh' -procToDataMap['GG2HNUNU'] = 'ggzh' +#procToDataMap['WMINUSH2HQQ'] = 'wh_plus' +#procToDataMap['WPLUSH2HQQ'] = 'wh_minus' + + def procToData( _proc ): - k = _proc.split("_")[0] + k = _proc if k in procToDataMap: _proc = re.sub( k, procToDataMap[k], _proc ) return _proc @@ -114,20 +220,44 @@ def dataToProc( _d ): procToDatacardNameMap = od() procToDatacardNameMap['GG2H'] = "ggH" procToDatacardNameMap['VBF'] = "qqH" -procToDatacardNameMap['WH2HQQ'] = "WH_had" -procToDatacardNameMap["ZH2HQQ"] = "ZH_had" -procToDatacardNameMap["QQ2HLNU"] = "WH_lep" -procToDatacardNameMap["QQ2HLL"] = "ZH_lep" procToDatacardNameMap["TTH"] = "ttH" -procToDatacardNameMap["BBH"] = "bbH" -procToDatacardNameMap["THQ"] = "tHq" -procToDatacardNameMap["THW"] = "tHW" -procToDatacardNameMap["TH"] = "tHq" -procToDatacardNameMap["GG2HQQ"] = "ggZH_had" -procToDatacardNameMap["GG2HLL"] = "ggZH_ll" -procToDatacardNameMap["GG2HNUNU"] = "ggZH_nunu" +procToDatacardNameMap['WMINUSH2HQQ'] = 'wh_minus' +procToDatacardNameMap['WPLUSH2HQQ'] = 'wh_plus' +procToDatacardNameMap['QQ2HLL'] = 'zh' + +procToDatacardNameMap['VBF_ALT_0PH'] = 'qqH_ALT_0PH' +procToDatacardNameMap['VBF_ALT_0PM'] = 'qqH' +procToDatacardNameMap['VBF_ALT_0PHf05'] = 'qqH_ALT_0PHf05' +procToDatacardNameMap['VBF_ALT_0M'] = 'qqH_ALT_0M' +procToDatacardNameMap['VBF_ALT_0Mf05'] = 'qqH_ALT_0Mf05' +procToDatacardNameMap['VBF_ALT_L1'] = 'qqH_ALT_L1' +procToDatacardNameMap['VBF_ALT_L1f05'] = 'qqH_ALT_L1f05' +procToDatacardNameMap['VBF_ALT_L1Zg'] = 'qqH_ALT_L1Zg' +procToDatacardNameMap['VBF_ALT_L1Zgf05'] = 'qqH_ALT_L1Zgf05' + +procToDatacardNameMap['WH_ALT0L1f05ph0'] = 'wh_ALT_L1f05' +procToDatacardNameMap['wh_ALT_L1'] = 'wh_ALT_L1' +procToDatacardNameMap['WH_ALT0PHf05ph0'] = 'wh_ALT_0PHf05' +procToDatacardNameMap['WH_ALT0PH'] = 'wh_ALT_0PH' +procToDatacardNameMap['wh_ALT_0M'] = 'wh_ALT_0M' +procToDatacardNameMap['wh_ALT_0Mf05'] = 'wh_ALT_0Mf05' 
+procToDatacardNameMap['ZH_ALT0L1f05ph0'] = 'zh_ALT_L1f05' +procToDatacardNameMap['ZH_ALT0L1'] = 'zh_ALT_L1' +procToDatacardNameMap['ZH_ALT0L1Zgf05ph0'] = 'zh_ALT_L1Zgf05' +procToDatacardNameMap['ZH_ALT0L1Zg'] = 'zh_ALT_L1Zg' +procToDatacardNameMap['ZH_ALT0Mf05ph0'] = 'zh_ALT_0Mf05' +procToDatacardNameMap['ZH_ALT0M'] = 'zh_ALT_0M' +procToDatacardNameMap['ZH_ALT0PHf05ph0'] = 'zh_ALT_0PHf05' +procToDatacardNameMap['ZH_ALT0PH'] = 'zh_ALT_0PH' + + +#procToDatacardNameMap["WPLUSH2HQQ"] = "WH_lep" +#procToDatacardNameMap["WPLUSH2HQQ"] = "WH_lep" def procToDatacardName( _proc ): - k = _proc.split("_")[0] - if k in procToDatacardNameMap: _proc = re.sub( k, procToDatacardNameMap[k], _proc ) + k = _proc + if k in procToDatacardNameMap: + _proc = re.sub( k, procToDatacardNameMap[k], _proc ) return _proc + +