Merge pull request #327 from TopEFT/renorm_fact_independent
Make renormalization & factorization independent per-process systematics
kmohrman authored Dec 16, 2022
2 parents 3b78eb0 + a28051c commit 65cb5ad
Showing 4 changed files with 40 additions and 6 deletions.
analysis/topEFT/fullR2_run.sh: 4 changes (2 additions, 2 deletions)
@@ -5,11 +5,11 @@ OUT_NAME="example_name"

 # Build the run command for filling SR histos
 CFGS="../../topcoffea/cfg/mc_signal_samples_NDSkim.cfg,../../topcoffea/cfg/mc_background_samples_NDSkim.cfg,../../topcoffea/cfg/data_samples_NDSkim.cfg"
-OPTIONS="--hist-list ana --skip-cr --do-systs -s 50000 --do-np --do-renormfact-envelope -o $OUT_NAME" # For analysis
+OPTIONS="--hist-list ana --skip-cr --do-systs -s 50000 --do-np -o $OUT_NAME" # For analysis

 # Build the run command for filling CR histos
 #CFGS="../../topcoffea/cfg/mc_signal_samples_NDSkim.cfg,../../topcoffea/cfg/mc_background_samples_NDSkim.cfg,../../topcoffea/cfg/mc_background_samples_cr_NDSkim.cfg,../../topcoffea/cfg/data_samples_NDSkim.cfg"
-#OPTIONS="--hist-list cr --skip-sr --do-systs --do-np --do-renormfact-envelope --wc-list ctG -o $OUT_NAME" # For CR plots
+#OPTIONS="--hist-list cr --skip-sr --do-systs --do-np --wc-list ctG -o $OUT_NAME" # For CR plots

 # Run the processor over all Run2 samples
 RUN_COMMAND="time python work_queue_run.py $CFGS $OPTIONS"
analysis/topEFT/make_cards.py: 5 changes (5 additions, 0 deletions)
@@ -113,6 +113,8 @@ def run_condor(dc,pkl_fpath,out_dir,var_lst,ch_lst,chunk_size):
         other_opts.append("--unblind")
     if dc.year_lst:
         other_opts.extend(["--year"," ".join(dc.year_lst)])
+    if dc.drop_syst:
+        other_opts.extend(["--drop-syst"," ".join(dc.drop_syst)])
     other_opts = " ".join(other_opts)

     idx = 0
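For orientation, a minimal standalone sketch (hypothetical values, not taken from the repository) of what the option assembly above produces once dc.drop_syst is populated: the list is joined into a single space-separated fragment that rides along on the condor job command.

# Hypothetical values; mirrors the join logic above.
other_opts = []
drop_syst = ["renorm", "fact"]   # stand-in for dc.drop_syst
if drop_syst:
    other_opts.extend(["--drop-syst", " ".join(drop_syst)])
other_opts = " ".join(other_opts)
print(other_opts)   # --> --drop-syst renorm fact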
@@ -155,6 +157,7 @@ def main():
parser.add_argument("--ch-lst","-c",default=[],action="extend",nargs="+",help="Specify a list of channels to process.")
parser.add_argument("--do-mc-stat",action="store_true",help="Add bin-by-bin statistical uncertainties with the autoMCstats option (for background)")
parser.add_argument("--ignore","-i",default=[],action="extend",nargs="+",help="Specify a list of processes to exclude, must match name from 'sample' axis modulo UL year")
parser.add_argument("--drop-syst",default=[],action="extend",nargs="+",help="Specify one or more template systematics to remove from the datacard")
parser.add_argument("--POI",default=[],help="List of WCs (comma separated)")
parser.add_argument("--year","-y",default=[],action="extend",nargs="+",help="Run over a subset of years")
parser.add_argument("--do-nuisance",action="store_true",help="Include nuisance parameters")
@@ -180,6 +183,7 @@ def main():
     wcs = args.POI
     ignore = args.ignore
     do_nuis = args.do_nuisance
+    drop_syst = args.drop_syst
     unblind = args.unblind
     verbose = args.verbose

@@ -210,6 +214,7 @@
"do_mc_stat": do_mc_stat,
"ignore": ignore,
"do_nuisance": do_nuis,
"drop_syst": drop_syst,
"unblind": unblind,
"verbose": verbose,
"year_lst": years,
analysis/topEFT/topeft.py: 3 changes (1 addition, 2 deletions)
@@ -280,7 +280,7 @@ def process(self, events):
         ]
         wgt_correction_syst_lst = [
             "lepSF_muonUp","lepSF_muonDown","lepSF_elecUp","lepSF_elecDown",f"btagSFbc_{year}Up",f"btagSFbc_{year}Down","btagSFbc_corrUp","btagSFbc_corrDown",f"btagSFlight_{year}Up",f"btagSFlight_{year}Down","btagSFlight_corrUp","btagSFlight_corrDown","PUUp","PUDown","PreFiringUp","PreFiringDown",f"triggerSF_{year}Up",f"triggerSF_{year}Down", # Exp systs
-            "FSRUp","FSRDown","ISRUp","ISRDown","renormfactUp","renormfactDown", "renormUp","renormDown","factUp","factDown", # Theory systs
+            "FSRUp","FSRDown","ISRUp","ISRDown","renormUp","renormDown","factUp","factDown", # Theory systs
         ]
         data_syst_lst = [
             "FFUp","FFDown","FFptUp","FFptDown","FFetaUp","FFetaDown",f"FFcloseEl_{year}Up",f"FFcloseEl_{year}Down",f"FFcloseMu_{year}Up",f"FFcloseMu_{year}Down"
@@ -309,7 +309,6 @@ def process(self, events):
             weights_obj_base.add('ISR', events.nom, events.ISRUp*(sow/sow_ISRUp), events.ISRDown*(sow/sow_ISRDown))
             weights_obj_base.add('FSR', events.nom, events.FSRUp*(sow/sow_FSRUp), events.FSRDown*(sow/sow_FSRDown))
             # renorm/fact scale
-            weights_obj_base.add('renormfact', events.nom, events.renormfactUp*(sow/sow_renormfactUp), events.renormfactDown*(sow/sow_renormfactDown))
             weights_obj_base.add('renorm', events.nom, events.renormUp*(sow/sow_renormUp), events.renormDown*(sow/sow_renormDown))
             weights_obj_base.add('fact', events.nom, events.factUp*(sow/sow_factUp), events.factDown*(sow/sow_factDown))
             # Prefiring and PU (note prefire weights only available in nanoAODv9)
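For context, a standalone toy of the weight registration pattern above, assuming the coffea analysis_tools Weights API and made-up arrays in place of events.nom, events.renormUp, etc. The sow/sow_renormUp factors (presumably per-sample sums of weights computed elsewhere in the processor) rescale each variation back to the nominal normalization, so only its shape and acceptance effect survives; with renorm and fact registered as two separate entries, each now carries its own Up/Down pair instead of a single combined renormfact envelope.

import numpy as np
from coffea.analysis_tools import Weights  # assumed API

# Toy stand-ins for events.nom, events.renormUp, events.renormDown.
nom         = np.ones(5)
renorm_up   = np.full(5, 1.2)
renorm_down = np.full(5, 0.8)

# Stand-ins for the per-sample sums of weights (sow, sow_renormUp, sow_renormDown).
sow, sow_up, sow_down = nom.sum(), renorm_up.sum(), renorm_down.sum()

weights = Weights(5)
# Rescaling by sow/sow_var keeps the varied templates normalized to the nominal yield;
# a perfectly flat +/-20% toy variation like this one is removed entirely by the rescaling.
weights.add("renorm", nom, renorm_up * (sow / sow_up), renorm_down * (sow / sow_down))

print(weights.weight())              # nominal per-event weights
print(weights.weight("renormUp"))    # weights with the renormalization-scale up variation applied
print(sorted(weights.variations))    # ['renormDown', 'renormUp']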
topcoffea/modules/datacard_tools.py: 34 changes (32 additions, 2 deletions)
@@ -308,6 +308,7 @@ def __init__(self,pkl_path,**kwargs):
         self.year_lst = kwargs.pop("year_lst",[])
         self.do_sm = kwargs.pop("do_sm",False)
         self.do_nuisance = kwargs.pop("do_nuisance",False)
+        self.drop_syst = kwargs.pop("drop_syst",[])
         self.out_dir = kwargs.pop("out_dir",".")
         self.var_lst = kwargs.pop("var_lst",[])
         self.do_mc_stat = kwargs.pop("do_mc_stat",False)
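The constructor reads the new key with the usual kwargs.pop pattern, so callers that never pass drop_syst fall back to an empty list and nothing is removed. A tiny runnable sketch of that pattern (the class and values here are hypothetical, not the class defined in datacard_tools.py):

# Hypothetical stand-in illustrating the pop-with-default pattern used in __init__.
class Example:
    def __init__(self, pkl_path, **kwargs):
        self.drop_syst   = kwargs.pop("drop_syst", [])
        self.do_nuisance = kwargs.pop("do_nuisance", False)

ex = Example("histos/example.pkl.gz", drop_syst=["renorm", "fact"])
print(ex.drop_syst)     # --> ['renorm', 'fact']
print(ex.do_nuisance)   # --> False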
@@ -431,6 +432,20 @@ def read(self,fpath):
                 # Remove all shape systematics
                 h = prune_axis(h,"systematic",["nominal"])

+            if self.drop_syst:
+                to_drop = set()
+                for syst in self.drop_syst:
+                    if syst.endswith("Up"):
+                        to_drop.add(syst)
+                    elif syst.endswith("Down"):
+                        to_drop.add(syst)
+                    else:
+                        to_drop.add(f"{syst}Up")
+                        to_drop.add(f"{syst}Down")
+                for x in to_drop:
+                    print(f"Removing systematic: {x}")
+                h = h.remove(list(to_drop),"systematic")
+
             if km_dist != "njets":
                 edge_arr = self.BINNING[km_dist] + [h.axis(km_dist).edges()[-1]]
                 h = h.rebin(km_dist,Bin(km_dist,h.axis(km_dist).label,edge_arr))
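The intent of the block above: a name passed with an explicit Up/Down suffix drops only that one template, while a bare name drops both variations before the histogram is rebinned. A self-contained sketch of the same expansion (expand_drop_list is a hypothetical helper, not part of the module):

def expand_drop_list(drop_syst):
    # Bare names drop both templates; explicit "...Up"/"...Down" names drop only that one.
    to_drop = set()
    for syst in drop_syst:
        if syst.endswith("Up") or syst.endswith("Down"):
            to_drop.add(syst)
        else:
            to_drop.add(f"{syst}Up")
            to_drop.add(f"{syst}Down")
    return to_drop

print(sorted(expand_drop_list(["renorm", "ISRUp"])))
# --> ['ISRUp', 'renormDown', 'renormUp']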
@@ -837,8 +852,23 @@ def analyze(self,km_dist,ch,selected_wcs, crop_negative_bins):
                 hist_name = f"{proc_name}_{syst}"
                 # Systematics in the text datacard don't have the Up/Down postfix
                 syst_base = syst.replace("Up","").replace("Down","")
-                all_shapes.add(syst_base)
-                text_card_info[proc_name]["shapes"].add(syst_base)
+                if syst_base in ["renorm","fact"]: # Note: Requires exact matches
+                    # We want to split the renorm and fact systematics to be uncorrelated
+                    # between processes, so we modify the systematic name to make combine
+                    # treat them as separate systematics. Also, we use 'p' instead of
+                    # 'proc_name' for renaming since we want the decomposed EFT terms
+                    # for a particular process to share the same nuisance parameter
+                    # TODO: We should move the hardcoded list in the if statement somewhere
+                    # else to make it less buried in the weeds
+                    split_syst = f"{syst_base}_{p}"
+                    hist_name = hist_name.replace(syst_base,split_syst)
+                    all_shapes.add(split_syst)
+                    text_card_info[proc_name]["shapes"].add(split_syst)
+                    if self.verbose:
+                        print(f"\t {hist_name}: Splitting {syst_base} --> {split_syst}")
+                else:
+                    all_shapes.add(syst_base)
+                    text_card_info[proc_name]["shapes"].add(syst_base)
                 syst_width = max(len(syst),syst_width)
                 zero_out_sumw2 = p != "fakes" # Zero out sumw2 for all proc but fakes, so that we only do auto stats for fakes
                 f[hist_name] = to_hist(arr,hist_name,zero_wgts=zero_out_sumw2)
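A hedged illustration of the renaming performed above (process and histogram names are invented for the example, and split_name is a hypothetical helper): appending the process name to renorm and fact makes combine treat, say, renorm_ttH and renorm_tllq as separate, uncorrelated nuisance parameters, while the decomposed EFT terms of one process, which share the same p, still map onto a single parameter; every other systematic keeps its shared name.

# Hypothetical helper reproducing the per-process renaming of renorm/fact.
def split_name(hist_name, syst, p):
    syst_base = syst.replace("Up", "").replace("Down", "")
    if syst_base in ["renorm", "fact"]:   # only these are decorrelated between processes
        return hist_name.replace(syst_base, f"{syst_base}_{p}")
    return hist_name

print(split_name("ttH_sm_renormUp",      "renormUp", "ttH"))   # --> ttH_sm_renorm_ttHUp
print(split_name("ttH_lin_ctG_renormUp", "renormUp", "ttH"))   # --> ttH_lin_ctG_renorm_ttHUp
print(split_name("tllq_sm_renormUp",     "renormUp", "tllq"))  # --> tllq_sm_renorm_tllqUp
print(split_name("ttH_sm_ISRUp",         "ISRUp",    "ttH"))   # --> ttH_sm_ISRUp (unchanged)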
