diff --git a/.gitignore b/.gitignore index c17f56d..5d25320 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ # Ignore root files +*.tgz *.xml *.1 *.png @@ -168,3 +169,6 @@ br.sm2 *maincc *linkdef.h JHUGenMELA/ +cutFlow.json +yaml-cpp/ +nanoAOD_skim.wiki/ diff --git a/GenVarsProducer.py b/GenVarsProducer.py index d94fcfb..cced311 100644 --- a/GenVarsProducer.py +++ b/GenVarsProducer.py @@ -9,6 +9,7 @@ class GenVarsProducer(Module): def __init__(self): + self.DEBUG = False pass def beginJob(self): pass @@ -47,7 +48,7 @@ def beginFile(self, inputFile, outputFile, inputTree, wrappedOutputTree): def endFile(self, inputFile, outputFile, inputTree, wrappedOutputTree): pass def getParentID(self,particle,genParticles): - if particle.genPartIdxMother is -1: #No parent in record, return ID of original particle + if particle.genPartIdxMother == -1: #No parent in record, return ID of original particle return particle.pdgId elif genParticles[particle.genPartIdxMother].pdgId is particle.pdgId: #'Parent' is self, keep iterating return self.getParentID(genParticles[particle.genPartIdxMother],genParticles) @@ -60,7 +61,7 @@ def analyze(self, event): genmet = Object(event, "GenMET", None) GenMET_pt = None GenMET_pt = genmet.pt - #print("GenMET_pt: {}".format(GenMET_pt)) + if self.DEBUG: print("GenMET_pt: {}".format(GenMET_pt)) # Loop over gen particles to find Higgs and its each respective decay products. Then keep all kinematics information of Higgs and its respective decay products along with its PDG ID and status flag. 
higgs = None @@ -82,18 +83,18 @@ def analyze(self, event): neutrino2_pz = 0.0 delta_pz_neutrino = 0.0 pz1 = 0.0 - - print("length of genParticles: {}".format(len(genParticles))) + + if self.DEBUG: print("length of genParticles: {}".format(len(genParticles))) for idx, particle in enumerate(genParticles): - print("DEBUG - line 70: Index: {}, Particle pdgID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, particle.pdgId, self.getParentID(particle, genParticles), particle.genPartIdxMother, particle.statusFlags >> 13 & 1)) - + if self.DEBUG: print("DEBUG - line 70: Index: {}, Particle pdgID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, particle.pdgId, self.getParentID(particle, genParticles), particle.genPartIdxMother, particle.statusFlags >> 13 & 1)) + if particle.pdgId == 25 and (particle.statusFlags >> 13 & 1): higgs = particle - print("DEBUG - line 74 (found higgs): Index: {}, Particle ID: {}, MotherIdx: {}, Parent ID: {}, Status: {}".format(idx, particle.pdgId, self.getParentID(particle, genParticles), particle.genPartIdxMother, particle.statusFlags >> 13 & 1)) - + if self.DEBUG: print("DEBUG - line 74 (found higgs): Index: {}, Particle ID: {}, MotherIdx: {}, Parent ID: {}, Status: {}".format(idx, particle.pdgId, self.getParentID(particle, genParticles), particle.genPartIdxMother, particle.statusFlags >> 13 & 1)) + elif (abs(particle.pdgId) == 23) and (particle.statusFlags >> 13 & 1) and self.getParentID(particle, genParticles) == 25: - print("DEBUG - line 76 (found Z boson, daughter of higgs): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, particle.pdgId, self.getParentID(particle, genParticles), particle.genPartIdxMother, particle.statusFlags >> 13 & 1)) - + if self.DEBUG: print("DEBUG - line 76 (found Z boson, daughter of higgs): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, particle.pdgId, self.getParentID(particle, genParticles), particle.genPartIdxMother, particle.statusFlags >> 
13 & 1)) + if v1 is None or v2 is None: v1_daughters = [] v2_daughters = [] @@ -101,24 +102,24 @@ def analyze(self, event): if abs(daughter1.pdgId) in [11, 13, 15] and daughter1.genPartIdxMother == idx and self.getParentID(daughter1, genParticles) == 23 and daughter1.statusFlags >> 13 & 1: v1 = particle found_Z1 = True - print("DEBUG - line 81 (found Z1 boson, daughter of higgs): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v1.pdgId, self.getParentID(v1, genParticles), v1.genPartIdxMother, v1.statusFlags >> 13 & 1)) + if self.DEBUG: print("DEBUG - line 81 (found Z1 boson, daughter of higgs): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v1.pdgId, self.getParentID(v1, genParticles), v1.genPartIdxMother, v1.statusFlags >> 13 & 1)) v1_daughters.append(daughter1) #elif temp_boson is None: #temp_boson = particle #if v2 is None: #v2 = temp_boson #v2_daughters.append(daughter1) - n = len(v1_daughters) + n = len(v1_daughters) if len(v1_daughters) == 2: for i in range(n): v1_decay_products = v1_daughters - print("DEBUG - line 92 ( 2 daughters of Z1 boson): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v1_decay_products[i].pdgId, self.getParentID(v1_decay_products[i], genParticles), v1_decay_products[i].genPartIdxMother, v1_decay_products[i].statusFlags >> 13 & 1)) - + if self.DEBUG: print("DEBUG - line 92 ( 2 daughters of Z1 boson): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v1_decay_products[i].pdgId, self.getParentID(v1_decay_products[i], genParticles), v1_decay_products[i].genPartIdxMother, v1_decay_products[i].statusFlags >> 13 & 1)) + #m = len(v2_daughters) #if len(v2_daughters) == 2: #for i in range(m): #v1_decay_products = v1_daughters - #print("DEBUG - line 108 ( 2 daughters of Z2 boson): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v2_decay_products[i].pdgId, 
self.getParentID(v2_decay_products[i], genParticles), v2_decay_products[i].genPartIdxMother, v2_decay_products[i].statusFlags >> 13 & 1)) + #self.DEBUG: print("DEBUG - line 108 ( 2 daughters of Z2 boson): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v2_decay_products[i].pdgId, self.getParentID(v2_decay_products[i], genParticles), v2_decay_products[i].genPartIdxMother, v2_decay_products[i].statusFlags >> 13 & 1)) #elif abs(v1_daughters[0].pdgId) in [1, 2, 3, 4, 5] and abs(v1_daughters[1].pdgId) in [1, 2, 3, 4, 5]: #v1_decay_products = v1_daughters #if v2 is None: @@ -129,7 +130,7 @@ def analyze(self, event): if abs(daughter2.pdgId) in [12, 14, 16] and daughter2.genPartIdxMother == idx and self.getParentID(daughter2, genParticles) == 23 and daughter2.statusFlags >> 13 & 1: v2 = particle found_Z2 = True - print("DEBUG - line 115 (found Z2 boson, daughter of higgs): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v2.pdgId, self.getParentID(v2, genParticles), v2.genPartIdxMother, v2.statusFlags >> 13 & 1)) + if self.DEBUG: print("DEBUG - line 115 (found Z2 boson, daughter of higgs): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v2.pdgId, self.getParentID(v2, genParticles), v2.genPartIdxMother, v2.statusFlags >> 13 & 1)) v2_daughters.append(daughter2) #elif temp_boson1 is None: #temp_boson1 = particle @@ -140,8 +141,8 @@ def analyze(self, event): if len(v2_daughters) == 2: for i in range(m): v2_decay_products = v2_daughters - print("DEBUG - line 126 ( 2 daughters of Z2 boson): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, v2_decay_products[i].pdgId, self.getParentID(v2_decay_products[i], genParticles), v2_decay_products[i].genPartIdxMother, v2_decay_products[i].statusFlags >> 13 & 1)) - + if self.DEBUG: print("DEBUG - line 126 ( 2 daughters of Z2 boson): Index: {}, Particle ID: {}, Parent ID: {}, MotherIdx: {}, Status: {}".format(idx, 
v2_decay_products[i].pdgId, self.getParentID(v2_decay_products[i], genParticles), v2_decay_products[i].genPartIdxMother, v2_decay_products[i].statusFlags >> 13 & 1)) + if higgs is not None: higgs_pt = higgs.pt higgs_eta = higgs.eta @@ -158,12 +159,12 @@ def analyze(self, event): v1_eta = v1.eta v1_phi = v1.phi v1_mass = v1.mass - + #Z1 = ROOT.TLorentzVector() #Z1.SetPtEtaPhiM(v1_pt, v1_eta, v1_phi, v1_mass) #boost_Z1 = Z1.BoostVector() #boost_Z1_mag = boost_Z1.Mag() - + else: v1_pt = -1. v1_eta = 0. @@ -177,15 +178,15 @@ def analyze(self, event): v1_decay_products_eta = [daughter.eta for daughter in v1_decay_products] v1_decay_products_phi = [daughter.phi for daughter in v1_decay_products] v1_decay_products_mass = [daughter.mass for daughter in v1_decay_products] - print("v1_decay_products_pt:", v1_decay_products_pt, type(v1_decay_products_pt)) + if self.DEBUG: print("v1_decay_products_pt:", v1_decay_products_pt, type(v1_decay_products_pt)) #pz = v1_decay_products_pt[0] * math.sinh(v1_decay_products_eta[0]) - #print("pz of neutrino1:", pz, type(pz)) + #self.DEBUG: print("pz of neutrino1:", pz, type(pz)) #for i in range(2): #v1_decay_products[i] = ROOT.TLorentzVector() #v1_decay_products[i].SetPxPyPzE(0.0, 0.0, 0.0, 0.0) #neutrino1_pz = v1_decay_products[0].Pz() #neutrino2_pz = v1_decay_products[1].Pz() - #print("neutrino1_pz:", neutrino1_pz, type(neutrino1_pz)) + #self.DEBUG: print("neutrino1_pz:", neutrino1_pz, type(neutrino1_pz)) else: v1_decay_products_pt = [-1.] v1_decay_products_eta = [0.] @@ -198,13 +199,13 @@ def analyze(self, event): v2_eta = v2.eta v2_phi = v2.phi v2_mass = v2.mass - print("v2_mass:", v2_mass, type(v2_mass)) - + if self.DEBUG: print("v2_mass:", v2_mass, type(v2_mass)) + #Z2 = ROOT.TLorentzVector() #Z2.SetPtEtaPhiM(v2_pt, v2_eta, v2_phi, v2_mass) #boost_Z2 = Z2.BoostVector() #boost_Z2_mag = boost_Z2.Mag() - + else: v2_pt = -1. v2_eta = 0. 
@@ -217,15 +218,15 @@ def analyze(self, event): v2_decay_products_eta = [daughter.eta for daughter in v2_decay_products] v2_decay_products_phi = [daughter.phi for daughter in v2_decay_products] v2_decay_products_mass = [daughter.mass for daughter in v2_decay_products] - print("v2_decay_products_pt:", v2_decay_products_pt, type(v2_decay_products_pt)) + if self.DEBUG: print("v2_decay_products_pt:", v2_decay_products_pt, type(v2_decay_products_pt)) pz1 = v2_decay_products_pt[0] * math.sinh(v2_decay_products_eta[0]) - neutrino1_pz = abs(pz1) - print("pz of neutrino1:", neutrino1_pz, type(neutrino1_pz)) + neutrino1_pz = abs(pz1) + if self.DEBUG: print("pz of neutrino1:", neutrino1_pz, type(neutrino1_pz)) pz2 = v2_decay_products_pt[1] * math.sinh(v2_decay_products_eta[1]) neutrino2_pz = abs(pz2) - print("pz of neutrino2:", neutrino2_pz, type(neutrino2_pz)) + if self.DEBUG: print("pz of neutrino2:", neutrino2_pz, type(neutrino2_pz)) delta_pz_neutrino = neutrino1_pz - neutrino2_pz - print("delta_pz_neutrino:", delta_pz_neutrino, type(delta_pz_neutrino)) + if self.DEBUG: print("delta_pz_neutrino:", delta_pz_neutrino, type(delta_pz_neutrino)) ###defining Pz of neutrino as a lorentz vector v2_decay_products[0] = ROOT.TLorentzVector() @@ -233,9 +234,9 @@ def analyze(self, event): v2_decay_products[1] = ROOT.TLorentzVector() v2_decay_products[1].SetPtEtaPhiM(v2_decay_products_pt[1], v2_decay_products_eta[1], v2_decay_products_phi[1], v2_decay_products_mass[1]) pz1 = v2_decay_products[0].Pz() - print("pz of neutrino1_new:", pz1, type(pz1)) + if self.DEBUG: print("pz of neutrino1_new:", pz1, type(pz1)) pz2 = v2_decay_products[1].Pz() - print("pz of neutrino2_new:", pz2, type(pz2)) + if self.DEBUG: print("pz of neutrino2_new:", pz2, type(pz2)) else: v2_decay_products_pt = [-1.] v2_decay_products_eta = [0.] 
@@ -245,8 +246,8 @@ def analyze(self, event): #Pz_list = [] #Pz = ROOT.TMath.Sqrt((v2_mass ** 2) / 4 - GenMET_pt) #Pz_list.append(Pz) - #print("Pz:", Pz_list) - + #self.DEBUG: print("Pz:", Pz_list) + ## Calculating Boost if found_Z1 == True and found_Z2 == True: @@ -255,14 +256,16 @@ def analyze(self, event): boost_Z1_mag = boost_Z1.Mag() Z2.SetPtEtaPhiM(v2_pt, v2_eta, v2_phi, v2_mass) boost_Z2 = Z2.BoostVector() - boost_Z2_mag = boost_Z2.Mag() + boost_Z2_mag = boost_Z2.Mag() boost_diff_mag = boost_Z1_mag - boost_Z2_mag - print("delta boost: {}".format(boost_diff_mag)) + if self.DEBUG: print("delta boost: {}".format(boost_diff_mag)) #self.out.fillBranch("Boostdiff", boost_diff_mag) - + #self.out.fillBranch("Pz_neutrino", Pz) self.out.fillBranch("Pz_neutrino1", pz1) self.out.fillBranch("delta_pz_neutrino", delta_pz_neutrino) + + # self.out.fillBranch("Pz_neutrino", Pz) self.out.fillBranch("BoostZ1", boost_Z1_mag) self.out.fillBranch("BoostZ2", boost_Z2_mag) self.out.fillBranch("Boostdiff", boost_diff_mag) @@ -286,6 +289,6 @@ def analyze(self, event): self.out.fillBranch("genV2DaughterEta", v2_decay_products_eta) self.out.fillBranch("genV2DaughterPhi", v2_decay_products_phi) self.out.fillBranch("genV2DaughterMass", v2_decay_products_mass) - - print("#######################Event end ################################") + + if self.DEBUG: print("#######################Event end ################################") return True diff --git a/H4LCppModule.py b/H4LCppModule.py index 6d0271e..1bf641e 100644 --- a/H4LCppModule.py +++ b/H4LCppModule.py @@ -27,16 +27,24 @@ def __init__(self, year, cfgFile, isMC, isFSR, cutFlowJSONFile, DEBUG=False): def loadLibraries(self): base_path = os.getenv('CMSSW_BASE') + '/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim' + yaml_cpp_path = os.path.join(base_path, "external/yaml-cpp") + + # Adding yaml-cpp headers to the include path + ROOT.gSystem.AddIncludePath("-I%s/include" % yaml_cpp_path) libraries = [ + 
'libmcfm_710.so', 'libJHUGenMELAMELA.so', 'libjhugenmela.so', - 'libmcfm_707.so', 'libcollier.so', ] for lib in libraries: - fullPath = os.path.join(base_path, 'JHUGenMELA/MELA/data/slc7_amd64_gcc700', lib) + fullPath = os.path.join(base_path, 'JHUGenMELA/MELA/data/el9_amd64_gcc12', lib) ROOT.gSystem.Load(fullPath) + # Load the yaml-cpp library + yaml_cpp_lib_path = os.path.join(yaml_cpp_path, "build") + ROOT.gSystem.Load(os.path.join(yaml_cpp_lib_path, "libyaml-cpp.so")) + # Load the C++ module if "/H4LTools_cc.so" not in ROOT.gSystem.GetLibraries(): print("Load C++ module") diff --git a/Input_2018.yml b/Input_2018.yml index 5447b7a..7095be3 100644 --- a/Input_2018.yml +++ b/Input_2018.yml @@ -3,6 +3,9 @@ TriggerChannels: - Triggers_HZZ4l - Triggers_HZZ2l2q - Triggers_HZZ2l2nu + - Triggers_HZZ2l2nu_SingleLep + - Triggers_HZZ2l2nu_DiLep + - Triggers_HZZ2l2nu_HighPt Triggers_HZZ4l: - event.HLT_Ele32_WPTight_Gsf @@ -53,6 +56,44 @@ Triggers_HZZ2l2nu: - event.HLT_Photon165_R9Id90_HE10_IsoM - event.HLT_Photon300_NoHE + +Triggers_HZZ2l2nu_SingleLep: + # Single Mu + - event.HLT_IsoMu20 + - event.HLT_IsoMu24 + - event.HLT_IsoMu27 + # Single Electron + - event.HLT_Ele32_WPTight_Gsf + +Triggers_HZZ2l2nu_DiLep: + # DoubleMu + - event.HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass3p8 + # Double Electron + - event.HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL + - event.HLT_DoubleEle25_CaloIdL_MW + +Triggers_HZZ2l2nu_HighPt: + # Double Electron + - event.HLT_Ele27_Ele37_CaloIdL_MW + # Single Electron + - event.HLT_Ele35_WPTight_Gsf + - event.HLT_Ele35_WPTight_Gsf_L1EGMT + - event.HLT_Ele38_WPTight_Gsf + - event.HLT_Ele40_WPTight_Gsf + - event.HLT_Ele50_IsoVVVL_PFHT450 + - event.HLT_Ele115_CaloIdVT_GsfTrkIdT + - event.HLT_Ele135_CaloIdVT_GsfTrkIdT + - event.HLT_Ele145_CaloIdVT_GsfTrkIdT + - event.HLT_Ele200_CaloIdVT_GsfTrkIdT + - event.HLT_Ele250_CaloIdVT_GsfTrkIdT + - event.HLT_Ele300_CaloIdVT_GsfTrkIdT + # DoubleMu + - event.HLT_Mu37_TkMu27 + # SingleMu + - event.HLT_Mu50 + - event.HLT_Mu55 
+ + Zmass: 91.1876 MZ1cut: 40 MZcut: diff --git a/JetSFMaker.py b/JetSFMaker.py index 5003c05..1fb4a67 100644 --- a/JetSFMaker.py +++ b/JetSFMaker.py @@ -15,8 +15,20 @@ class JetSFMaker(Module): def __init__(self, cmssw, puid_sf_config='PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/data/JetPUID_cfg.py'): cmssw_base = os.getenv('CMSSW_BASE') - with open(cmssw_base + '/src/' + puid_sf_config) as src: - exec(src) + + # Dictionary to hold the local variables from the exec + # Reference: https://stackoverflow.com/a/65647108 + local_vars = {} + + with open(cmssw_base + '/src/' + puid_sf_config) as src_file: + src_code = src_file.read() + exec(src_code, {}, local_vars) + + # Extract jet_puid_sf from local_vars + if 'jet_puid_sf' in local_vars: + jet_puid_sf = local_vars['jet_puid_sf'] + else: + raise NameError("jet_puid_sf is not defined in the provided configuration file.") puid_sf_cfg = jet_puid_sf[cmssw] diff --git a/README.md b/README.md index f754ab6..5e5bd89 100644 --- a/README.md +++ b/README.md @@ -6,8 +6,8 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. 1. Step: 1: Get CMSSW release ```bash - cmsrel CMSSW_10_6_30 - cd CMSSW_10_6_30/src + cmsrel CMSSW_14_0_2 + cd CMSSW_14_0_2/src cmsenv ``` @@ -16,7 +16,8 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. ```bash git clone git@github.com:cms-nanoAOD/nanoAOD-tools.git PhysicsTools/NanoAODTools cd PhysicsTools/NanoAODTools - git checkout 65359982275c476834ad4b37363d658166881f12 # Updated to commit on 16 June 2023 in official nanoAOD-tools + git checkout d163c18096fe2c5963ff5a9764bb420b46632178 # Updated to commit on 6 Dec 2023 in official nanoAOD-tools + git apply ../external/nanoAODTools_py2to3.patch ``` 3. Step: 3: Get our analysis repository @@ -25,11 +26,17 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. 
cd $CMSSW_BASE/src git clone git@github.com:ram1123/nanoAOD_skim.git PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim cd PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim - git checkout HZZ_Analysis - cd - - cmsenv - # patch PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/nanoAOD_tools.patch + git checkout Anusreevijay769-HZZ_Analysis_2l2q_v2_dev + git clone git@github.com:jbeder/yaml-cpp.git external/yaml-cpp + cd external/yaml-cpp/ + git apply ../yamlcpp_pkg_py2to3.patch + mkdir build + cd build + cmake3 .. -DBUILD_SHARED_LIBS=ON + cmake3 --build . + cd $CMSSW_BASE/src cp PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/data/btag/*.csv PhysicsTools/NanoAODTools/data/btagSF/. + # FIXME: Apply some patches scram b voms-proxy-init --voms cms --valid 168:00 ``` @@ -44,17 +51,21 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. ```bash cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim - git clone -b v2.3.5 https://github.com/JHUGen/JHUGenMELA + git clone -b v2.4.2 https://github.com/JHUGen/JHUGenMELA + cd JHUGenMELA + git apply ../external/JHUGen_py2to3.patch + cd .. sh JHUGenMELA/MELA/setup.sh -j 8 - cd JHUGenMELA/MELA - make + chmod +x JHUGenMELA/MELA/data/el9_amd64_gcc12/libmcfm_710.so + chmod +x JHUGenMELA/MELA/data/el9_amd64_gcc12/libjhugenmela.so ``` 4. Step: 4: interactive running ```bash cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim - python post_proc.py + export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/afs/cern.ch/work/r/rasharma/h2l2nu/checkNewSetup_15July2024/CMSSW_14_0_2/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 + python3 post_proc.py ``` 5. batch job submission. 
diff --git a/condor_setup_lxplus.py b/condor_setup_lxplus.py index 5379af0..4bd6c8d 100644 --- a/condor_setup_lxplus.py +++ b/condor_setup_lxplus.py @@ -41,7 +41,7 @@ def main(args): # Create log files import infoCreaterGit - SumamryOfCurrentSubmission = raw_input("\n\nWrite summary for current job submission: ") + SumamryOfCurrentSubmission = input("\n\nWrite summary for current job submission: ") # in python3 raw_input is renamed as input infoLogFiles = infoCreaterGit.BasicInfoCreater('summary.dat',SumamryOfCurrentSubmission) infoLogFiles.generate_git_patch_and_log() @@ -64,27 +64,26 @@ def main(args): os.system('xrdcp ' + CMSSWRel+".tgz" + ' root://eosuser.cern.ch/'+storeDir+'/' + CMSSWRel+".tgz") post_proc_to_run = "post_proc.py" - command = "python "+post_proc_to_run + command = "python3 "+post_proc_to_run + condor_arguments_list = [] # A list that contains all the arguments to be passed for each job - Transfer_Input_Files = ("keep_and_drop.txt") # FIXME: Generalise this. - # Transfer_Input_Files = ("Cert_271036-284044_13TeV_PromptReco_Collisions16_JSON.txt, " + - # "Cert_294927-306462_13TeV_PromptReco_Collisions17_JSON.txt, " + - # "Cert_314472-325175_13TeV_PromptReco_Collisions18_JSON.txt, " + - # "keep_and_drop_data.txt") + outjdl_file = open(condor_file_name+".jdl","w") + condor_queue = "espresso" if args.debug else condor_queue + outjdl_file.write(f"""+JobFlavour = "{condor_queue}" +Executable = {condor_file_name}.sh +Universe = vanilla +Notification = ERROR +Should_Transfer_Files = NO +x509userproxy = $ENV(X509_USER_PROXY) +Output = {output_log_path}/$(logtxt)_$(Process).stdout +Error = {output_log_path}/$(logtxt)_$(Process).err +Log = {output_log_path}/$(logtxt)_$(Process).log +Arguments = "$(infile) $(outfile) $(eospath) $(outfilename)" +queue infile, outfile, eospath, outfilename, logtxt from {condor_file_name}.txt +""") + outjdl_file.close() - # with open('input_data_Files/sample_list_v6_2017_campaign.dat') as in_file: with 
open('input_data_Files/'+InputFileFromWhereReadDASNames) as in_file: - outjdl_file = open(condor_file_name+".jdl","w") - outjdl_file.write("+JobFlavour = \""+condor_queue+"\"\n") - outjdl_file.write("Executable = "+condor_file_name+".sh\n") - outjdl_file.write("Universe = vanilla\n") - outjdl_file.write("Notification = ERROR\n") - outjdl_file.write("Should_Transfer_Files = YES\n") - outjdl_file.write("WhenToTransferOutput = ON_EXIT\n") - outjdl_file.write("Transfer_Input_Files = "+Transfer_Input_Files + ", " + post_proc_to_run+"\n") - outjdl_file.write("x509userproxy = $ENV(X509_USER_PROXY)\n") - outjdl_file.write("requirements = TARGET.OpSysAndVer =?= \"AlmaLinux9\"\n") - outjdl_file.write("MY.WantOS = \"el7\"\n") count = 0 count_jobs = 0 output_string_list = [] @@ -147,11 +146,15 @@ def main(args): # print "=> ",root_file count_root_files+=1 count_jobs += 1 - outjdl_file.write("Output = "+output_log_path+"/"+sample_name+"_$(Process).stdout\n") - outjdl_file.write("Error = "+output_log_path+"/"+sample_name+"_$(Process).err\n") - outjdl_file.write("Log = "+output_log_path+"/"+sample_name+"_$(Process).log\n") - outjdl_file.write("Arguments = "+(xrd_redirector+root_file)+" "+output_path+" "+EOS_Output_path+ " " + (root_file.split('/')[-1]).split('.')[0] + "\n") - outjdl_file.write("Queue \n") + condor_arguments_list.append( + ( + xrd_redirector + root_file, + output_path, + EOS_Output_path, + (root_file.split("/")[-1]).split(".")[0], + output_path.split("/")[-2], # This argument is used for the log file name + ) + ) if args.debug: # break the for loop after 1 iteration to submit only 1 job break @@ -160,64 +163,64 @@ def main(args): break print("Number of files: ",count_root_files) print("Number of jobs (till now): ",count_jobs) - outjdl_file.close(); - outScript = open(condor_file_name+".sh","w"); - outScript.write('#!/bin/bash'); - outScript.write("\n"+'echo "Starting job on " `date`'); - outScript.write("\n"+'echo "Running on: `uname -a`"'); - 
outScript.write("\n"+'echo "System software: `cat /etc/redhat-release`"'); - outScript.write("\n"+'source /cvmfs/cms.cern.ch/cmsset_default.sh'); - outScript.write("\n"+'echo "====> List input arguments : " '); - outScript.write("\n"+'echo "1. nanoAOD ROOT file: ${1}"'); - outScript.write("\n"+'echo "2. EOS path to store output root file: ${2}"'); - outScript.write("\n"+'echo "3. EOS path from where we copy CMSSW: ${3}"'); - outScript.write("\n"+'echo "4. Output root file name: ${4}"'); - outScript.write("\n"+'echo "========================================="'); - outScript.write("\n"+'echo "copy cmssw tar file from store area"'); - outScript.write("\n"+'xrdcp -f root://eosuser.cern.ch/${3}/'+CMSSWRel +'.tgz .'); - outScript.write("\n"+'tar -xf '+ CMSSWRel +'.tgz' ); - outScript.write("\n"+'rm '+ CMSSWRel +'.tgz' ); - outScript.write("\n"+'cd ' + CMSSWRel + '/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/'+TOP_LEVEL_DIR_NAME+'/' ); - outScript.write("\n"+'rm *.root'); - outScript.write("\n"+'scramv1 b ProjectRename'); - outScript.write("\n"+'eval `scram runtime -sh`'); - outScript.write("\n"+'echo "========================================="'); - outScript.write("\n"+'echo "cat post_proc.py"'); - outScript.write("\n"+'echo "..."'); - outScript.write("\n"+'cat post_proc.py'); - outScript.write("\n"+'echo "..."'); - outScript.write("\n"+'echo "========================================="'); - if args.NOsyst: - outScript.write( - "\n" - + command - + " --entriesToRun 0 --inputFile ${1} --outputFile ${4}_hadd.root --cutFlowFile ${4}.json --DownloadFileToLocalThenRun True --NOsyst" - ) - else: - outScript.write( - "\n" - + command - + " --entriesToRun 0 --inputFile ${1} --outputFile ${4}_hadd.root --cutFlowFile ${4}.json --DownloadFileToLocalThenRun True" - ) - outScript.write("\n"+'echo "====> List root files : " '); - outScript.write("\n"+'ls -ltrh *.root'); - outScript.write("\n"+'ls -ltrh *.json'); - outScript.write("\n"+'echo "====> copying *.root file 
to stores area..." '); - outScript.write("\n"+'if ls ${4}_hadd.root 1> /dev/null 2>&1; then'); - outScript.write("\n"+' echo "File ${4}_hadd.root exists. Copy this."'); - outScript.write("\n"+' echo "xrdcp -f ${4}_hadd.root root://eosuser.cern.ch/${2}/${4}_Skim.root"'); - outScript.write("\n"+' xrdcp -f ${4}_hadd.root root://eosuser.cern.ch/${2}/${4}_Skim.root'); - outScript.write("\n"+' echo "xrdcp -f ${4}.json root://eosuser.cern.ch/${2}/cutFlow_${4}.json"'); - outScript.write("\n"+' xrdcp -f ${4}.json root://eosuser.cern.ch/${2}/cutFlow_${4}.json'); - outScript.write("\n"+'else'); - outScript.write("\n"+' echo "Something wrong: file ${4}_hadd.root does not exists, please check the post_proc.py script."'); - outScript.write("\n"+'fi'); - outScript.write("\n"+'rm *.root'); - outScript.write("\n"+'cd ${_CONDOR_SCRATCH_DIR}'); - outScript.write("\n"+'rm -rf ' + CMSSWRel); - outScript.write("\n"); - outScript.close(); + # Write all condor jobs arguments from list to a file with same name as condor_file_name but with .txt extension + with open(condor_file_name+".txt", "w") as f: + for item in condor_arguments_list: + f.write("{}\n".format(",".join(item))) + + # Create the executable file for condor jobs + outScript = open(condor_file_name + ".sh", "w") + # Variables for the outScript + entries = 100 if args.debug else 0 + no_syst_flag="--NOsyst" if args.NOsyst else "" + + outScript.write(f"""#!/bin/bash +echo "Starting job on " `date` +echo "Running on: `uname -a`" +echo "System software: `cat /etc/redhat-release`" +source /cvmfs/cms.cern.ch/cmsset_default.sh +echo "====> List input arguments : " +echo "1. nanoAOD ROOT file: ${{1}}" +echo "2. EOS path to store output root file: ${{2}}" +echo "3. EOS path from where we copy CMSSW: ${{3}}" +echo "4. Output root file name: ${{4}}" +echo "=========================================" +echo "copy cmssw tar file from store area" +xrdcp -f root://eosuser.cern.ch/${{3}}/{CMSSWRel}.tgz . 
+tar -xf {CMSSWRel}.tgz +rm {CMSSWRel}.tgz +cd {CMSSWRel}/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/{TOP_LEVEL_DIR_NAME}/ +rm *.root +scramv1 b ProjectRename +eval `scram runtime -sh` +echo "=========================================" +echo "cat post_proc.py" +echo "..." +cat post_proc.py +echo "..." +echo "=========================================" +output_file=${{4}}_hadd.root +export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 +{command} --entriesToRun {entries} --inputFile ${{1}} --outputFile ${{output_file}} --cutFlowFile ${{4}}.json --DownloadFileToLocalThenRun True {no_syst_flag} +echo "====> List root files : " +ls -ltrh *.root +ls -ltrh *.json +echo "====> copying *.root file to stores area..." +if ls ${{output_file}} 1> /dev/null 2>&1; then + echo "File ${{output_file}} exists. Copy this." + echo "xrdcp -f ${{output_file}} root://eosuser.cern.ch/${{2}}/${{4}}_Skim.root" + xrdcp -f ${{output_file}} root://eosuser.cern.ch/${{2}}/${{4}}_Skim.root + echo "xrdcp -f ${{4}}.json root://eosuser.cern.ch/${{2}}/cutFlow_${{4}}.json" + xrdcp -f ${{4}}.json root://eosuser.cern.ch/${{2}}/cutFlow_${{4}}.json +else + echo "Something wrong: file ${{output_file}} does not exists, please check the post_proc.py script." 
+fi +rm *.root +cd ${{_CONDOR_SCRATCH_DIR}} +rm -rf {CMSSWRel} +""") + outScript.close() os.system("chmod 777 "+condor_file_name+".sh"); print("\n#===> Set Proxy Using:") diff --git a/crab/crab_script.py b/crab/crab_script.py index 6136e5e..6eb7f62 100644 --- a/crab/crab_script.py +++ b/crab/crab_script.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import os from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import PostProcessor @@ -16,4 +16,4 @@ p.run() -print "DONE" +print ("DONE") diff --git a/crab/crab_scriptMC.py b/crab/crab_scriptMC.py index 4cb7b72..42e9ff1 100644 --- a/crab/crab_scriptMC.py +++ b/crab/crab_scriptMC.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import os from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import PostProcessor @@ -17,4 +17,4 @@ p.run() -print "DONE" +print ("DONE") diff --git a/data/JetPUID_cfg.py b/data/JetPUID_cfg.py index 69c6190..3441b47 100644 --- a/data/JetPUID_cfg.py +++ b/data/JetPUID_cfg.py @@ -16,7 +16,7 @@ for jet, jetTag in [('real','eff'), ('pu','mistag')]: for wp, iwp in [('loose', 'L'), ('medium', 'M'), ('tight', 'T')]: - for year, jcfg in _jet_puid_sf.iteritems(): + for year, jcfg in _jet_puid_sf.items(): jcfg['%s_%s' % (jet, wp)] = 'h2_%s_sf%s_%s' % (jetTag, year, iwp) jcfg['%s_mc_%s' % (jet, wp)] = 'h2_%s_mc%s_%s' % (jetTag, year, iwp) jcfg['%s_%s_uncty' % (jet, wp)] = 'h2_%s_sf%s_%s_Systuncty' % (jetTag, year, iwp) diff --git a/env.sh b/env.sh new file mode 100755 index 0000000..99c024c --- /dev/null +++ b/env.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# Update LD_LIBRARY_PATH for JHUGenMELA +export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 + +# Initialize a new proxy with the desired validity +voms-proxy-init --voms cms --valid 168:00 + +if [ $? -eq 0 ]; then + echo "Proxy successfully created." 
+ + # Check if the proxy is created in /tmp + PROXY_PATH=$(voms-proxy-info --path) + + if [[ $PROXY_PATH == /tmp/* ]]; then + echo "Proxy is located in /tmp, moving it to home directory..." + echo "cp $PROXY_PATH ~/" + cp $PROXY_PATH ~/ + echo "export X509_USER_PROXY=~/$(basename $PROXY_PATH)" + export X509_USER_PROXY=~/$(basename $PROXY_PATH) + echo "Proxy moved to home directory and X509_USER_PROXY set to $X509_USER_PROXY" + else + echo "Proxy is not in /tmp, no need to move it." + fi +else + echo "Failed to create the proxy." + exit 1 +fi diff --git a/external/JHUGen_py2to3.patch b/external/JHUGen_py2to3.patch new file mode 100644 index 0000000..cc56900 --- /dev/null +++ b/external/JHUGen_py2to3.patch @@ -0,0 +1,55 @@ +diff --git a/MELA/makefile b/MELA/makefile +index a7d3b07..c1490c2 100644 +--- a/MELA/makefile ++++ b/MELA/makefile +@@ -19,7 +19,8 @@ MELADIR = $(shell pwd) + MELASRCDIR = $(MELADIR)/src + MELAOBJDIR = $(MELADIR)/obj + # Modify MELALIBDIR for the gcc version as needed +-MELALIBDIR = ${MELA_LIB_PATH} ++# MELALIBDIR = ${MELA_LIB_PATH} ++MELALIBDIR = "./data/el9_amd64_gcc12/" + # _melapkgpath_ should refer to the root compilation path just like MELADIR with an extra '/'. 
+ # If environment variables need to be inserted without expansion for portability, + # you can use '.oODOLLAROo..oOOPEN_BRACKETOo.[YOUR_ENV_VARIABLE].oOCLOSE_BRACKETOo.', +diff --git a/MELA/test/batch.py b/MELA/test/batch.py +index e19b07b..f9c4e15 100755 +--- a/MELA/test/batch.py ++++ b/MELA/test/batch.py +@@ -2,12 +2,12 @@ + + import os + import sys +-import commands ++import subprocess + + from ROOT import TFile + + def processDirectory ( args, dirname, filenames ): +- print "processing " + dirname ++ print("processing " + dirname) + for filename in filenames: + fullname = dirname + "/" + filename + +@@ -55,18 +55,18 @@ def processDirectory ( args, dirname, filenames ): + + + +- print " " * 4 + filename + " with flavor " + str(flavor) + " and sqrts = " + str(sqrts) ++ print(" " * 4 + filename + " with flavor " + str(flavor) + " and sqrts = " + str(sqrts)) + + + + if flavor!=10: # looks like a valid file, prepare string + command = "root -q -b addProbtoTree.C\\(\\\"" + fullname[:-5] + "\\\","+str(flavor)+",-1,"+str(sqrts)+"\\)\n" + #create batch script +- commands.getstatusoutput("cp batchscript.csh batchscript_tmp.csh") ++ subprocess.getstatusoutput("cp batchscript.csh batchscript_tmp.csh") + file = open('batchscript_tmp.csh', 'a') + file.write(command) + file.close() +- commands.getstatusoutput("bsub -q 8nh < batchscript_tmp.csh" ) ++ subprocess.getstatusoutput("bsub -q 8nh < batchscript_tmp.csh" ) + #exit(0) + + diff --git a/external/nanoAODTools_py2to3.patch b/external/nanoAODTools_py2to3.patch new file mode 100644 index 0000000..e7885d9 --- /dev/null +++ b/external/nanoAODTools_py2to3.patch @@ -0,0 +1,129 @@ +diff --git a/python/postprocessing/examples/exampleAnalysis.py b/python/postprocessing/examples/exampleAnalysis.py +index 28cb45f..1824684 100644 +--- a/python/postprocessing/examples/exampleAnalysis.py ++++ b/python/postprocessing/examples/exampleAnalysis.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from 
PhysicsTools.NanoAODTools.postprocessing.framework.eventloop import Module + from PhysicsTools.NanoAODTools.postprocessing.framework.datamodel import Collection + from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import PostProcessor +diff --git a/python/postprocessing/examples/example_postproc.py b/python/postprocessing/examples/example_postproc.py +index 1ace02c..3e7a495 100644 +--- a/python/postprocessing/examples/example_postproc.py ++++ b/python/postprocessing/examples/example_postproc.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from exampleModule import * + from PhysicsTools.NanoAODTools.postprocessing.modules.jme.jetmetHelperRun2 import * + from PhysicsTools.NanoAODTools.postprocessing.modules.jme.jetmetUncertainties import * +diff --git a/python/postprocessing/framework/crabhelper.py b/python/postprocessing/framework/crabhelper.py +index 405b055..53613a7 100644 +--- a/python/postprocessing/framework/crabhelper.py ++++ b/python/postprocessing/framework/crabhelper.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + import os + from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import * + import sys +diff --git a/python/postprocessing/framework/postprocessor.py b/python/postprocessing/framework/postprocessor.py +index c7cc3ca..ea54b4a 100755 +--- a/python/postprocessing/framework/postprocessor.py ++++ b/python/postprocessing/framework/postprocessor.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from PhysicsTools.NanoAODTools.postprocessing.framework.jobreport import JobReport + from PhysicsTools.NanoAODTools.postprocessing.framework.preskimming import preSkim + from PhysicsTools.NanoAODTools.postprocessing.framework.output import FriendOutput, FullOutput +@@ -261,8 +261,18 @@ class PostProcessor: + if self.haddFileName: + haddnano = "./haddnano.py" if os.path.isfile( + "./haddnano.py") else "haddnano.py" ++ print("Merging output files into %s" % 
self.haddFileName) ++ print("") ++ os.system('date') ++ startTime = time.time() ++ print("%s %s %s" % ++ (haddnano, self.outputDir + "/" + self.haddFileName, " ".join(outFileNames))) + os.system("%s %s %s" % +- (haddnano, self.haddFileName, " ".join(outFileNames))) ++ (haddnano, self.outputDir + "/" + self.haddFileName, " ".join(outFileNames))) ++ os.system('date') ++ print("Total time to merge %i files: %.1f sec" % ++ (len(outFileNames), time.time() - startTime)) ++ print("Done") + if self.jobReport: +- self.jobReport.addOutputFile(self.haddFileName) ++ self.jobReport.addOutputFile( self.outputDir + "/" + self.haddFileName) + self.jobReport.save() +diff --git a/python/postprocessing/modules/common/puWeightProducer.py b/python/postprocessing/modules/common/puWeightProducer.py +index b9062b4..7c5a1da 100644 +--- a/python/postprocessing/modules/common/puWeightProducer.py ++++ b/python/postprocessing/modules/common/puWeightProducer.py +@@ -59,8 +59,13 @@ class puWeightProducer(Module): + + def loadHisto(self, filename, hname): + tf = ROOT.TFile.Open(filename) ++ if not tf or tf.IsZombie(): ++ raise IOError(f"Cannot open file: {filename}") + hist = tf.Get(hname) +- hist.SetDirectory(None) ++ if not hist: ++ raise IOError(f"Cannot find histogram {hname} in file: {filename}") ++ ++ hist.SetDirectory(0) # With Python3/EL9 "None" is not working. 
So, replaced "None" with "0" + tf.Close() + return hist + +diff --git a/python/postprocessing/modules/jme/jetmetHelperRun2.py b/python/postprocessing/modules/jme/jetmetHelperRun2.py +index d38c623..9d07645 100644 +--- a/python/postprocessing/modules/jme/jetmetHelperRun2.py ++++ b/python/postprocessing/modules/jme/jetmetHelperRun2.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + import os + import sys + import subprocess +@@ -107,7 +107,7 @@ jmsValues = { + 'UL2016': [1.000, 1.000, 1.000], # placeholder + 'UL2017': [1.000, 1.000, 1.000], # placeholder + 'UL2018': [1.000, 1.000, 1.000], # placeholder +-} ++} + + + def createJMECorrector(isMC=True, +diff --git a/scripts/haddnano.py b/scripts/haddnano.py +index 96d7d11..82ac1dc 100755 +--- a/scripts/haddnano.py ++++ b/scripts/haddnano.py +@@ -1,4 +1,4 @@ +-#!/bin/env python ++#!/usr/bin/env python3 + import ROOT + import numpy + import sys +diff --git a/scripts/nano_postproc.py b/scripts/nano_postproc.py +index acd5c80..e788c10 100755 +--- a/scripts/nano_postproc.py ++++ b/scripts/nano_postproc.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import PostProcessor + from importlib import import_module + import os +diff --git a/scripts/nano_report.py b/scripts/nano_report.py +index e54b55b..e162a9a 100644 +--- a/scripts/nano_report.py ++++ b/scripts/nano_report.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + # imported from https://github.com/CERN-PH-CMG/cmg-cmssw/blob/0c11a5a0a15c4c3e1a648c9707b06b08b747b0c0/PhysicsTools/Heppy/scripts/heppy_report.py + from optparse import OptionParser + import json diff --git a/external/yamlcpp_pkg_py2to3.patch b/external/yamlcpp_pkg_py2to3.patch new file mode 100644 index 0000000..f19fa3d --- /dev/null +++ b/external/yamlcpp_pkg_py2to3.patch @@ -0,0 +1,465 @@ +diff --git a/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py 
b/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py +index e09a6e0..95c8574 100755 +--- a/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py ++++ b/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py +@@ -184,7 +184,7 @@ def Title(word): + def OneTo(n): + """Returns the list [1, 2, 3, ..., n].""" + +- return range(1, n + 1) ++ return list(range(1, n + 1)) + + + def Iter(n, format, sep=''): +@@ -308,12 +308,12 @@ def GenerateFile(path, content): + """Given a file path and a content string + overwrites it with the given content. + """ +- print 'Updating file %s . . .' % path ++ print('Updating file %s . . .' % path) + f = file(path, 'w+') +- print >>f, content, ++ print(content, end=' ', file=f) + f.close() + +- print 'File %s has been updated.' % path ++ print('File %s has been updated.' % path) + + + def GenerateHeader(n): +@@ -720,8 +720,8 @@ def _Main(): + unit test.""" + + if len(sys.argv) != 2: +- print __doc__ +- print 'Author: ' + __author__ ++ print(__doc__) ++ print('Author: ' + __author__) + sys.exit(1) + + n = int(sys.argv[1]) +diff --git a/test/gtest-1.11.0/googletest/scripts/release_docs.py b/test/gtest-1.11.0/googletest/scripts/release_docs.py +index 8d24f28..05b15fd 100755 +--- a/test/gtest-1.11.0/googletest/scripts/release_docs.py ++++ b/test/gtest-1.11.0/googletest/scripts/release_docs.py +@@ -127,11 +127,11 @@ class WikiBrancher(object): + def BranchFiles(self): + """Branches the .wiki files needed to be branched.""" + +- print 'Branching %d .wiki files:' % (len(self.files_to_branch),) ++ print('Branching %d .wiki files:' % (len(self.files_to_branch),)) + os.chdir(self.wiki_dir) + for f in self.files_to_branch: + command = 'svn cp %s %s%s' % (f, self.version_prefix, f) +- print command ++ print(command) + os.system(command) + + def UpdateLinksInBranchedFiles(self): +@@ -139,7 +139,7 @@ class WikiBrancher(object): + for f in self.files_to_branch: + source_file = os.path.join(self.wiki_dir, f) + versioned_file = 
os.path.join(self.wiki_dir, self.version_prefix + f) +- print 'Updating links in %s.' % (versioned_file,) ++ print('Updating links in %s.' % (versioned_file,)) + text = file(source_file, 'r').read() + new_text = self.search_for_re.sub(self.replace_with, text) + file(versioned_file, 'w').write(new_text) +diff --git a/test/gtest-1.11.0/googletest/scripts/upload.py b/test/gtest-1.11.0/googletest/scripts/upload.py +index eba5711..786c281 100755 +--- a/test/gtest-1.11.0/googletest/scripts/upload.py ++++ b/test/gtest-1.11.0/googletest/scripts/upload.py +@@ -46,7 +46,7 @@ against by using the '--rev' option. + # This code is derived from appcfg.py in the App Engine SDK (open source), + # and from ASPN recipe #146306. + +-import cookielib ++import http.cookiejar + import getpass + import logging + import md5 +@@ -57,9 +57,9 @@ import re + import socket + import subprocess + import sys +-import urllib +-import urllib2 +-import urlparse ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse ++import urllib.parse + + try: + import readline +@@ -94,15 +94,15 @@ def GetEmail(prompt): + last_email = last_email_file.readline().strip("\n") + last_email_file.close() + prompt += " [%s]" % last_email +- except IOError, e: ++ except IOError as e: + pass +- email = raw_input(prompt + ": ").strip() ++ email = input(prompt + ": ").strip() + if email: + try: + last_email_file = open(last_email_file_name, "w") + last_email_file.write(email) + last_email_file.close() +- except IOError, e: ++ except IOError as e: + pass + else: + email = last_email +@@ -118,20 +118,20 @@ def StatusUpdate(msg): + msg: The string to print. 
+ """ + if verbosity > 0: +- print msg ++ print(msg) + + + def ErrorExit(msg): + """Print an error message to stderr and exit.""" +- print >>sys.stderr, msg ++ print(msg, file=sys.stderr) + sys.exit(1) + + +-class ClientLoginError(urllib2.HTTPError): ++class ClientLoginError(urllib.error.HTTPError): + """Raised to indicate there was an error authenticating with ClientLogin.""" + + def __init__(self, url, code, msg, headers, args): +- urllib2.HTTPError.__init__(self, url, code, msg, headers, None) ++ urllib.error.HTTPError.__init__(self, url, code, msg, headers, None) + self.args = args + self.reason = args["Error"] + +@@ -177,10 +177,10 @@ class AbstractRpcServer(object): + def _CreateRequest(self, url, data=None): + """Creates a new urllib request.""" + logging.debug("Creating request for: '%s' with payload:\n%s", url, data) +- req = urllib2.Request(url, data=data) ++ req = urllib.request.Request(url, data=data) + if self.host_override: + req.add_header("Host", self.host_override) +- for key, value in self.extra_headers.iteritems(): ++ for key, value in self.extra_headers.items(): + req.add_header(key, value) + return req + +@@ -204,7 +204,7 @@ class AbstractRpcServer(object): + account_type = "HOSTED" + req = self._CreateRequest( + url="https://www.google.com/accounts/ClientLogin", +- data=urllib.urlencode({ ++ data=urllib.parse.urlencode({ + "Email": email, + "Passwd": password, + "service": "ah", +@@ -218,7 +218,7 @@ class AbstractRpcServer(object): + response_dict = dict(x.split("=") + for x in response_body.split("\n") if x) + return response_dict["Auth"] +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + if e.code == 403: + body = e.read() + response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) +@@ -240,14 +240,14 @@ class AbstractRpcServer(object): + continue_location = "http://localhost/" + args = {"continue": continue_location, "auth": auth_token} + req = self._CreateRequest("http://%s/_ah/login?%s" % +- (self.host, 
urllib.urlencode(args))) ++ (self.host, urllib.parse.urlencode(args))) + try: + response = self.opener.open(req) +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + response = e + if (response.code != 302 or + response.info()["location"] != continue_location): +- raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, ++ raise urllib.error.HTTPError(req.get_full_url(), response.code, response.msg, + response.headers, response.fp) + self.authenticated = True + +@@ -270,34 +270,34 @@ class AbstractRpcServer(object): + credentials = self.auth_function() + try: + auth_token = self._GetAuthToken(credentials[0], credentials[1]) +- except ClientLoginError, e: ++ except ClientLoginError as e: + if e.reason == "BadAuthentication": +- print >>sys.stderr, "Invalid username or password." ++ print("Invalid username or password.", file=sys.stderr) + continue + if e.reason == "CaptchaRequired": +- print >>sys.stderr, ( ++ print(( + "Please go to\n" + "https://www.google.com/accounts/DisplayUnlockCaptcha\n" +- "and verify you are a human. Then try again.") ++ "and verify you are a human. Then try again."), file=sys.stderr) + break + if e.reason == "NotVerified": +- print >>sys.stderr, "Account not verified." ++ print("Account not verified.", file=sys.stderr) + break + if e.reason == "TermsNotAgreed": +- print >>sys.stderr, "User has not agreed to TOS." ++ print("User has not agreed to TOS.", file=sys.stderr) + break + if e.reason == "AccountDeleted": +- print >>sys.stderr, "The user account has been deleted." ++ print("The user account has been deleted.", file=sys.stderr) + break + if e.reason == "AccountDisabled": +- print >>sys.stderr, "The user account has been disabled." 
++ print("The user account has been disabled.", file=sys.stderr) + break + if e.reason == "ServiceDisabled": +- print >>sys.stderr, ("The user's access to the service has been " +- "disabled.") ++ print(("The user's access to the service has been " ++ "disabled."), file=sys.stderr) + break + if e.reason == "ServiceUnavailable": +- print >>sys.stderr, "The service is not available; try again later." ++ print("The service is not available; try again later.", file=sys.stderr) + break + raise + self._GetAuthCookie(auth_token) +@@ -334,7 +334,7 @@ class AbstractRpcServer(object): + args = dict(kwargs) + url = "http://%s%s" % (self.host, request_path) + if args: +- url += "?" + urllib.urlencode(args) ++ url += "?" + urllib.parse.urlencode(args) + req = self._CreateRequest(url=url, data=payload) + req.add_header("Content-Type", content_type) + try: +@@ -342,7 +342,7 @@ class AbstractRpcServer(object): + response = f.read() + f.close() + return response +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + if tries > 3: + raise + elif e.code == 401: +@@ -372,35 +372,35 @@ class HttpRpcServer(AbstractRpcServer): + Returns: + A urllib2.OpenerDirector object. 
+ """ +- opener = urllib2.OpenerDirector() +- opener.add_handler(urllib2.ProxyHandler()) +- opener.add_handler(urllib2.UnknownHandler()) +- opener.add_handler(urllib2.HTTPHandler()) +- opener.add_handler(urllib2.HTTPDefaultErrorHandler()) +- opener.add_handler(urllib2.HTTPSHandler()) ++ opener = urllib.request.OpenerDirector() ++ opener.add_handler(urllib.request.ProxyHandler()) ++ opener.add_handler(urllib.request.UnknownHandler()) ++ opener.add_handler(urllib.request.HTTPHandler()) ++ opener.add_handler(urllib.request.HTTPDefaultErrorHandler()) ++ opener.add_handler(urllib.request.HTTPSHandler()) + opener.add_handler(urllib2.HTTPErrorProcessor()) + if self.save_cookies: + self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies") +- self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file) ++ self.cookie_jar = http.cookiejar.MozillaCookieJar(self.cookie_file) + if os.path.exists(self.cookie_file): + try: + self.cookie_jar.load() + self.authenticated = True + StatusUpdate("Loaded authentication cookies from %s" % + self.cookie_file) +- except (cookielib.LoadError, IOError): ++ except (http.cookiejar.LoadError, IOError): + # Failed to load cookies - just ignore them. + pass + else: + # Create an empty cookie file with mode 600 +- fd = os.open(self.cookie_file, os.O_CREAT, 0600) ++ fd = os.open(self.cookie_file, os.O_CREAT, 0o600) + os.close(fd) + # Always chmod the cookie file +- os.chmod(self.cookie_file, 0600) ++ os.chmod(self.cookie_file, 0o600) + else: + # Don't save cookies across runs of update.py. 
+- self.cookie_jar = cookielib.CookieJar() +- opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar)) ++ self.cookie_jar = http.cookiejar.CookieJar() ++ opener.add_handler(urllib.request.HTTPCookieProcessor(self.cookie_jar)) + return opener + + +@@ -575,7 +575,7 @@ def RunShellWithReturnCode(command, print_output=False, + line = p.stdout.readline() + if not line: + break +- print line.strip("\n") ++ print(line.strip("\n")) + output_array.append(line) + output = "".join(output_array) + else: +@@ -583,7 +583,7 @@ def RunShellWithReturnCode(command, print_output=False, + p.wait() + errout = p.stderr.read() + if print_output and errout: +- print >>sys.stderr, errout ++ print(errout, file=sys.stderr) + p.stdout.close() + p.stderr.close() + return output, p.returncode +@@ -629,11 +629,11 @@ class VersionControlSystem(object): + """Show an "are you sure?" prompt if there are unknown files.""" + unknown_files = self.GetUnknownFiles() + if unknown_files: +- print "The following files are not added to version control:" ++ print("The following files are not added to version control:") + for line in unknown_files: +- print line ++ print(line) + prompt = "Are you sure to continue?(y/N) " +- answer = raw_input(prompt).strip() ++ answer = input(prompt).strip() + if answer != "y": + ErrorExit("User aborted") + +@@ -685,13 +685,13 @@ class VersionControlSystem(object): + else: + type = "current" + if len(content) > MAX_UPLOAD_SIZE: +- print ("Not uploading the %s file for %s because it's too large." % +- (type, filename)) ++ print(("Not uploading the %s file for %s because it's too large." 
% ++ (type, filename))) + file_too_large = True + content = "" + checksum = md5.new(content).hexdigest() + if options.verbose > 0 and not file_too_large: +- print "Uploading %s file for %s" % (type, filename) ++ print("Uploading %s file for %s" % (type, filename)) + url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) + form_fields = [("filename", filename), + ("status", status), +@@ -713,7 +713,7 @@ class VersionControlSystem(object): + + patches = dict() + [patches.setdefault(v, k) for k, v in patch_list] +- for filename in patches.keys(): ++ for filename in list(patches.keys()): + base_content, new_content, is_binary, status = files[filename] + file_id_str = patches.get(filename) + if file_id_str.find("nobase") != -1: +@@ -770,8 +770,8 @@ class SubversionVCS(VersionControlSystem): + words = line.split() + if len(words) == 2 and words[0] == "URL:": + url = words[1] +- scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) +- username, netloc = urllib.splituser(netloc) ++ scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(url) ++ username, netloc = urllib.parse.splituser(netloc) + if username: + logging.info("Removed username from base URL") + if netloc.endswith("svn.python.org"): +@@ -789,12 +789,12 @@ class SubversionVCS(VersionControlSystem): + logging.info("Guessed CollabNet base = %s", base) + elif netloc.endswith(".googlecode.com"): + path = path + "/" +- base = urlparse.urlunparse(("http", netloc, path, params, ++ base = urllib.parse.urlunparse(("http", netloc, path, params, + query, fragment)) + logging.info("Guessed Google Code base = %s", base) + else: + path = path + "/" +- base = urlparse.urlunparse((scheme, netloc, path, params, ++ base = urllib.parse.urlunparse((scheme, netloc, path, params, + query, fragment)) + logging.info("Guessed base = %s", base) + return base +@@ -1202,8 +1202,8 @@ def UploadSeparatePatches(issue, rpc_server, patchset, data, options): + rv = [] + for patch in patches: + if 
len(patch[1]) > MAX_UPLOAD_SIZE: +- print ("Not uploading the patch for " + patch[0] + +- " because the file is too large.") ++ print(("Not uploading the patch for " + patch[0] + ++ " because the file is too large.")) + continue + form_fields = [("filename", patch[0])] + if not options.download_base: +@@ -1211,7 +1211,7 @@ def UploadSeparatePatches(issue, rpc_server, patchset, data, options): + files = [("data", "data.diff", patch[1])] + ctype, body = EncodeMultipartFormData(form_fields, files) + url = "/%d/upload_patch/%d" % (int(issue), int(patchset)) +- print "Uploading patch for " + patch[0] ++ print("Uploading patch for " + patch[0]) + response_body = rpc_server.Send(url, body, content_type=ctype) + lines = response_body.splitlines() + if not lines or lines[0] != "OK": +@@ -1238,7 +1238,8 @@ def GuessVCS(options): + out, returncode = RunShellWithReturnCode(["hg", "root"]) + if returncode == 0: + return MercurialVCS(options, out.strip()) +- except OSError, (errno, message): ++ except OSError as xxx_todo_changeme: ++ (errno, message) = xxx_todo_changeme.args + if errno != 2: # ENOENT -- they don't have hg installed. + raise + +@@ -1254,7 +1255,8 @@ def GuessVCS(options): + "--is-inside-work-tree"]) + if returncode == 0: + return GitVCS(options) +- except OSError, (errno, message): ++ except OSError as xxx_todo_changeme1: ++ (errno, message) = xxx_todo_changeme1.args + if errno != 2: # ENOENT -- they don't have git installed. 
+ raise + +@@ -1301,12 +1303,12 @@ def RealMain(argv, data=None): + data = vcs.GenerateDiff(args) + files = vcs.GetBaseFiles(data) + if verbosity >= 1: +- print "Upload server:", options.server, "(change with -s/--server)" ++ print("Upload server:", options.server, "(change with -s/--server)") + if options.issue: + prompt = "Message describing this patch set: " + else: + prompt = "New issue subject: " +- message = options.message or raw_input(prompt).strip() ++ message = options.message or input(prompt).strip() + if not message: + ErrorExit("A non-empty message is required") + rpc_server = GetRpcServer(options) +@@ -1339,7 +1341,7 @@ def RealMain(argv, data=None): + # Send a hash of all the base file so the server can determine if a copy + # already exists in an earlier patchset. + base_hashes = "" +- for file, info in files.iteritems(): ++ for file, info in files.items(): + if not info[0] is None: + checksum = md5.new(info[0]).hexdigest() + if base_hashes: +@@ -1353,7 +1355,7 @@ def RealMain(argv, data=None): + if not options.download_base: + form_fields.append(("content_upload", "1")) + if len(data) > MAX_UPLOAD_SIZE: +- print "Patch is large, so uploading file patches separately." 
++ print("Patch is large, so uploading file patches separately.") + uploaded_diff_file = [] + form_fields.append(("separate_patches", "1")) + else: +@@ -1393,7 +1395,7 @@ def main(): + try: + RealMain(sys.argv) + except KeyboardInterrupt: +- print ++ print() + StatusUpdate("Interrupted.") + sys.exit(1) + diff --git a/input_data_Files/sample_list_v9_2018.dat b/input_data_Files/sample_list_v9_2018.dat index 01c284b..55dbdcc 100644 --- a/input_data_Files/sample_list_v9_2018.dat +++ b/input_data_Files/sample_list_v9_2018.dat @@ -30,9 +30,16 @@ /GluGluHToZZTo2L2Nu_M200_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM /GluGluHToZZTo2L2Nu_M250_TuneCP5_13TeV_powheg2_JHUGenV735_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM /GluGluHToZZTo2L2Nu_M300_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM +/GluGluHToZZTo2L2Nu_M400_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM /GluGluHToZZTo2L2Nu_M500_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM +/GluGluHToZZTo2L2Nu_M600_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM +/GluGluHToZZTo2L2Nu_M700_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM +/GluGluHToZZTo2L2Nu_M800_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM +/GluGluHToZZTo2L2Nu_M900_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v3/NANOAODSIM /GluGluHToZZTo2L2Nu_M1000_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM 
/GluGluHToZZTo2L2Nu_M1500_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM +/GluGluHToZZTo2L2Nu_M2000_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v3/NANOAODSIM +/GluGluHToZZTo2L2Nu_M2500_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM /GluGluHToZZTo2L2Nu_M3000_TuneCP5_13TeV_powheg2_JHUGenV7011_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM ## ## Background: Top diff --git a/post_proc.py b/post_proc.py index 41ae80a..d09a07d 100644 --- a/post_proc.py +++ b/post_proc.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import os import sys import argparse @@ -23,6 +23,7 @@ def parse_arguments(): parser = argparse.ArgumentParser() parser.add_argument("-i", "--inputFile", default="", type=str, help="Input file name") parser.add_argument('-o', '--outputFile', default="skimmed_nano.root", type=str, help="Output file name") + parser.add_argument('-outDir', '--outputDir', default=".", type=str, help="Output directory") parser.add_argument('-c', '--cutFlowFile', default="cutFlow.json", type=str, help="Cut flow file name") parser.add_argument("-n", "--entriesToRun", default=100, type=int, help="Set to 0 if need to run over all entries else put number of entries to run") parser.add_argument("-d", "--DownloadFileToLocalThenRun", default=True, type=bool, help="Download file to local then run") @@ -100,8 +101,8 @@ def main(): H4LCppModule = lambda: HZZAnalysisCppProducer(year,cfgFile, isMC, isFSR, args.cutFlowFile, args.DEBUG) GenVarModule = lambda : GenVarsProducer() # FIXME: Gen variable producer module is not working #modulesToRun.extend([H4LCppModule()]) - # modulesToRun.extend([H4LCppModule(), GenVarModule()]) - modulesToRun.extend([ GenVarModule()]) + modulesToRun.extend([H4LCppModule(), GenVarModule()]) + # 
modulesToRun.extend([ GenVarModule()]) print("systematic info: {}".format(args.NOsyst)) print("Input json file: {}".format(jsonFileName)) @@ -114,7 +115,7 @@ def main(): jetmetCorrector = createJMECorrector(isMC=isMC, dataYear=year, jesUncert="All", jetType = "AK4PFchs") fatJetCorrector = createJMECorrector(isMC=isMC, dataYear=year, jesUncert="All", jetType = "AK8PFPuppi") # btagSF = lambda: btagSFProducer("UL"+str(year), algo="deepjet",selectedWPs=['L','M','T','shape_corr'], sfFileName=sfFileName) - btagSF = lambda: btagSFProducer(era = "UL"+str(year), algo = "deepcsv") + # btagSF = lambda: btagSFProducer(era = "UL"+str(year), algo = "deepcsv") puidSF = lambda: JetSFMaker("%s" % year) modulesToRun.extend([jetmetCorrector(), fatJetCorrector(), puidSF()]) # # modulesToRun.extend([jetmetCorrector(), fatJetCorrector(), btagSF(), puidSF()]) @@ -127,7 +128,7 @@ def main(): # otherwise the output file will have larger size then expected. Reference: https://github.com/cms-nanoAOD/nanoAOD-tools/issues/249 temp_keep_drop_file = create_temp_keep_drop_file(keep_drop_rules_GEN + keep_drop_rules_Data_MC) print("DEBUG: Keep and drop file: {}".format(temp_keep_drop_file)) - p=PostProcessor(".",testfilelist, None, None,modules = modulesToRun, + p=PostProcessor(args.outputDir,testfilelist, None, None,modules = modulesToRun, provenance=True,fwkJobReport=True, haddFileName=args.outputFile, maxEntries=entriesToRun, @@ -141,7 +142,7 @@ def main(): temp_keep_drop_file = create_temp_keep_drop_file(keep_drop_rules_Data_MC) print("DEBUG: Keep and drop file: {}".format(temp_keep_drop_file)) - p=PostProcessor(".",testfilelist, None, None, modules = modulesToRun, + p=PostProcessor(args.outputDir,testfilelist, None, None, modules = modulesToRun, provenance=True, fwkJobReport=True, haddFileName=args.outputFile, jsonInput=jsonFileName, diff --git a/scripts/check_condor_stuck_or_not.py b/scripts/check_condor_stuck_or_not.py index 7dcb482..6c13692 100644 --- a/scripts/check_condor_stuck_or_not.py 
+++ b/scripts/check_condor_stuck_or_not.py @@ -12,9 +12,9 @@ #Oprint output.split("\t") lpcschedd = "" -print type(output) +print(type(output)) for outputs in output.split('\n'): - print outputs + print(outputs) if outputs.find('Submitter') != -1: lpcschedd = outputs.split()[2].split('.')[0] if outputs.find('rasharma') != -1 and outputs.split()[5] == 'R': @@ -23,21 +23,21 @@ """ condor_tail = "condor_tail "+outputs.split()[0]+" -name "+lpcschedd - print "\n","-"*51,"\n\n" - print(style.GREEN + outputs+style.RESET+"\n\n") - print "COMMAND: ",condor_tail - print "\n" + print("\n","-"*51,"\n\n") + print((style.GREEN + outputs+style.RESET+"\n\n")) + print("COMMAND: ",condor_tail) + print("\n") # os.system(condor_tail) output = os.popen(condor_tail).read() foundOrNot = any(match in output for match in error_check_string) if foundOrNot: - print(style.RED + "ERROR: Going to kill this job" + style.RESET) + print((style.RED + "ERROR: Going to kill this job" + style.RESET)) killCommand = "condor_rm "+outputs.split()[0]+" -name "+lpcschedd - print(style.RED + "Running Command: " + killCommand + style.RESET) + print((style.RED + "Running Command: " + killCommand + style.RESET)) os.system(killCommand) - print(style.RED + "Successfully killed." + style.RESET) + print((style.RED + "Successfully killed." 
+ style.RESET)) else: - print output -print "\n\n" + print(output) +print("\n\n") diff --git a/scripts/check_das_sample.py b/scripts/check_das_sample.py index b983409..38ad725 100644 --- a/scripts/check_das_sample.py +++ b/scripts/check_das_sample.py @@ -17,11 +17,11 @@ defaultOldCampaign = "v6" defaultNewCampaign = "v7" -print "="*51 +print("="*51) CommandToRun = 'cp input_data_Files/sample_list_'+campaign_to_run.replace(defaultNewCampaign,defaultOldCampaign)+'.dat samples.dat' -print CommandToRun +print(CommandToRun) os.system(CommandToRun) -print "="*51 +print("="*51) with open('samples.dat') as in_file: count = 0 outjdl_file = open("sample_list_"+campaign_to_run+".dat","w") @@ -32,25 +32,25 @@ continue #if count > 27: break count = count +1 - print "="*51,"\n" - print "==> Sample : ",count - print "==> line : ",lines + print("="*51,"\n") + print("==> Sample : ",count) + print("==> line : ",lines) sample_name = lines.split('/')[1] campaign = lines.split('/')[2] tier = lines.split('/')[3] #campaign = lines.split('/')[2].split('-')[0] - print "==> DAS = ",lines - print "==> sample_name = ",sample_name - print "==> campaign = ",campaign - print "==> campaign = ",tier + print("==> DAS = ",lines) + print("==> sample_name = ",sample_name) + print("==> campaign = ",campaign) + print("==> campaign = ",tier) if sample_name.find("SingleMuon") != -1 or sample_name.find("EGamma") != -1 or sample_name.find("SingleElectron") !=-1 or sample_name.find("DoubleEG") != -1 or sample_name.find("DoubleMuon") != -1 or sample_name.find("MuonEG") != -1: v6_ntuples = "/"+sample_name+"/"+year_campaign_dict[campaign_to_run][1]+"/"+tier else: v6_ntuples = "/"+sample_name+"/"+year_campaign_dict[campaign_to_run][0]+"/"+tier #output = os.popen('dasgoclient --query="dataset='+lines.strip()+'"').read() - print 'dasgoclient --query="dataset='+v6_ntuples.strip()+'"' + print('dasgoclient --query="dataset='+v6_ntuples.strip()+'"') output = os.popen('dasgoclient 
--query="dataset='+v6_ntuples.strip()+'"').read() - print "output : ",output,"\n",type(output)," : ",len(output) + print("output : ",output,"\n",type(output)," : ",len(output)) if len(output.strip()) == 0: outjdl_file.write("# NOT FOUND: "+v6_ntuples.strip()+"\n") else: diff --git a/scripts/mergeNanoAODRootFiles.py b/scripts/mergeNanoAODRootFiles.py index ce65141..1ffdc85 100644 --- a/scripts/mergeNanoAODRootFiles.py +++ b/scripts/mergeNanoAODRootFiles.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import os import glob import ROOT diff --git a/scripts/nanoAOD_condor_resubmit.py b/scripts/nanoAOD_condor_resubmit.py index 74efa66..b31c2e3 100644 --- a/scripts/nanoAOD_condor_resubmit.py +++ b/scripts/nanoAOD_condor_resubmit.py @@ -135,18 +135,13 @@ def prepare_runJobs_missing(FailedJobRootFile,InputJdlFile,CondorLogDir,EOSDir,R if DEBUG: print("copy command: {}".format(bashCommand)) os.system(bashCommand) - outjdl_fileName = InputJdlFile.replace(".jdl", "_resubmit_"+str(Resubmit_no)+".jdl") + outjdl_fileName = InputJdlFile.replace(".txt", "_resubmit_"+str(Resubmit_no)+".txt") outjdl_file = open(outjdl_fileName,"w") - with open(InputJdlFile, 'r') as myfile: - """Copy the main part of original jdl file to new jdl file. - All the lines before "Output = " should be copied to new jdl file. 
- """ - for line in myfile: - # Check if line starts with "Output = " - if line.startswith("Output = "): - break - outjdl_file.write(line) + # Update the new txt file name in the JDL file + bashCommand = "sed -i 's/{0}/{1}/g' {2}".format(InputJdlFile, outjdl_fileName, InputJdlFile.replace(".txt",".jdl")) + if DEBUG: print("sed command: {}".format(bashCommand)) + os.system(bashCommand) for RootFiles in FailedJobRootFile: if DEBUG: print("Root file to look for in stdout files: {}".format(RootFiles)) @@ -175,7 +170,7 @@ def prepare_runJobs_missing(FailedJobRootFile,InputJdlFile,CondorLogDir,EOSDir,R OldRefFile = "" if DEBUG: print("OldRefFile: {}".format(OldRefFile)) - grepCommand_GetJdlInfo = 'grep -A1 -B3 "{}" {}'.format(RootFiles, InputJdlFile) + grepCommand_GetJdlInfo = 'grep "{}" {}'.format(RootFiles, InputJdlFile) if DEBUG: print(grepCommand_GetJdlInfo) grep_condor_jdl_part = os.popen(grepCommand_GetJdlInfo).read() if DEBUG: print("=="*51) @@ -244,7 +239,7 @@ def main(): jdlfile = prepare_runJobs_missing(not_finished,options.input,options.dir,stageDir,str(options.resubmit_no)) print(jdlfile) print('Submitting missing jobs : ') - submit_missing(jdlfile,options.resubmit) + submit_missing(options.input.replace(".txt",".jdl"),options.resubmit) if __name__ == "__main__": main() diff --git a/scripts/nanoAOD_tools.patch b/scripts/nanoAOD_tools.patch deleted file mode 100644 index 35490e6..0000000 --- a/scripts/nanoAOD_tools.patch +++ /dev/null @@ -1,41 +0,0 @@ -diff --git a/python/postprocessing/framework/postprocessor.py b/python/postprocessing/framework/postprocessor.py -index 1c75036..ceaf437 100755 ---- a/python/postprocessing/framework/postprocessor.py -+++ b/python/postprocessing/framework/postprocessor.py -@@ -144,6 +144,7 @@ class PostProcessor: - - fullClone = (len(self.modules) == 0) - outFileNames = [] -+ outFileNameshadd = "" - t0 = time.time() - totEntriesRead = 0 - for fname in self.inputFiles: -@@ -199,6 +200,8 @@ class PostProcessor: - if not 
self.noOut: - outFileName = os.path.join(self.outputDir, os.path.basename( - fname).replace(".root", outpostfix + ".root")) -+ outFileNameshadd = os.path.join(self.outputDir, os.path.basename( -+ fname).replace(".root", outpostfix + "Hadd.root")) - outFile = ROOT.TFile.Open( - outFileName, "RECREATE", "", compressionLevel) - outFileNames.append(outFileName) -@@ -257,8 +260,18 @@ class PostProcessor: - if self.haddFileName: - haddnano = "./haddnano.py" if os.path.isfile( - "./haddnano.py") else "haddnano.py" -+ print("\n\n[postprocessor.py::INFO::] %s %s %s" % -+ (haddnano, outFileNameshadd," ".join(outFileNames))) -+ print "" -+ os.system('date') -+ startTime = time.clock() -+ print "" - os.system("%s %s %s" % -- (haddnano, self.haddFileName, " ".join(outFileNames))) -+ (haddnano, outFileNameshadd, " ".join(outFileNames))) -+ print "" -+ os.system('date') -+ print "Time taken for hadd is %s s"%(time.clock() - startTime) -+ print "" - if self.jobReport: - self.jobReport.addOutputFile(self.haddFileName) - self.jobReport.save() diff --git a/src/H4LTools.cc b/src/H4LTools.cc index 710a77b..d4386db 100644 --- a/src/H4LTools.cc +++ b/src/H4LTools.cc @@ -10,7 +10,7 @@ std::vector H4LTools::goodLooseElectrons2012(){ if (DEBUG) std::cout << "Inside goodLooseElectrons2012:: Electron_pt[" << i << "] = " << Electron_pt[i] << std::endl; //if ((Electron_pt[i]>elePtcut)&&(fabs(Electron_eta[i])elePtcut)&&((fabs(Electron_eta[i])<1.4442)||(fabs(Electron_eta[i])>1.5660)&&(fabs(Electron_eta[i])elePtcut)&&(((fabs(Electron_eta[i])<1.4442)||(fabs(Electron_eta[i])>1.5660))&&(fabs(Electron_eta[i]) H4LTools::goodFsrPhotons(){ return goodFsrPhoton; } -std::vector H4LTools::SelectedJets(std::vector ele, std::vector mu){ +std::vector H4LTools::SelectedJets(std::vector ele, std::vector mu) +{ std::vector goodJets; - //unsigned nJ = (*nJet).Get()[0]; - for(unsigned int i=0;iJetPtcut)&&(fabs(Jet_eta[i])=JetEtacut) continue; + for (unsigned int i = 0; i < Jet_pt.size(); i++) + { + if ((Jet_pt[i] 
<= JetPtcut)) continue; + if (fabs(Jet_eta[i]) >= JetEtacut) continue; if (Jet_jetId[i] <= 0) continue; - if ((Jet_pt[i]<50)&&(Jet_puId[i]!=7)) continue; - std::cout<<"DEBUG: Jet_pt.size() = " << Jet_pt.size() << ";" <<" JetID = " << Jet_jetId[i] << ";" << "Jet_pt = " << Jet_pt[i] << ";" << " puID = " << Jet_puId[i] << std::endl; - int overlaptag=0; - TLorentzVector jettest; - jettest.SetPtEtaPhiM(Jet_pt[i],Jet_eta[i],Jet_phi[i],Jet_mass[i]); - for(unsigned int ie=0;ie