From 41f17cb56dc9836f91af9cff43fd7ef94259b3d3 Mon Sep 17 00:00:00 2001 From: Ram Krishna Sharma Date: Wed, 13 Nov 2024 22:55:32 +0100 Subject: [PATCH 1/8] changes to adapt EL9 --- .gitignore | 3 + ExampleInputFileList.txt | 5 - H4LCppModule.py | 154 +++++++--- Helper.py | 26 +- Input_2022.yml | 7 +- README.md | 33 +- external/JHUGen_py2to3.patch | 55 ++++ external/nanoAODTools_py2to3.patch | 129 ++++++++ external/yaml-cpp | 1 - external/yamlcpp_pkg_py2to3.patch | 465 +++++++++++++++++++++++++++++ post_proc.py | 2 +- 11 files changed, 807 insertions(+), 73 deletions(-) create mode 100644 external/JHUGen_py2to3.patch create mode 100644 external/nanoAODTools_py2to3.patch delete mode 160000 external/yaml-cpp create mode 100644 external/yamlcpp_pkg_py2to3.patch diff --git a/.gitignore b/.gitignore index f8b45c1..f175a71 100644 --- a/.gitignore +++ b/.gitignore @@ -158,3 +158,6 @@ br.sm2 *maincc *linkdef.h JHUGenMELA/ +yaml-cpp/ +nanoAOD_skim.wiki/ +*.xml diff --git a/ExampleInputFileList.txt b/ExampleInputFileList.txt index 427f69a..7aaad41 100644 --- a/ExampleInputFileList.txt +++ b/ExampleInputFileList.txt @@ -1,6 +1 @@ /store/mc/Run3Summer22NanoAODv12/GluGluHtoZZto4L_M-125_TuneCP5_13p6TeV_powheg2-JHUGenV752-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2520000/dcae7632-2ea5-4832-80f0-de7d45837a7f.root -/store/mc/Run3Summer22NanoAODv12/GluGluHtoZZto4L_M-125_TuneCP5_13p6TeV_powheg2-JHUGenV752-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2530000/25212339-d2db-48c4-936d-fc47e9de45e9.root -/store/mc/Run3Summer22NanoAODv12/GluGluHtoZZto4L_M-125_TuneCP5_13p6TeV_powheg2-JHUGenV752-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2530000/7a86daa3-36d3-42a4-8962-dc831b8608e0.root -/store/mc/Run3Summer22NanoAODv12/GluGluHtoZZto4L_M-125_TuneCP5_13p6TeV_powheg2-JHUGenV752-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2530000/c7c62ee7-13a9-4de1-962d-64503a6fef05.root 
-/store/mc/Run3Summer22NanoAODv12/GluGluHtoZZto4L_M-125_TuneCP5_13p6TeV_powheg2-JHUGenV752-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2540000/a2a64a49-3404-49db-bf42-cb0ceb5218e2.root -/store/mc/Run3Summer22NanoAODv12/GluGluHtoZZto4L_M-125_TuneCP5_13p6TeV_powheg2-JHUGenV752-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2540000/b46903af-5d85-479e-b776-0803db6c3e2c.root diff --git a/H4LCppModule.py b/H4LCppModule.py index 4360ac1..7ffad6d 100644 --- a/H4LCppModule.py +++ b/H4LCppModule.py @@ -1,5 +1,5 @@ from PhysicsTools.NanoAODTools.postprocessing.framework.eventloop import Module -from PhysicsTools.NanoAODTools.postprocessing.framework.datamodel import Collection +from PhysicsTools.NanoAODTools.postprocessing.framework.datamodel import Collection, Object import ROOT import yaml import os @@ -9,48 +9,109 @@ class HZZAnalysisCppProducer(Module): def __init__(self,year,cfgFile,isMC,isFSR,isFiducialAna, DEBUG=False): - base = "$CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim" - ROOT.gSystem.Load("%s/JHUGenMELA/MELA/data/slc7_amd64_gcc700/libJHUGenMELAMELA.so" % base) - ROOT.gSystem.Load("%s/JHUGenMELA/MELA/data/slc7_amd64_gcc700/libjhugenmela.so" % base) - ROOT.gSystem.Load("%s/JHUGenMELA/MELA/data/slc7_amd64_gcc700/libmcfm_707.so" % base) - ROOT.gSystem.Load("%s/JHUGenMELA/MELA/data/slc7_amd64_gcc700/libcollier.so" % base) - yaml_cpp_path = os.path.join(base, "external/yaml-cpp") + self.loadLibraries() + self.year = year + self.isMC = isMC + self.DEBUG = DEBUG + self.cfgFile = cfgFile + self.cfg = self._load_config(cfgFile) + self.CutFlowTable = ROOT.TH1F('cutFlow','cutFlow',20, 0, 20) + self.CutFlowTable.GetXaxis().SetBinLabel(1, "Events") + self.CutFlowTable.GetXaxis().SetBinLabel(2, "Trigger") + self.CutFlowTable.GetXaxis().SetBinLabel(3, "4Lepton") + self.CutFlowTable.GetXaxis().SetBinLabel(4, "4LeptonOSSF") + self.CutFlowTable.GetXaxis().SetBinLabel(5, "getTightZ") + self.CutFlowTable.GetXaxis().SetBinLabel(6, 
"getTightZ1") + self.CutFlowTable.GetXaxis().SetBinLabel(7, "lep_pTcut") + self.CutFlowTable.GetXaxis().SetBinLabel(8, "lepdRcut") + self.CutFlowTable.GetXaxis().SetBinLabel(9, "QCDcut") + self.CutFlowTable.GetXaxis().SetBinLabel(10, "Smartcut") + self.CutFlowTable.GetXaxis().SetBinLabel(11, "MZ1MZ2cut") + self.CutFlowTable.GetXaxis().SetBinLabel(12, "M4Lcut") + self.CutFlowTable.GetXaxis().SetBinLabel(13, "SR") + self.CutFlowTable.GetXaxis().SetBinLabel(14, "CR") + self.CutFlowTable.GetXaxis().SetBinLabel(15, "3Lepton") + self.CutFlowTable.GetXaxis().SetBinLabel(16, "properID_3lep") + self.CutFlowTable.GetXaxis().SetBinLabel(17, "3LepDRcut") + self.CutFlowTable.GetXaxis().SetBinLabel(18, "3LepPtcut") + self.CutFlowTable.GetXaxis().SetBinLabel(19, "3LepQCDcut") + self.CutFlowTable.GetXaxis().SetBinLabel(20, "3LepTightZ1cut") + + self.worker = ROOT.H4LTools(self.year, self.isMC) + self._initialize_worker(self.cfg) + self.worker.isFSR = isFSR + self._initialize_counters() + self.worker.isFiducialAna = isFiducialAna + pass + + def loadLibraries(self): + base_path = os.getenv('CMSSW_BASE') + '/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim' + yaml_cpp_path = os.path.join(base_path, "external/yaml-cpp") + + # Adding yaml-cpp headers to the include path ROOT.gSystem.AddIncludePath("-I%s/include" % yaml_cpp_path) + libraries = [ + 'libmcfm_710.so', + 'libJHUGenMELAMELA.so', + 'libjhugenmela.so', + 'libcollier.so', + ] + for lib in libraries: + fullPath = os.path.join(base_path, 'JHUGenMELA/MELA/data/el9_amd64_gcc12', lib) + ROOT.gSystem.Load(fullPath) + + # Load the yaml-cpp library yaml_cpp_lib_path = os.path.join(yaml_cpp_path, "build") ROOT.gSystem.Load(os.path.join(yaml_cpp_lib_path, "libyaml-cpp.so")) + + # Load the C++ module if "/H4LTools_cc.so" not in ROOT.gSystem.GetLibraries(): print("Load H4LTools C++ module") - base = "$CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim" - if base: + if base_path: 
ROOT.gROOT.ProcessLine( - ".L %s/src/H4LTools.cc+O" % base) + ".L %s/src/H4LTools.cc+O" % base_path) else: - base = "$CMSSW_BASE//src/PhysicsTools/NanoAODTools" + base_path = "$CMSSW_BASE//src/PhysicsTools/NanoAODTools" ROOT.gSystem.Load("libPhysicsToolsNanoAODTools.so") ROOT.gROOT.ProcessLine( - ".L %s/interface/H4LTools.h" % base) - self.year = year - self.isMC = isMC - self.DEBUG = DEBUG - self.CutFlowTable = ROOT.TH1F('cutFlow','cutFlow',18, -0.5, 17.5) + ".L %s/interface/H4LTools.h" % base_path) + + def _load_config(self, cfgFile): with open(cfgFile, 'r') as ymlfile: - cfg = yaml.load(ymlfile) - self.worker = ROOT.H4LTools(self.year,self.isMC) - self.worker.InitializeElecut(cfg['Electron']['pTcut'],cfg['Electron']['Etacut'],cfg['Electron']['Sip3dcut'],cfg['Electron']['Loosedxycut'],cfg['Electron']['Loosedzcut'], - cfg['Electron']['Isocut'],cfg['Electron']['BDTWP']['LowEta']['LowPT'],cfg['Electron']['BDTWP']['MedEta']['LowPT'],cfg['Electron']['BDTWP']['HighEta']['LowPT'], - cfg['Electron']['BDTWP']['LowEta']['HighPT'],cfg['Electron']['BDTWP']['MedEta']['HighPT'],cfg['Electron']['BDTWP']['HighEta']['HighPT']) - self.worker.InitializeMucut(cfg['Muon']['pTcut'],cfg['Muon']['Etacut'],cfg['Muon']['Sip3dcut'],cfg['Muon']['Loosedxycut'],cfg['Muon']['Loosedzcut'],cfg['Muon']['Isocut'], - cfg['Muon']['Tightdxycut'],cfg['Muon']['Tightdzcut'],cfg['Muon']['TightTrackerLayercut'],cfg['Muon']['TightpTErrorcut'],cfg['Muon']['HighPtBound']) - self.worker.InitializeFsrPhotonCut(cfg['FsrPhoton']['pTcut'],cfg['FsrPhoton']['Etacut'],cfg['FsrPhoton']['Isocut'],cfg['FsrPhoton']['dRlcut'],cfg['FsrPhoton']['dRlOverPtcut']) - self.worker.InitializeJetcut(cfg['Jet']['pTcut'],cfg['Jet']['Etacut']) - self.worker.InitializeEvtCut(cfg['MZ1cut'],cfg['MZZcut'],cfg['Higgscut']['down'],cfg['Higgscut']['up'],cfg['Zmass'],cfg['MZcut']['down'],cfg['MZcut']['up']) + return yaml.safe_load(ymlfile) + + def _initialize_worker(self, cfg): + 
self.worker.InitializeElecut(cfg['Electron']['pTcut'],cfg['Electron']['Etacut'],cfg['Electron']['Sip3dcut'],cfg['Electron']['Loosedxycut'],cfg['Electron']['Loosedzcut'], + cfg['Electron']['Isocut'],cfg['Electron']['BDTWP']['LowEta']['LowPT'],cfg['Electron']['BDTWP']['MedEta']['LowPT'],cfg['Electron']['BDTWP']['HighEta']['LowPT'], + cfg['Electron']['BDTWP']['LowEta']['HighPT'],cfg['Electron']['BDTWP']['MedEta']['HighPT'],cfg['Electron']['BDTWP']['HighEta']['HighPT']) + self.worker.InitializeMucut(cfg['Muon']['pTcut'],cfg['Muon']['Etacut'],cfg['Muon']['Sip3dcut'],cfg['Muon']['Loosedxycut'],cfg['Muon']['Loosedzcut'],cfg['Muon']['Isocut'], + cfg['Muon']['Tightdxycut'],cfg['Muon']['Tightdzcut'],cfg['Muon']['TightTrackerLayercut'],cfg['Muon']['TightpTErrorcut'],cfg['Muon']['HighPtBound']) + self.worker.InitializeFsrPhotonCut(cfg['FsrPhoton']['pTcut'],cfg['FsrPhoton']['Etacut'],cfg['FsrPhoton']['Isocut'],cfg['FsrPhoton']['dRlcut'],cfg['FsrPhoton']['dRlOverPtcut']) + self.worker.InitializeJetcut(cfg['Jet']['pTcut'],cfg['Jet']['Etacut']) + self.worker.InitializeEvtCut(cfg['MZ1cut'],cfg['MZZcut'],cfg['Higgscut']['down'],cfg['Higgscut']['up'],cfg['Zmass'],cfg['MZcut']['down'],cfg['MZcut']['up']) + + def _get_nested_values(self, dictionary, keys): + values = [] + for key in keys: + if isinstance(key, list): + sub_dict = dictionary + for sub_key in key: + sub_dict = sub_dict.get(sub_key, {}) + values.append(sub_dict if sub_dict else 'N/A') + else: + values.append(dictionary.get(key, 'N/A')) + return values + def _initialize_counters(self): + self.passAllEvts = 0 self.passtrigEvts = 0 + self.passMETFilters = 0 + self.passZZ4lEvts = 0 + self.passZZ2l2qEvts = 0 + self.passZZ2l2nuEvts = 0 + self.passZZ2l2nu_emuCR_Evts = 0 self.passZZEvts = 0 - self.cfgFile = cfgFile - self.worker.isFSR = isFSR - self.worker.isFiducialAna = isFiducialAna - pass + def beginJob(self): pass @@ -81,6 +142,9 @@ def endJob(self): def beginFile(self, inputFile, outputFile, inputTree, wrappedOutputTree): 
self.initReaders(inputTree) # initReaders must be called in beginFile self.out = wrappedOutputTree + # Boolean branches for Trigger channels + for TriggerChannel in self.cfg['TriggerChannels']: + self.out.branch(TriggerChannel, "O") self.out.branch("mass4l", "F") self.out.branch("mass4e", "F") self.out.branch("mass4mu", "F") @@ -235,12 +299,21 @@ def analyze(self, event): phi4l = -99 mass4l = 0 rapidity4l = -99 - passedTrig = PassTrig(event, self.cfgFile) - if (passedTrig==True): - self.passtrigEvts += 1 - #keepIt = True - else: + + TriggerMap = {} + passedTrig = False + for TriggerChannel in self.cfg['TriggerChannels']: + TriggerMap[TriggerChannel] = PassTrig(event, self.cfg, TriggerChannel) + + # If any of the trigger channel from TriggerMap passes, then the event is kept else return keepIt + for value in TriggerMap.values(): + if value: + passedTrig = True + break + if not passedTrig: return keepIt + self.passtrigEvts += 1 + electrons = Collection(event, "Electron") muons = Collection(event, "Muon") fsrPhotons = Collection(event, "FsrPhoton") @@ -356,7 +429,7 @@ def analyze(self, event): lep_matchedR03_MomId.append(lep_matchedR03_MomId_vec[i]) lep_matchedR03_MomMomId.append(lep_matchedR03_MomMomId_vec[i]) lep_RelIsoNoFSR.append(lep_RelIsoNoFSR_vec[i]) - + if (foundZZCandidate): self.passZZEvts += 1 pTZ1 = self.worker.Z1.Pt() @@ -459,7 +532,10 @@ def analyze(self, event): mass4l = self.worker.mass4l rapidity4l = self.worker.ZZsystemnofsr.Rapidity() - + + # Fill the branches with the Trigger information for each channel + for TriggerChannel in self.cfg['TriggerChannels']: + self.out.fillBranch(TriggerChannel, TriggerMap[TriggerChannel]) self.out.fillBranch("mass4l",mass4l) self.out.fillBranch("mass4e",mass4e) self.out.fillBranch("mass2e2mu",mass2e2mu) @@ -544,8 +620,8 @@ def analyze(self, event): self.out.fillBranch("lep_matchedR03_MomId", lep_matchedR03_MomId) self.out.fillBranch("lep_matchedR03_PdgId", lep_matchedR03_PdgId) 
self.out.fillBranch("lep_matchedR03_MomMomId", lep_matchedR03_MomMomId) - - + + # self.out.fillBranch("nElectron_Fsr", len(electrons)) # self.out.fillBranch("nMuon_Fsr", len(muons)) diff --git a/Helper.py b/Helper.py index d5ee4c2..a644c67 100644 --- a/Helper.py +++ b/Helper.py @@ -2,16 +2,12 @@ import yaml PI=3.14159 -def PassTrig(event,cfgFile): - - +def PassTrig(event, cfg, TriggerCh): PassTrig = False - with open(cfgFile, 'r') as ymlfile: - cfg = yaml.load(ymlfile) - TriggerList = [] - for TriggerName in cfg['Triggers']: - TriggerList.append(eval(TriggerName)) - + TriggerList = [] + for TriggerName in cfg[TriggerCh]: + TriggerList.append(eval(TriggerName)) + for i in range(len(TriggerList)): PassTrig = PassTrig | TriggerList[i] @@ -88,7 +84,7 @@ def passTight_BDT_Id(electrons,year = '2018'): Tight_Id = [] cutVal = 1000 mvaVal = -1 - + for x in electrons: if (year == '2018'): if (x.pt<=10): @@ -101,7 +97,7 @@ def passTight_BDT_Id(electrons,year = '2018'): if (abs(x.eta) >= 1.479): cutVal = -0.5169136775 mvaVal = x.mvaFall17V2Iso_WP90 - + if (year == '2017'): if (x.pt<=10): if (abs(x.eta) < 0.8): cutVal = 0.9128577458 @@ -111,7 +107,7 @@ def passTight_BDT_Id(electrons,year = '2018'): if (abs(x.eta) < 0.8): cutVal = 0.1559788054 if ((abs(x.eta) >= 0.8)&(abs(x.eta) <1.479)): cutVal = 0.0273863727 if (abs(x.eta) >= 1.479): cutVal = -0.5532483665 - + mvaVal = x.mvaFall17V2Iso_WP90 if (year == '2016'): if (x.pt<=10): @@ -122,14 +118,14 @@ def passTight_BDT_Id(electrons,year = '2018'): if (abs(x.eta) < 0.8): cutVal = 0.3272075608 if ((abs(x.eta) >= 0.8)&(abs(x.eta) <1.479)): cutVal = 0.2468345995 if (abs(x.eta) >= 1.479): cutVal = -0.5955762814 - + mvaVal = x.mvaFall17V2Iso_WP90 if mvaVal > cutVal: Tight_Id.append(True) else: Tight_Id.append(False) - + return Tight_Id def passTight_Id(muons): @@ -140,4 +136,4 @@ def passTight_Id(muons): else: Tight_Id.append(((x.ptErr/x.pt)<0.3)&(abs(x.dxy)<0.2)&(abs(x.dz)<0.5)&(x.nTrackerLayers>5)|x.isPFcand) - return Tight_Id \ No 
newline at end of file + return Tight_Id diff --git a/Input_2022.yml b/Input_2022.yml index d6921f3..04a082e 100644 --- a/Input_2022.yml +++ b/Input_2022.yml @@ -1,4 +1,7 @@ -Triggers: +TriggerChannels: + - Triggers_HZZ4l + +Triggers_HZZ4l: - event.HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL - event.HLT_DoubleEle25_CaloIdL_MW - event.HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass3p8 @@ -44,7 +47,7 @@ Electron: LowEta: LowPT: 0.926614978724 - HighPT: + HighPT: 0.352678981617 MedEta: LowPT: diff --git a/README.md b/README.md index 03f2ca4..1db9347 100644 --- a/README.md +++ b/README.md @@ -6,8 +6,8 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. 1. Step: 1: Get CMSSW release ```bash - cmsrel CMSSW_10_6_30 - cd CMSSW_10_6_30/src + cmsrel CMSSW_14_0_2 + cd CMSSW_14_0_2/src cmsenv ``` @@ -16,7 +16,7 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. ```bash git clone git@github.com:cms-nanoAOD/nanoAOD-tools.git PhysicsTools/NanoAODTools cd PhysicsTools/NanoAODTools - git checkout 65359982275c476834ad4b37363d658166881f12 # Updated to commit on 16 June 2023 in official nanoAOD-tools + git checkout d163c18096fe2c5963ff5a9764bb420b46632178 # Updated to commit on 6 Dec 2023 in official nanoAOD-tools ``` 3. Step: 3: Get our analysis repository @@ -26,9 +26,17 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. 
git clone git@github.com:ram1123/nanoAOD_skim.git PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim cd PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim git checkout ZXCR - cd - - cmsenv - # patch PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/nanoAOD_tools.patch + cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools + git apply python/postprocessing/analysis/nanoAOD_skim/external/nanoAODTools_py2to3.patch + cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim + git clone git@github.com:jbeder/yaml-cpp.git external/yaml-cpp + cd external/yaml-cpp/ + git apply ../yamlcpp_pkg_py2to3.patch + mkdir build + cd build + cmake3 .. -DBUILD_SHARED_LIBS=ON + cmake3 --build . + cd $CMSSW_BASE/src cp PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/data/btag/*.csv PhysicsTools/NanoAODTools/data/btagSF/. scram b voms-proxy-init --voms cms --valid 168:00 @@ -44,17 +52,22 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. ```bash cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim - git clone -b v2.3.5 https://github.com/JHUGen/JHUGenMELA + git clone -b v2.4.2 https://github.com/JHUGen/JHUGenMELA + cd JHUGenMELA + git apply ../external/JHUGen_py2to3.patch + cd .. sh JHUGenMELA/MELA/setup.sh -j 8 - cd JHUGenMELA/MELA - make + cd JHUGenMELA/MELA/data/el9_amd64_gcc12/ + chmod +x *.so ``` 4. Step: 4: interactive running ```bash cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim - python post_proc.py + export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/afs/cern.ch/work/r/rasharma/h2l2nu/checkNewSetup_15July2024/CMSSW_14_0_2/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 + # NOTE: The above export command is needed to run just before running the post_proc.py script. Otherwise, it will give error. + python3 post_proc.py ``` 5. batch job submission. 
diff --git a/external/JHUGen_py2to3.patch b/external/JHUGen_py2to3.patch new file mode 100644 index 0000000..cc56900 --- /dev/null +++ b/external/JHUGen_py2to3.patch @@ -0,0 +1,55 @@ +diff --git a/MELA/makefile b/MELA/makefile +index a7d3b07..c1490c2 100644 +--- a/MELA/makefile ++++ b/MELA/makefile +@@ -19,7 +19,8 @@ MELADIR = $(shell pwd) + MELASRCDIR = $(MELADIR)/src + MELAOBJDIR = $(MELADIR)/obj + # Modify MELALIBDIR for the gcc version as needed +-MELALIBDIR = ${MELA_LIB_PATH} ++# MELALIBDIR = ${MELA_LIB_PATH} ++MELALIBDIR = "./data/el9_amd64_gcc12/" + # _melapkgpath_ should refer to the root compilation path just like MELADIR with an extra '/'. + # If environment variables need to be inserted without expansion for portability, + # you can use '.oODOLLAROo..oOOPEN_BRACKETOo.[YOUR_ENV_VARIABLE].oOCLOSE_BRACKETOo.', +diff --git a/MELA/test/batch.py b/MELA/test/batch.py +index e19b07b..f9c4e15 100755 +--- a/MELA/test/batch.py ++++ b/MELA/test/batch.py +@@ -2,12 +2,12 @@ + + import os + import sys +-import commands ++import subprocess + + from ROOT import TFile + + def processDirectory ( args, dirname, filenames ): +- print "processing " + dirname ++ print("processing " + dirname) + for filename in filenames: + fullname = dirname + "/" + filename + +@@ -55,18 +55,18 @@ def processDirectory ( args, dirname, filenames ): + + + +- print " " * 4 + filename + " with flavor " + str(flavor) + " and sqrts = " + str(sqrts) ++ print(" " * 4 + filename + " with flavor " + str(flavor) + " and sqrts = " + str(sqrts)) + + + + if flavor!=10: # looks like a valid file, prepare string + command = "root -q -b addProbtoTree.C\\(\\\"" + fullname[:-5] + "\\\","+str(flavor)+",-1,"+str(sqrts)+"\\)\n" + #create batch script +- commands.getstatusoutput("cp batchscript.csh batchscript_tmp.csh") ++ subprocess.getstatusoutput("cp batchscript.csh batchscript_tmp.csh") + file = open('batchscript_tmp.csh', 'a') + file.write(command) + file.close() +- commands.getstatusoutput("bsub -q 8nh < 
batchscript_tmp.csh" ) ++ subprocess.getstatusoutput("bsub -q 8nh < batchscript_tmp.csh" ) + #exit(0) + + diff --git a/external/nanoAODTools_py2to3.patch b/external/nanoAODTools_py2to3.patch new file mode 100644 index 0000000..e7885d9 --- /dev/null +++ b/external/nanoAODTools_py2to3.patch @@ -0,0 +1,129 @@ +diff --git a/python/postprocessing/examples/exampleAnalysis.py b/python/postprocessing/examples/exampleAnalysis.py +index 28cb45f..1824684 100644 +--- a/python/postprocessing/examples/exampleAnalysis.py ++++ b/python/postprocessing/examples/exampleAnalysis.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from PhysicsTools.NanoAODTools.postprocessing.framework.eventloop import Module + from PhysicsTools.NanoAODTools.postprocessing.framework.datamodel import Collection + from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import PostProcessor +diff --git a/python/postprocessing/examples/example_postproc.py b/python/postprocessing/examples/example_postproc.py +index 1ace02c..3e7a495 100644 +--- a/python/postprocessing/examples/example_postproc.py ++++ b/python/postprocessing/examples/example_postproc.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from exampleModule import * + from PhysicsTools.NanoAODTools.postprocessing.modules.jme.jetmetHelperRun2 import * + from PhysicsTools.NanoAODTools.postprocessing.modules.jme.jetmetUncertainties import * +diff --git a/python/postprocessing/framework/crabhelper.py b/python/postprocessing/framework/crabhelper.py +index 405b055..53613a7 100644 +--- a/python/postprocessing/framework/crabhelper.py ++++ b/python/postprocessing/framework/crabhelper.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + import os + from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import * + import sys +diff --git a/python/postprocessing/framework/postprocessor.py b/python/postprocessing/framework/postprocessor.py +index c7cc3ca..ea54b4a 100755 +--- 
a/python/postprocessing/framework/postprocessor.py ++++ b/python/postprocessing/framework/postprocessor.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from PhysicsTools.NanoAODTools.postprocessing.framework.jobreport import JobReport + from PhysicsTools.NanoAODTools.postprocessing.framework.preskimming import preSkim + from PhysicsTools.NanoAODTools.postprocessing.framework.output import FriendOutput, FullOutput +@@ -261,8 +261,18 @@ class PostProcessor: + if self.haddFileName: + haddnano = "./haddnano.py" if os.path.isfile( + "./haddnano.py") else "haddnano.py" ++ print("Merging output files into %s" % self.haddFileName) ++ print("") ++ os.system('date') ++ startTime = time.time() ++ print("%s %s %s" % ++ (haddnano, self.outputDir + "/" + self.haddFileName, " ".join(outFileNames))) + os.system("%s %s %s" % +- (haddnano, self.haddFileName, " ".join(outFileNames))) ++ (haddnano, self.outputDir + "/" + self.haddFileName, " ".join(outFileNames))) ++ os.system('date') ++ print("Total time to merge %i files: %.1f sec" % ++ (len(outFileNames), time.time() - startTime)) ++ print("Done") + if self.jobReport: +- self.jobReport.addOutputFile(self.haddFileName) ++ self.jobReport.addOutputFile( self.outputDir + "/" + self.haddFileName) + self.jobReport.save() +diff --git a/python/postprocessing/modules/common/puWeightProducer.py b/python/postprocessing/modules/common/puWeightProducer.py +index b9062b4..7c5a1da 100644 +--- a/python/postprocessing/modules/common/puWeightProducer.py ++++ b/python/postprocessing/modules/common/puWeightProducer.py +@@ -59,8 +59,13 @@ class puWeightProducer(Module): + + def loadHisto(self, filename, hname): + tf = ROOT.TFile.Open(filename) ++ if not tf or tf.IsZombie(): ++ raise IOError(f"Cannot open file: {filename}") + hist = tf.Get(hname) +- hist.SetDirectory(None) ++ if not hist: ++ raise IOError(f"Cannot find histogram {hname} in file: {filename}") ++ ++ hist.SetDirectory(0) # With Python3/EL9 "None" is not working.
So, replaced "None" with "0" + tf.Close() + return hist + +diff --git a/python/postprocessing/modules/jme/jetmetHelperRun2.py b/python/postprocessing/modules/jme/jetmetHelperRun2.py +index d38c623..9d07645 100644 +--- a/python/postprocessing/modules/jme/jetmetHelperRun2.py ++++ b/python/postprocessing/modules/jme/jetmetHelperRun2.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + import os + import sys + import subprocess +@@ -107,7 +107,7 @@ jmsValues = { + 'UL2016': [1.000, 1.000, 1.000], # placeholder + 'UL2017': [1.000, 1.000, 1.000], # placeholder + 'UL2018': [1.000, 1.000, 1.000], # placeholder +-} ++} + + + def createJMECorrector(isMC=True, +diff --git a/scripts/haddnano.py b/scripts/haddnano.py +index 96d7d11..82ac1dc 100755 +--- a/scripts/haddnano.py ++++ b/scripts/haddnano.py +@@ -1,4 +1,4 @@ +-#!/bin/env python ++#!/usr/bin/env python3 + import ROOT + import numpy + import sys +diff --git a/scripts/nano_postproc.py b/scripts/nano_postproc.py +index acd5c80..e788c10 100755 +--- a/scripts/nano_postproc.py ++++ b/scripts/nano_postproc.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + from PhysicsTools.NanoAODTools.postprocessing.framework.postprocessor import PostProcessor + from importlib import import_module + import os +diff --git a/scripts/nano_report.py b/scripts/nano_report.py +index e54b55b..e162a9a 100644 +--- a/scripts/nano_report.py ++++ b/scripts/nano_report.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python3 + # imported from https://github.com/CERN-PH-CMG/cmg-cmssw/blob/0c11a5a0a15c4c3e1a648c9707b06b08b747b0c0/PhysicsTools/Heppy/scripts/heppy_report.py + from optparse import OptionParser + import json diff --git a/external/yaml-cpp b/external/yaml-cpp deleted file mode 160000 index b95aa14..0000000 --- a/external/yaml-cpp +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b95aa146ec3226f31b7b75bef1b5f750af25fb8a diff --git a/external/yamlcpp_pkg_py2to3.patch 
b/external/yamlcpp_pkg_py2to3.patch new file mode 100644 index 0000000..f19fa3d --- /dev/null +++ b/external/yamlcpp_pkg_py2to3.patch @@ -0,0 +1,465 @@ +diff --git a/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py b/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py +index e09a6e0..95c8574 100755 +--- a/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py ++++ b/test/gtest-1.11.0/googletest/scripts/gen_gtest_pred_impl.py +@@ -184,7 +184,7 @@ def Title(word): + def OneTo(n): + """Returns the list [1, 2, 3, ..., n].""" + +- return range(1, n + 1) ++ return list(range(1, n + 1)) + + + def Iter(n, format, sep=''): +@@ -308,12 +308,12 @@ def GenerateFile(path, content): + """Given a file path and a content string + overwrites it with the given content. + """ +- print 'Updating file %s . . .' % path ++ print('Updating file %s . . .' % path) + f = file(path, 'w+') +- print >>f, content, ++ print(content, end=' ', file=f) + f.close() + +- print 'File %s has been updated.' % path ++ print('File %s has been updated.' 
% path) + + + def GenerateHeader(n): +@@ -720,8 +720,8 @@ def _Main(): + unit test.""" + + if len(sys.argv) != 2: +- print __doc__ +- print 'Author: ' + __author__ ++ print(__doc__) ++ print('Author: ' + __author__) + sys.exit(1) + + n = int(sys.argv[1]) +diff --git a/test/gtest-1.11.0/googletest/scripts/release_docs.py b/test/gtest-1.11.0/googletest/scripts/release_docs.py +index 8d24f28..05b15fd 100755 +--- a/test/gtest-1.11.0/googletest/scripts/release_docs.py ++++ b/test/gtest-1.11.0/googletest/scripts/release_docs.py +@@ -127,11 +127,11 @@ class WikiBrancher(object): + def BranchFiles(self): + """Branches the .wiki files needed to be branched.""" + +- print 'Branching %d .wiki files:' % (len(self.files_to_branch),) ++ print('Branching %d .wiki files:' % (len(self.files_to_branch),)) + os.chdir(self.wiki_dir) + for f in self.files_to_branch: + command = 'svn cp %s %s%s' % (f, self.version_prefix, f) +- print command ++ print(command) + os.system(command) + + def UpdateLinksInBranchedFiles(self): +@@ -139,7 +139,7 @@ class WikiBrancher(object): + for f in self.files_to_branch: + source_file = os.path.join(self.wiki_dir, f) + versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f) +- print 'Updating links in %s.' % (versioned_file,) ++ print('Updating links in %s.' % (versioned_file,)) + text = file(source_file, 'r').read() + new_text = self.search_for_re.sub(self.replace_with, text) + file(versioned_file, 'w').write(new_text) +diff --git a/test/gtest-1.11.0/googletest/scripts/upload.py b/test/gtest-1.11.0/googletest/scripts/upload.py +index eba5711..786c281 100755 +--- a/test/gtest-1.11.0/googletest/scripts/upload.py ++++ b/test/gtest-1.11.0/googletest/scripts/upload.py +@@ -46,7 +46,7 @@ against by using the '--rev' option. + # This code is derived from appcfg.py in the App Engine SDK (open source), + # and from ASPN recipe #146306. 
+ +-import cookielib ++import http.cookiejar + import getpass + import logging + import md5 +@@ -57,9 +57,9 @@ import re + import socket + import subprocess + import sys +-import urllib +-import urllib2 +-import urlparse ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse ++import urllib.parse + + try: + import readline +@@ -94,15 +94,15 @@ def GetEmail(prompt): + last_email = last_email_file.readline().strip("\n") + last_email_file.close() + prompt += " [%s]" % last_email +- except IOError, e: ++ except IOError as e: + pass +- email = raw_input(prompt + ": ").strip() ++ email = input(prompt + ": ").strip() + if email: + try: + last_email_file = open(last_email_file_name, "w") + last_email_file.write(email) + last_email_file.close() +- except IOError, e: ++ except IOError as e: + pass + else: + email = last_email +@@ -118,20 +118,20 @@ def StatusUpdate(msg): + msg: The string to print. + """ + if verbosity > 0: +- print msg ++ print(msg) + + + def ErrorExit(msg): + """Print an error message to stderr and exit.""" +- print >>sys.stderr, msg ++ print(msg, file=sys.stderr) + sys.exit(1) + + +-class ClientLoginError(urllib2.HTTPError): ++class ClientLoginError(urllib.error.HTTPError): + """Raised to indicate there was an error authenticating with ClientLogin.""" + + def __init__(self, url, code, msg, headers, args): +- urllib2.HTTPError.__init__(self, url, code, msg, headers, None) ++ urllib.error.HTTPError.__init__(self, url, code, msg, headers, None) + self.args = args + self.reason = args["Error"] + +@@ -177,10 +177,10 @@ class AbstractRpcServer(object): + def _CreateRequest(self, url, data=None): + """Creates a new urllib request.""" + logging.debug("Creating request for: '%s' with payload:\n%s", url, data) +- req = urllib2.Request(url, data=data) ++ req = urllib.request.Request(url, data=data) + if self.host_override: + req.add_header("Host", self.host_override) +- for key, value in 
self.extra_headers.iteritems(): ++ for key, value in self.extra_headers.items(): + req.add_header(key, value) + return req + +@@ -204,7 +204,7 @@ class AbstractRpcServer(object): + account_type = "HOSTED" + req = self._CreateRequest( + url="https://www.google.com/accounts/ClientLogin", +- data=urllib.urlencode({ ++ data=urllib.parse.urlencode({ + "Email": email, + "Passwd": password, + "service": "ah", +@@ -218,7 +218,7 @@ class AbstractRpcServer(object): + response_dict = dict(x.split("=") + for x in response_body.split("\n") if x) + return response_dict["Auth"] +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + if e.code == 403: + body = e.read() + response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) +@@ -240,14 +240,14 @@ class AbstractRpcServer(object): + continue_location = "http://localhost/" + args = {"continue": continue_location, "auth": auth_token} + req = self._CreateRequest("http://%s/_ah/login?%s" % +- (self.host, urllib.urlencode(args))) ++ (self.host, urllib.parse.urlencode(args))) + try: + response = self.opener.open(req) +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + response = e + if (response.code != 302 or + response.info()["location"] != continue_location): +- raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, ++ raise urllib.error.HTTPError(req.get_full_url(), response.code, response.msg, + response.headers, response.fp) + self.authenticated = True + +@@ -270,34 +270,34 @@ class AbstractRpcServer(object): + credentials = self.auth_function() + try: + auth_token = self._GetAuthToken(credentials[0], credentials[1]) +- except ClientLoginError, e: ++ except ClientLoginError as e: + if e.reason == "BadAuthentication": +- print >>sys.stderr, "Invalid username or password." 
++ print("Invalid username or password.", file=sys.stderr) + continue + if e.reason == "CaptchaRequired": +- print >>sys.stderr, ( ++ print(( + "Please go to\n" + "https://www.google.com/accounts/DisplayUnlockCaptcha\n" +- "and verify you are a human. Then try again.") ++ "and verify you are a human. Then try again."), file=sys.stderr) + break + if e.reason == "NotVerified": +- print >>sys.stderr, "Account not verified." ++ print("Account not verified.", file=sys.stderr) + break + if e.reason == "TermsNotAgreed": +- print >>sys.stderr, "User has not agreed to TOS." ++ print("User has not agreed to TOS.", file=sys.stderr) + break + if e.reason == "AccountDeleted": +- print >>sys.stderr, "The user account has been deleted." ++ print("The user account has been deleted.", file=sys.stderr) + break + if e.reason == "AccountDisabled": +- print >>sys.stderr, "The user account has been disabled." ++ print("The user account has been disabled.", file=sys.stderr) + break + if e.reason == "ServiceDisabled": +- print >>sys.stderr, ("The user's access to the service has been " +- "disabled.") ++ print(("The user's access to the service has been " ++ "disabled."), file=sys.stderr) + break + if e.reason == "ServiceUnavailable": +- print >>sys.stderr, "The service is not available; try again later." ++ print("The service is not available; try again later.", file=sys.stderr) + break + raise + self._GetAuthCookie(auth_token) +@@ -334,7 +334,7 @@ class AbstractRpcServer(object): + args = dict(kwargs) + url = "http://%s%s" % (self.host, request_path) + if args: +- url += "?" + urllib.urlencode(args) ++ url += "?" 
+ urllib.parse.urlencode(args) + req = self._CreateRequest(url=url, data=payload) + req.add_header("Content-Type", content_type) + try: +@@ -342,7 +342,7 @@ class AbstractRpcServer(object): + response = f.read() + f.close() + return response +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + if tries > 3: + raise + elif e.code == 401: +@@ -372,35 +372,35 @@ class HttpRpcServer(AbstractRpcServer): + Returns: + A urllib2.OpenerDirector object. + """ +- opener = urllib2.OpenerDirector() +- opener.add_handler(urllib2.ProxyHandler()) +- opener.add_handler(urllib2.UnknownHandler()) +- opener.add_handler(urllib2.HTTPHandler()) +- opener.add_handler(urllib2.HTTPDefaultErrorHandler()) +- opener.add_handler(urllib2.HTTPSHandler()) ++ opener = urllib.request.OpenerDirector() ++ opener.add_handler(urllib.request.ProxyHandler()) ++ opener.add_handler(urllib.request.UnknownHandler()) ++ opener.add_handler(urllib.request.HTTPHandler()) ++ opener.add_handler(urllib.request.HTTPDefaultErrorHandler()) ++ opener.add_handler(urllib.request.HTTPSHandler()) + opener.add_handler(urllib2.HTTPErrorProcessor()) + if self.save_cookies: + self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies") +- self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file) ++ self.cookie_jar = http.cookiejar.MozillaCookieJar(self.cookie_file) + if os.path.exists(self.cookie_file): + try: + self.cookie_jar.load() + self.authenticated = True + StatusUpdate("Loaded authentication cookies from %s" % + self.cookie_file) +- except (cookielib.LoadError, IOError): ++ except (http.cookiejar.LoadError, IOError): + # Failed to load cookies - just ignore them. 
+ pass + else: + # Create an empty cookie file with mode 600 +- fd = os.open(self.cookie_file, os.O_CREAT, 0600) ++ fd = os.open(self.cookie_file, os.O_CREAT, 0o600) + os.close(fd) + # Always chmod the cookie file +- os.chmod(self.cookie_file, 0600) ++ os.chmod(self.cookie_file, 0o600) + else: + # Don't save cookies across runs of update.py. +- self.cookie_jar = cookielib.CookieJar() +- opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar)) ++ self.cookie_jar = http.cookiejar.CookieJar() ++ opener.add_handler(urllib.request.HTTPCookieProcessor(self.cookie_jar)) + return opener + + +@@ -575,7 +575,7 @@ def RunShellWithReturnCode(command, print_output=False, + line = p.stdout.readline() + if not line: + break +- print line.strip("\n") ++ print(line.strip("\n")) + output_array.append(line) + output = "".join(output_array) + else: +@@ -583,7 +583,7 @@ def RunShellWithReturnCode(command, print_output=False, + p.wait() + errout = p.stderr.read() + if print_output and errout: +- print >>sys.stderr, errout ++ print(errout, file=sys.stderr) + p.stdout.close() + p.stderr.close() + return output, p.returncode +@@ -629,11 +629,11 @@ class VersionControlSystem(object): + """Show an "are you sure?" prompt if there are unknown files.""" + unknown_files = self.GetUnknownFiles() + if unknown_files: +- print "The following files are not added to version control:" ++ print("The following files are not added to version control:") + for line in unknown_files: +- print line ++ print(line) + prompt = "Are you sure to continue?(y/N) " +- answer = raw_input(prompt).strip() ++ answer = input(prompt).strip() + if answer != "y": + ErrorExit("User aborted") + +@@ -685,13 +685,13 @@ class VersionControlSystem(object): + else: + type = "current" + if len(content) > MAX_UPLOAD_SIZE: +- print ("Not uploading the %s file for %s because it's too large." % +- (type, filename)) ++ print(("Not uploading the %s file for %s because it's too large." 
% ++ (type, filename))) + file_too_large = True + content = "" + checksum = md5.new(content).hexdigest() + if options.verbose > 0 and not file_too_large: +- print "Uploading %s file for %s" % (type, filename) ++ print("Uploading %s file for %s" % (type, filename)) + url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) + form_fields = [("filename", filename), + ("status", status), +@@ -713,7 +713,7 @@ class VersionControlSystem(object): + + patches = dict() + [patches.setdefault(v, k) for k, v in patch_list] +- for filename in patches.keys(): ++ for filename in list(patches.keys()): + base_content, new_content, is_binary, status = files[filename] + file_id_str = patches.get(filename) + if file_id_str.find("nobase") != -1: +@@ -770,8 +770,8 @@ class SubversionVCS(VersionControlSystem): + words = line.split() + if len(words) == 2 and words[0] == "URL:": + url = words[1] +- scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) +- username, netloc = urllib.splituser(netloc) ++ scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(url) ++ username, netloc = urllib.parse.splituser(netloc) + if username: + logging.info("Removed username from base URL") + if netloc.endswith("svn.python.org"): +@@ -789,12 +789,12 @@ class SubversionVCS(VersionControlSystem): + logging.info("Guessed CollabNet base = %s", base) + elif netloc.endswith(".googlecode.com"): + path = path + "/" +- base = urlparse.urlunparse(("http", netloc, path, params, ++ base = urllib.parse.urlunparse(("http", netloc, path, params, + query, fragment)) + logging.info("Guessed Google Code base = %s", base) + else: + path = path + "/" +- base = urlparse.urlunparse((scheme, netloc, path, params, ++ base = urllib.parse.urlunparse((scheme, netloc, path, params, + query, fragment)) + logging.info("Guessed base = %s", base) + return base +@@ -1202,8 +1202,8 @@ def UploadSeparatePatches(issue, rpc_server, patchset, data, options): + rv = [] + for patch in patches: + if 
len(patch[1]) > MAX_UPLOAD_SIZE: +- print ("Not uploading the patch for " + patch[0] + +- " because the file is too large.") ++ print(("Not uploading the patch for " + patch[0] + ++ " because the file is too large.")) + continue + form_fields = [("filename", patch[0])] + if not options.download_base: +@@ -1211,7 +1211,7 @@ def UploadSeparatePatches(issue, rpc_server, patchset, data, options): + files = [("data", "data.diff", patch[1])] + ctype, body = EncodeMultipartFormData(form_fields, files) + url = "/%d/upload_patch/%d" % (int(issue), int(patchset)) +- print "Uploading patch for " + patch[0] ++ print("Uploading patch for " + patch[0]) + response_body = rpc_server.Send(url, body, content_type=ctype) + lines = response_body.splitlines() + if not lines or lines[0] != "OK": +@@ -1238,7 +1238,8 @@ def GuessVCS(options): + out, returncode = RunShellWithReturnCode(["hg", "root"]) + if returncode == 0: + return MercurialVCS(options, out.strip()) +- except OSError, (errno, message): ++ except OSError as xxx_todo_changeme: ++ (errno, message) = xxx_todo_changeme.args + if errno != 2: # ENOENT -- they don't have hg installed. + raise + +@@ -1254,7 +1255,8 @@ def GuessVCS(options): + "--is-inside-work-tree"]) + if returncode == 0: + return GitVCS(options) +- except OSError, (errno, message): ++ except OSError as xxx_todo_changeme1: ++ (errno, message) = xxx_todo_changeme1.args + if errno != 2: # ENOENT -- they don't have git installed. 
+ raise + +@@ -1301,12 +1303,12 @@ def RealMain(argv, data=None): + data = vcs.GenerateDiff(args) + files = vcs.GetBaseFiles(data) + if verbosity >= 1: +- print "Upload server:", options.server, "(change with -s/--server)" ++ print("Upload server:", options.server, "(change with -s/--server)") + if options.issue: + prompt = "Message describing this patch set: " + else: + prompt = "New issue subject: " +- message = options.message or raw_input(prompt).strip() ++ message = options.message or input(prompt).strip() + if not message: + ErrorExit("A non-empty message is required") + rpc_server = GetRpcServer(options) +@@ -1339,7 +1341,7 @@ def RealMain(argv, data=None): + # Send a hash of all the base file so the server can determine if a copy + # already exists in an earlier patchset. + base_hashes = "" +- for file, info in files.iteritems(): ++ for file, info in files.items(): + if not info[0] is None: + checksum = md5.new(info[0]).hexdigest() + if base_hashes: +@@ -1353,7 +1355,7 @@ def RealMain(argv, data=None): + if not options.download_base: + form_fields.append(("content_upload", "1")) + if len(data) > MAX_UPLOAD_SIZE: +- print "Patch is large, so uploading file patches separately." 
++ print("Patch is large, so uploading file patches separately.") + uploaded_diff_file = [] + form_fields.append(("separate_patches", "1")) + else: +@@ -1393,7 +1395,7 @@ def main(): + try: + RealMain(sys.argv) + except KeyboardInterrupt: +- print ++ print() + StatusUpdate("Interrupted.") + sys.exit(1) + diff --git a/post_proc.py b/post_proc.py index 3e0c9b1..97278cc 100755 --- a/post_proc.py +++ b/post_proc.py @@ -19,7 +19,7 @@ def parse_arguments(): parser = argparse.ArgumentParser() parser.add_argument("-i", "--inputFile", default="", type=str, help="Input file name") parser.add_argument('-o', '--outputFile', default="skimmed_nano.root", type=str, help="Output file name") - parser.add_argument("-n", "--entriesToRun", default=0, type=int, help="Set to 0 if need to run over all entries else put number of entries to run") + parser.add_argument("-n", "--entriesToRun", default=1000, type=int, help="Set to 0 if need to run over all entries else put number of entries to run") parser.add_argument("-d", "--DownloadFileToLocalThenRun", default=True, type=bool, help="Download file to local then run") parser.add_argument("--NOsyst", default=False, action="store_true", help="Do not run systematics") parser.add_argument("--DEBUG", default=False, action="store_true", help="Print debug information") From 167ed959de45d2185cae98fdce9bd103094673bd Mon Sep 17 00:00:00 2001 From: Ram Krishna Sharma Date: Thu, 14 Nov 2024 15:47:04 +0100 Subject: [PATCH 2/8] - Added muon rochester correction - Fixed the bug in M2e2mu and 4l kinematics --- H4LCppModule.py | 31 +++++++++++++------ README.md | 17 ++++------- crab/crab_script.py | 2 +- crab/crab_scriptMC.py | 2 +- post_proc.py | 2 +- scripts/check_condor_stuck_or_not.py | 22 +++++++------- scripts/check_das_sample.py | 24 +++++++-------- scripts/compareHist.py | 45 ++++++++++++++++++++++++++++ 8 files changed, 98 insertions(+), 47 deletions(-) create mode 100644 scripts/compareHist.py diff --git a/H4LCppModule.py b/H4LCppModule.py index 
7ffad6d..f01032d 100644 --- a/H4LCppModule.py +++ b/H4LCppModule.py @@ -81,14 +81,26 @@ def _load_config(self, cfgFile): return yaml.safe_load(ymlfile) def _initialize_worker(self, cfg): - self.worker.InitializeElecut(cfg['Electron']['pTcut'],cfg['Electron']['Etacut'],cfg['Electron']['Sip3dcut'],cfg['Electron']['Loosedxycut'],cfg['Electron']['Loosedzcut'], - cfg['Electron']['Isocut'],cfg['Electron']['BDTWP']['LowEta']['LowPT'],cfg['Electron']['BDTWP']['MedEta']['LowPT'],cfg['Electron']['BDTWP']['HighEta']['LowPT'], - cfg['Electron']['BDTWP']['LowEta']['HighPT'],cfg['Electron']['BDTWP']['MedEta']['HighPT'],cfg['Electron']['BDTWP']['HighEta']['HighPT']) - self.worker.InitializeMucut(cfg['Muon']['pTcut'],cfg['Muon']['Etacut'],cfg['Muon']['Sip3dcut'],cfg['Muon']['Loosedxycut'],cfg['Muon']['Loosedzcut'],cfg['Muon']['Isocut'], - cfg['Muon']['Tightdxycut'],cfg['Muon']['Tightdzcut'],cfg['Muon']['TightTrackerLayercut'],cfg['Muon']['TightpTErrorcut'],cfg['Muon']['HighPtBound']) - self.worker.InitializeFsrPhotonCut(cfg['FsrPhoton']['pTcut'],cfg['FsrPhoton']['Etacut'],cfg['FsrPhoton']['Isocut'],cfg['FsrPhoton']['dRlcut'],cfg['FsrPhoton']['dRlOverPtcut']) - self.worker.InitializeJetcut(cfg['Jet']['pTcut'],cfg['Jet']['Etacut']) - self.worker.InitializeEvtCut(cfg['MZ1cut'],cfg['MZZcut'],cfg['Higgscut']['down'],cfg['Higgscut']['up'],cfg['Zmass'],cfg['MZcut']['down'],cfg['MZcut']['up']) + self.worker.InitializeElecut(*self._get_nested_values(cfg['Electron'], [ + 'pTcut', 'Etacut', 'Sip3dcut', 'Loosedxycut', 'Loosedzcut', + 'Isocut', ['BDTWP', 'LowEta', 'LowPT'], ['BDTWP', 'MedEta', 'LowPT'], + ['BDTWP', 'HighEta', 'LowPT'], ['BDTWP', 'LowEta', 'HighPT'], + ['BDTWP', 'MedEta', 'HighPT'], ['BDTWP', 'HighEta', 'HighPT'] + ])) + self.worker.InitializeMucut(*self._get_nested_values(cfg['Muon'], [ + 'pTcut', 'Etacut', 'Sip3dcut', 'Loosedxycut', 'Loosedzcut', 'Isocut', + 'Tightdxycut', 'Tightdzcut', 'TightTrackerLayercut', 'TightpTErrorcut', + 'HighPtBound' + ])) + 
self.worker.InitializeFsrPhotonCut(*self._get_nested_values(cfg['FsrPhoton'], [ + 'pTcut', 'Etacut', 'Isocut', 'dRlcut', 'dRlOverPtcut' + ])) + self.worker.InitializeJetcut(*self._get_nested_values(cfg['Jet'], ['pTcut', 'Etacut'])) + self.worker.InitializeEvtCut(*self._get_nested_values(cfg, ['MZ1cut', 'MZZcut', + ['Higgscut', 'down'], ['Higgscut', 'up'], + 'Zmass', ['MZcut', 'down'], ['MZcut', 'up'], + ])) + def _get_nested_values(self, dictionary, keys): values = [] @@ -345,6 +357,7 @@ def analyze(self, event): self.worker.findZ1LCandidate() if ((self.worker.nTightEle<2)|(self.worker.nTightMu<2)): pass + self.worker.ZZSelection() Electron_Fsr_pt_vec = self.worker.ElectronFsrPt() Electron_Fsr_eta_vec = self.worker.ElectronFsrEta() @@ -522,7 +535,7 @@ def analyze(self, event): if self.worker.flag4e: mass4e = mass4l if self.worker.flag2e2mu: - mass4e = mass4l + mass2e2mu = mass4l if self.worker.flag4mu: mass4mu = mass4l if (self.worker.isFSR==False & (passedFullSelection | passedZXCRSelection)): diff --git a/README.md b/README.md index 1db9347..0b229e8 100644 --- a/README.md +++ b/README.md @@ -14,21 +14,21 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. 2. Step: 2: Get official nanoAODTools ```bash - git clone git@github.com:cms-nanoAOD/nanoAOD-tools.git PhysicsTools/NanoAODTools + git clone git@github.com:ram1123/nanoAOD-tools.git PhysicsTools/NanoAODTools cd PhysicsTools/NanoAODTools - git checkout d163c18096fe2c5963ff5a9764bb420b46632178 # Updated to commit on 6 Dec 2023 in official nanoAOD-tools + git checkout h4l_dev ``` 3. 
Step: 3: Get our analysis repository ```bash + # Main analysis repository cd $CMSSW_BASE/src git clone git@github.com:ram1123/nanoAOD_skim.git PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim cd PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim git checkout ZXCR - cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools - git apply python/postprocessing/analysis/nanoAOD_skim/external/nanoAODTools_py2to3.patch - cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim + + # External package: yaml-cpp git clone git@github.com:jbeder/yaml-cpp.git external/yaml-cpp cd external/yaml-cpp/ git apply ../yamlcpp_pkg_py2to3.patch @@ -37,17 +37,10 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. cmake3 .. -DBUILD_SHARED_LIBS=ON cmake3 --build . cd $CMSSW_BASE/src - cp PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/data/btag/*.csv PhysicsTools/NanoAODTools/data/btagSF/. scram b voms-proxy-init --voms cms --valid 168:00 ``` - (Optional: Fix git repo) - - ```bash - find PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/.git/ -name "*.py*" -delete - ``` - 4. 
Step: 4: Get the MELA package ```bash diff --git a/crab/crab_script.py b/crab/crab_script.py index 6136e5e..cf2c9c1 100644 --- a/crab/crab_script.py +++ b/crab/crab_script.py @@ -16,4 +16,4 @@ p.run() -print "DONE" +print("DONE") diff --git a/crab/crab_scriptMC.py b/crab/crab_scriptMC.py index 4cb7b72..e0ca1b4 100644 --- a/crab/crab_scriptMC.py +++ b/crab/crab_scriptMC.py @@ -17,4 +17,4 @@ p.run() -print "DONE" +print ("DONE") diff --git a/post_proc.py b/post_proc.py index 97278cc..e9324a0 100755 --- a/post_proc.py +++ b/post_proc.py @@ -76,7 +76,7 @@ def main(): cfgFile = "Input_2022.yml" jsonFileName = "golden_Json/Cert_Collisions2022_355100_362760_Golden.json" sfFileName = "DeepCSV_102XSF_V2.csv" # FIXME: Update for year 2022 - modulesToRun.extend([muonScaleRes2018()]) # FIXME: Update for year 2022 + modulesToRun.extend([muonScaleRes2022()]) # FIXME: Update for year 2022 if "UL18" in first_file or "UL2018" in first_file: """UL2018 for identification of 2018 UL data and UL18 for identification of 2018 UL MC """ diff --git a/scripts/check_condor_stuck_or_not.py b/scripts/check_condor_stuck_or_not.py index 7dcb482..6c13692 100644 --- a/scripts/check_condor_stuck_or_not.py +++ b/scripts/check_condor_stuck_or_not.py @@ -12,9 +12,9 @@ #Oprint output.split("\t") lpcschedd = "" -print type(output) +print(type(output)) for outputs in output.split('\n'): - print outputs + print(outputs) if outputs.find('Submitter') != -1: lpcschedd = outputs.split()[2].split('.')[0] if outputs.find('rasharma') != -1 and outputs.split()[5] == 'R': @@ -23,21 +23,21 @@ """ condor_tail = "condor_tail "+outputs.split()[0]+" -name "+lpcschedd - print "\n","-"*51,"\n\n" - print(style.GREEN + outputs+style.RESET+"\n\n") - print "COMMAND: ",condor_tail - print "\n" + print("\n","-"*51,"\n\n") + print((style.GREEN + outputs+style.RESET+"\n\n")) + print("COMMAND: ",condor_tail) + print("\n") # os.system(condor_tail) output = os.popen(condor_tail).read() foundOrNot = any(match in output for match in 
error_check_string) if foundOrNot: - print(style.RED + "ERROR: Going to kill this job" + style.RESET) + print((style.RED + "ERROR: Going to kill this job" + style.RESET)) killCommand = "condor_rm "+outputs.split()[0]+" -name "+lpcschedd - print(style.RED + "Running Command: " + killCommand + style.RESET) + print((style.RED + "Running Command: " + killCommand + style.RESET)) os.system(killCommand) - print(style.RED + "Successfully killed." + style.RESET) + print((style.RED + "Successfully killed." + style.RESET)) else: - print output -print "\n\n" + print(output) +print("\n\n") diff --git a/scripts/check_das_sample.py b/scripts/check_das_sample.py index b983409..38ad725 100644 --- a/scripts/check_das_sample.py +++ b/scripts/check_das_sample.py @@ -17,11 +17,11 @@ defaultOldCampaign = "v6" defaultNewCampaign = "v7" -print "="*51 +print("="*51) CommandToRun = 'cp input_data_Files/sample_list_'+campaign_to_run.replace(defaultNewCampaign,defaultOldCampaign)+'.dat samples.dat' -print CommandToRun +print(CommandToRun) os.system(CommandToRun) -print "="*51 +print("="*51) with open('samples.dat') as in_file: count = 0 outjdl_file = open("sample_list_"+campaign_to_run+".dat","w") @@ -32,25 +32,25 @@ continue #if count > 27: break count = count +1 - print "="*51,"\n" - print "==> Sample : ",count - print "==> line : ",lines + print("="*51,"\n") + print("==> Sample : ",count) + print("==> line : ",lines) sample_name = lines.split('/')[1] campaign = lines.split('/')[2] tier = lines.split('/')[3] #campaign = lines.split('/')[2].split('-')[0] - print "==> DAS = ",lines - print "==> sample_name = ",sample_name - print "==> campaign = ",campaign - print "==> campaign = ",tier + print("==> DAS = ",lines) + print("==> sample_name = ",sample_name) + print("==> campaign = ",campaign) + print("==> campaign = ",tier) if sample_name.find("SingleMuon") != -1 or sample_name.find("EGamma") != -1 or sample_name.find("SingleElectron") !=-1 or sample_name.find("DoubleEG") != -1 or 
sample_name.find("DoubleMuon") != -1 or sample_name.find("MuonEG") != -1: v6_ntuples = "/"+sample_name+"/"+year_campaign_dict[campaign_to_run][1]+"/"+tier else: v6_ntuples = "/"+sample_name+"/"+year_campaign_dict[campaign_to_run][0]+"/"+tier #output = os.popen('dasgoclient --query="dataset='+lines.strip()+'"').read() - print 'dasgoclient --query="dataset='+v6_ntuples.strip()+'"' + print('dasgoclient --query="dataset='+v6_ntuples.strip()+'"') output = os.popen('dasgoclient --query="dataset='+v6_ntuples.strip()+'"').read() - print "output : ",output,"\n",type(output)," : ",len(output) + print("output : ",output,"\n",type(output)," : ",len(output)) if len(output.strip()) == 0: outjdl_file.write("# NOT FOUND: "+v6_ntuples.strip()+"\n") else: diff --git a/scripts/compareHist.py b/scripts/compareHist.py new file mode 100644 index 0000000..1abfbe3 --- /dev/null +++ b/scripts/compareHist.py @@ -0,0 +1,45 @@ +import uproot +import matplotlib.pyplot as plt +import numpy as np + +# Open the ROOT file and access the TTree +file = uproot.open("skimmed_nano.root") +tree = file["Events"] + +# Define the histogram bins +pt_bins = np.linspace(0, 150, 51) # 50 bins from 0 to 150 + +# Load nMuon and Muon_pt branches +nMuon = tree["nMuon"].array(library="np") +muon_pt = tree["Muon_pt"].array(library="np") +muon_uncorrected_pt = tree["Muon_uncorrected_pt"].array(library="np") + +# Apply the cut nMuon > 0 and select only Muon_pt[0] for events with at least one muon +muon_pt = muon_pt[nMuon > 0] +muon_uncorrected_pt = muon_uncorrected_pt[nMuon > 0] + +# Extract the first muon pt (Muon_pt[0]) for each event +first_muon_pt = np.array([event[0] for event in muon_pt if len(event) > 0]) +first_muon_uncorrected_pt = np.array([event[0] for event in muon_uncorrected_pt if len(event) > 0]) + +# Create histograms +counts1, bin_edges1 = np.histogram(first_muon_pt, bins=pt_bins) +counts2, bin_edges2 = np.histogram(first_muon_uncorrected_pt, bins=pt_bins) + +# Plotting without displaying +fig, ax = 
plt.subplots() + +# Plot histogram1 in blue +ax.step(bin_edges1[:-1], counts1, where="mid", label="Muon_pt[0]", color="blue") + +# Plot histogram2 in red on the same canvas +ax.step(bin_edges2[:-1], counts2, where="mid", label="Muon_uncorrected_pt[0]", color="red") + +# Customize the plot +ax.legend(title="First Muon Transverse Momentum") +ax.set_xlabel("Transverse Momentum (GeV)") +ax.set_ylabel("Counts") + +# Save the plot to a PNG file +fig.savefig("histogram_comparison.png") +plt.close(fig) From 3ca0315279ede4685fa6f4fab25d4678d5e8f874 Mon Sep 17 00:00:00 2001 From: Ram Krishna Sharma Date: Thu, 14 Nov 2024 16:19:19 +0100 Subject: [PATCH 3/8] Updated README --- README.md | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 0b229e8..280a21c 100644 --- a/README.md +++ b/README.md @@ -14,9 +14,7 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. 2. Step: 2: Get official nanoAODTools ```bash - git clone git@github.com:ram1123/nanoAOD-tools.git PhysicsTools/NanoAODTools - cd PhysicsTools/NanoAODTools - git checkout h4l_dev + git clone -b h4l_dev git@github.com:ram1123/nanoAOD-tools.git PhysicsTools/NanoAODTools ``` 3. Step: 3: Get our analysis repository @@ -26,7 +24,7 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. cd $CMSSW_BASE/src git clone git@github.com:ram1123/nanoAOD_skim.git PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim cd PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim - git checkout ZXCR + git checkout ZXCR_EL9 # External package: yaml-cpp git clone git@github.com:jbeder/yaml-cpp.git external/yaml-cpp @@ -37,8 +35,7 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. cmake3 .. -DBUILD_SHARED_LIBS=ON cmake3 --build . cd $CMSSW_BASE/src - scram b - voms-proxy-init --voms cms --valid 168:00 + scram b -j 8 ``` 4. Step: 4: Get the MELA package @@ -60,6 +57,7 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. 
cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/afs/cern.ch/work/r/rasharma/h2l2nu/checkNewSetup_15July2024/CMSSW_14_0_2/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 # NOTE: The above export command is needed to run just before running the post_proc.py script. Otherwise, it will give error. + voms-proxy-init --voms cms --valid 168:00 python3 post_proc.py ``` From 83a334e5b1de751aff836f8e0308aa31a91b798e Mon Sep 17 00:00:00 2001 From: yuji Date: Tue, 14 Oct 2025 14:57:07 +0200 Subject: [PATCH 4/8] bug fixed in GENPart --- ExampleInputFileList.txt | 2 +- ExampleInputFileList_data.txt | 2 +- H4LCppModule.py | 2 + condor_setup_lxplus.py | 176 ++++++++++++---------- input_data_Files/sample_list_v12_2022.dat | 16 +- interface/H4LTools.h | 10 +- scripts/utils/color_style.py | 14 ++ scripts/utils/condorJobHelper.py | 71 +++++++++ scripts/utils/fileshelper.py | 88 +++++++++++ scripts/utils/infoCreaterGit.py | 41 +++++ scripts/utils/makeTarFile.py | 34 +++++ 11 files changed, 359 insertions(+), 97 deletions(-) create mode 100644 scripts/utils/color_style.py create mode 100644 scripts/utils/condorJobHelper.py create mode 100644 scripts/utils/fileshelper.py create mode 100644 scripts/utils/infoCreaterGit.py create mode 100644 scripts/utils/makeTarFile.py diff --git a/ExampleInputFileList.txt b/ExampleInputFileList.txt index 7aaad41..b1b36dc 100644 --- a/ExampleInputFileList.txt +++ b/ExampleInputFileList.txt @@ -1 +1 @@ -/store/mc/Run3Summer22NanoAODv12/GluGluHtoZZto4L_M-125_TuneCP5_13p6TeV_powheg2-JHUGenV752-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2520000/dcae7632-2ea5-4832-80f0-de7d45837a7f.root +/store/mc/Run3Summer22NanoAODv12/TTto2L2Nu_TuneCP5_13p6TeV_powheg-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2520000/66b834d6-61f7-4109-b5ae-54a150d4814b.root diff --git a/ExampleInputFileList_data.txt 
b/ExampleInputFileList_data.txt index 9b3df88..b1b36dc 100644 --- a/ExampleInputFileList_data.txt +++ b/ExampleInputFileList_data.txt @@ -1 +1 @@ -/store/data/Run2022C/DoubleMuon/NANOAOD/16Dec2023-v1/2550000/363316f1-bbad-46a2-9aa1-ef976d65d9ff.root +/store/mc/Run3Summer22NanoAODv12/TTto2L2Nu_TuneCP5_13p6TeV_powheg-pythia8/NANOAODSIM/130X_mcRun3_2022_realistic_v5-v2/2520000/66b834d6-61f7-4109-b5ae-54a150d4814b.root diff --git a/H4LCppModule.py b/H4LCppModule.py index f01032d..ef1f120 100644 --- a/H4LCppModule.py +++ b/H4LCppModule.py @@ -393,6 +393,7 @@ def analyze(self, event): if (passedZ1LSelection): keepIt = True if (passedFullSelection): keepIt = True if (passedZXCRSelection): keepIt = True + Lepointer = self.worker.Lepointer lep_Hindex = [] lep_Hindex_vec = self.worker.lep_Hindex @@ -445,6 +446,7 @@ def analyze(self, event): if (foundZZCandidate): self.passZZEvts += 1 + pTZ1 = self.worker.Z1.Pt() etaZ1 = self.worker.Z1.Eta() phiZ1 = self.worker.Z1.Phi() diff --git a/condor_setup_lxplus.py b/condor_setup_lxplus.py index 97397eb..86fd77c 100644 --- a/condor_setup_lxplus.py +++ b/condor_setup_lxplus.py @@ -7,7 +7,7 @@ import sys import datetime -sys.path.append("Utils/.") +sys.path.append("scripts/utils/.") from color_style import style @@ -41,7 +41,7 @@ def main(args): # Create log files import infoCreaterGit - SumamryOfCurrentSubmission = raw_input("\n\nWrite summary for current job submission: ") + SumamryOfCurrentSubmission = input("\n\nWrite summary for current job submission: ") # in python3 raw_input is renamed as input infoLogFiles = infoCreaterGit.BasicInfoCreater('summary.dat',SumamryOfCurrentSubmission) infoLogFiles.generate_git_patch_and_log() @@ -64,34 +64,33 @@ def main(args): os.system('xrdcp ' + CMSSWRel+".tgz" + ' root://eosuser.cern.ch/'+storeDir+'/' + CMSSWRel+".tgz") post_proc_to_run = "post_proc.py" - command = "python "+post_proc_to_run + command = "python3 "+post_proc_to_run + condor_arguments_list = [] # A list that contains all the 
arguments to be passed for each job - Transfer_Input_Files = ("keep_and_drop.txt") # FIXME: Generalise this. - # Transfer_Input_Files = ("Cert_271036-284044_13TeV_PromptReco_Collisions16_JSON.txt, " + - # "Cert_294927-306462_13TeV_PromptReco_Collisions17_JSON.txt, " + - # "Cert_314472-325175_13TeV_PromptReco_Collisions18_JSON.txt, " + - # "keep_and_drop_data.txt") + outjdl_file = open(condor_file_name+".jdl","w") + condor_queue = "espresso" if args.debug else condor_queue + outjdl_file.write(f"""+JobFlavour = "{condor_queue}" +Executable = {condor_file_name}.sh +Universe = vanilla +Notification = ERROR +Should_Transfer_Files = NO +x509userproxy = $ENV(X509_USER_PROXY) +Output = {output_log_path}/$(logtxt)_$(Process).stdout +Error = {output_log_path}/$(logtxt)_$(Process).err +Log = {output_log_path}/$(logtxt)_$(Process).log +Arguments = "$(infile) $(outfile) $(eospath) $(outfilename)" +queue infile, outfile, eospath, outfilename, logtxt from {condor_file_name}.txt +""") + outjdl_file.close() - #with open('input_data_Files/sample_list_v6_2017_campaign.dat') as in_file: - with open('input_data_Files/'+InputFileFromWhereReadDASNames) as in_file: - outjdl_file = open(condor_file_name+".jdl","w") - outjdl_file.write("+JobFlavour = \""+condor_queue+"\"\n") - outjdl_file.write("Executable = "+condor_file_name+".sh\n") - outjdl_file.write("Universe = vanilla\n") - outjdl_file.write("Notification = ERROR\n") - outjdl_file.write("Should_Transfer_Files = YES\n") - outjdl_file.write("WhenToTransferOutput = ON_EXIT\n") - outjdl_file.write("Transfer_Input_Files = "+Transfer_Input_Files + ", " + post_proc_to_run+"\n") - outjdl_file.write("x509userproxy = $ENV(X509_USER_PROXY)\n") - outjdl_file.write("requirements = TARGET.OpSysAndVer =?= \"AlmaLinux9\"\n") - outjdl_file.write("MY.WantOS = \"el7\"\n") + with open(InputFileFromWhereReadDASNames) as in_file: count = 0 count_jobs = 0 output_string_list = [] for SampleDASName in in_file: if SampleDASName[0] == "#": continue count = 
count +1 - #if count > 1: break + # if count > 1: break print(style.RED +"="*51+style.RESET+"\n") print ("==> Sample : ",count) sample_name = SampleDASName.split('/')[1] @@ -133,8 +132,8 @@ def main(args): # print "==> output_path = ",output_path ######################################## - #print 'dasgoclient --query="file dataset='+SampleDASName.strip()+'"' - #print "..." + # print 'dasgoclient --query="file dataset='+SampleDASName.strip()+'"' + # print "..." if use_custom_eos: xrd_redirector = 'root://cms-xrd-global.cern.ch/' output = os.popen(use_custom_eos_cmd + SampleDASName.strip()).read() @@ -144,14 +143,18 @@ def main(args): count_root_files = 0 for root_file in output.split(): - #print "=> ",root_file + # print "=> ",root_file count_root_files+=1 count_jobs += 1 - outjdl_file.write("Output = "+output_log_path+"/"+sample_name+"_$(Process).stdout\n") - outjdl_file.write("Error = "+output_log_path+"/"+sample_name+"_$(Process).err\n") - outjdl_file.write("Log = "+output_log_path+"/"+sample_name+"_$(Process).log\n") - outjdl_file.write("Arguments = "+(xrd_redirector+root_file)+" "+output_path+" "+EOS_Output_path+ " " + (root_file.split('/')[-1]).split('.')[0] + "\n") - outjdl_file.write("Queue \n") + condor_arguments_list.append( + ( + xrd_redirector + root_file, + output_path, + EOS_Output_path, + (root_file.split("/")[-1]).split(".")[0], + output_path.split("/")[-2], # This argument is used for the log file name + ) + ) if args.debug: # break the for loop after 1 iteration to submit only 1 job break @@ -160,56 +163,65 @@ def main(args): break print("Number of files: ",count_root_files) print("Number of jobs (till now): ",count_jobs) - outjdl_file.close(); + # Write all condor jobs arguments from list to a file with same name as condor_file_name but with .txt extension + with open(condor_file_name+".txt", "w") as f: + for item in condor_arguments_list: + f.write("{}\n".format(",".join(item))) - outScript = open(condor_file_name+".sh","w"); - 
outScript.write('#!/bin/bash'); - outScript.write("\n"+'echo "Starting job on " `date`'); - outScript.write("\n"+'echo "Running on: `uname -a`"'); - outScript.write("\n"+'echo "System software: `cat /etc/redhat-release`"'); - outScript.write("\n"+'source /cvmfs/cms.cern.ch/cmsset_default.sh'); - outScript.write("\n"+'echo "====> List input arguments : " '); - outScript.write("\n"+'echo "1. nanoAOD ROOT file: ${1}"'); - outScript.write("\n"+'echo "2. EOS path to store output root file: ${2}"'); - outScript.write("\n"+'echo "3. EOS path from where we copy CMSSW: ${3}"'); - outScript.write("\n"+'echo "4. Output root file name: ${4}"'); - outScript.write("\n"+'echo "========================================="'); - outScript.write("\n"+'echo "copy cmssw tar file from store area"'); - outScript.write("\n"+'xrdcp -f root://eosuser.cern.ch/${3}/'+CMSSWRel +'.tgz .'); - outScript.write("\n"+'tar -xf '+ CMSSWRel +'.tgz' ); - outScript.write("\n"+'rm '+ CMSSWRel +'.tgz' ); - outScript.write("\n"+'cd ' + CMSSWRel + '/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/'+TOP_LEVEL_DIR_NAME+'/' ); - outScript.write("\n"+'rm *.root'); - outScript.write("\n"+'scramv1 b ProjectRename'); - outScript.write("\n"+'eval `scram runtime -sh`'); - outScript.write("\n"+'echo "========================================="'); - outScript.write("\n"+'echo "cat post_proc.py"'); - outScript.write("\n"+'echo "..."'); - outScript.write("\n"+'cat post_proc.py'); - outScript.write("\n"+'echo "..."'); - outScript.write("\n"+'echo "========================================="'); - if args.NOsyst: - outScript.write("\n"+command + " --entriesToRun 0 --inputFile ${1} --outputFile ${4}_hadd.root --DownloadFileToLocalThenRun True --NOsyst"); - else: - outScript.write("\n"+command + " --entriesToRun 0 --inputFile ${1} --outputFile ${4}_hadd.root --DownloadFileToLocalThenRun True"); - outScript.write("\n"+'echo "====> List root files : " '); - outScript.write("\n"+'ls -ltrh *.root'); - 
outScript.write("\n"+'ls -ltrh *.json'); - outScript.write("\n"+'echo "====> copying *.root file to stores area..." '); - outScript.write("\n"+'if ls ${4}_hadd.root 1> /dev/null 2>&1; then'); - outScript.write("\n"+' echo "File ${4}_hadd.root exists. Copy this."'); - outScript.write("\n"+' echo "xrdcp -f ${4}_hadd.root root://eosuser.cern.ch/${2}/${4}_Skim.root"'); - outScript.write("\n"+' xrdcp -f ${4}_hadd.root root://eosuser.cern.ch/${2}/${4}_Skim.root'); - # outScript.write("\n"+' echo "xrdcp -f ${4}.json root://eosuser.cern.ch/${2}/cutFlow_${4}.json"'); - outScript.write("\n"+'else'); - outScript.write("\n"+' echo "Something wrong: file ${4}_hadd.root does not exists, please check the post_proc.py script."'); - outScript.write("\n"+'fi'); - outScript.write("\n"+'rm *.root'); - outScript.write("\n"+'cd ${_CONDOR_SCRATCH_DIR}'); - outScript.write("\n"+'rm -rf ' + CMSSWRel); - outScript.write("\n"); - outScript.close(); + # Create the executable file for condor jobs + outScript = open(condor_file_name + ".sh", "w") + # Variables for the outScript + entries = 100 if args.debug else 0 + syst_flag="--WithSyst" if args.WithSyst else "" + + outScript.write(f"""#!/bin/bash +echo "Starting job on " `date` +echo "Running on: `uname -a`" +echo "System software: `cat /etc/redhat-release`" +source /cvmfs/cms.cern.ch/cmsset_default.sh +echo "====> List input arguments : " +echo "1. nanoAOD ROOT file: ${{1}}" +echo "2. EOS path to store output root file: ${{2}}" +echo "3. EOS path from where we copy CMSSW: ${{3}}" +echo "4. Output root file name: ${{4}}" +echo "=========================================" +echo "copy cmssw tar file from store area" +xrdcp -f root://eosuser.cern.ch/${{3}}/{CMSSWRel}.tgz . 
+tar -xf {CMSSWRel}.tgz +rm {CMSSWRel}.tgz +cd {CMSSWRel}/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/{TOP_LEVEL_DIR_NAME}/ +rm *.root +scramv1 b ProjectRename +eval `scram runtime -sh` +echo "=========================================" +echo "cat post_proc.py" +echo "..." +cat post_proc.py +echo "..." +echo "=========================================" +output_file=${{4}}_hadd.root +export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 +eval `external/JHUGenMELA/MELA/setup.sh env` +{command} --entriesToRun {entries} --inputFile ${{1}} --outputFile ${{output_file}} --DownloadFileToLocalThenRun True {syst_flag} +echo "====> List root files : " +ls -ltrh *.root +ls -ltrh *.json +echo "====> copying *.root file to stores area..." +if ls ${{output_file}} 1> /dev/null 2>&1; then + echo "File ${{output_file}} exists. Copy this." + echo "xrdcp -f ${{output_file}} root://eosuser.cern.ch/${{2}}/${{4}}_Skim.root" + xrdcp -f ${{output_file}} root://eosuser.cern.ch/${{2}}/${{4}}_Skim.root + echo "xrdcp -f ${{4}}.json root://eosuser.cern.ch/${{2}}/cutFlow_${{4}}.json" + xrdcp -f ${{4}}.json root://eosuser.cern.ch/${{2}}/cutFlow_${{4}}.json +else + echo "Something wrong: file ${{output_file}} does not exists, please check the post_proc.py script." 
+fi
+rm *.root
+cd ${{_CONDOR_SCRATCH_DIR}}
+rm -rf {CMSSWRel}
+""")
+    outScript.close()
     os.system("chmod 777 "+condor_file_name+".sh");

     print("\n#===> Set Proxy Using:")
@@ -219,7 +231,7 @@ def main(args):
     print("export X509_USER_PROXY=~/x509up_u48539")
     print("\n#Submit jobs:")
     print("condor_submit "+condor_file_name+".jdl")
-    #os.system("condor_submit "+condor_file_name+".jdl")
+    # os.system("condor_submit "+condor_file_name+".jdl")

 # Below patch is to format the help command as it is
 class PreserveWhitespaceFormatter(argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
@@ -235,7 +247,7 @@ class PreserveWhitespaceFormatter(argparse.RawTextHelpFormatter, argparse.Argume
     parser.add_argument("--eos_output_path", default='', help="EOS path for output files. By default it is `/eos/user///nanoAOD_ntuples`")
     parser.add_argument("--condor_log_path", default='./', help="Path where condor log should be saved. By default is the current working directory")
     parser.add_argument("--condor_file_name", default='submit_condor_jobs', help="Name for the condor file.")
-    parser.add_argument("--condor_queue", default="testmatch", help="""
+    parser.add_argument("--condor_queue", default="tomorrow", help="""
                         Condor queue options: (Reference: https://twiki.cern.ch/twiki/bin/view/ABPComputing/LxbatchHTCondor#Queue_Flavours)
                                 name            Duration
@@ -251,9 +263,9 @@ class PreserveWhitespaceFormatter(argparse.RawTextHelpFormatter, argparse.Argume
     parser.add_argument("--post_proc", default="post_proc.py", help="Post process script to run.")
     parser.add_argument("--transfer_input_files", default="keep_and_drop.txt", help="Files to be transferred as input.")
-    parser.add_argument("--NOsyst", default=False, action='store_true', help="Run without systematics.")
+    parser.add_argument("--WithSyst", default=False, action='store_true', help="Run with systematics.")
     parser.add_argument("--debug", default=False, action='store_true', help="Debug mode.")

     args = parser.parse_args()
main(args) -#condor_setup_lxplus.py +# condor_setup_lxplus.py diff --git a/input_data_Files/sample_list_v12_2022.dat b/input_data_Files/sample_list_v12_2022.dat index a24f8ba..28fd370 100644 --- a/input_data_Files/sample_list_v12_2022.dat +++ b/input_data_Files/sample_list_v12_2022.dat @@ -1,18 +1,18 @@ ### MC Signal -/SingleMuon/Run2022C-22Sep2023-v1/NANOAOD -/DoubleMuon/Run2022C-22Sep2023-v1/NANOAOD -/Muon/Run2022C-22Sep2023-v1/NANOAOD -/Muon/Run2022D-22Sep2023-v1/NANOAOD +#/SingleMuon/Run2022C-22Sep2023-v1/NANOAOD +#/DoubleMuon/Run2022C-22Sep2023-v1/NANOAOD +#/Muon/Run2022C-22Sep2023-v1/NANOAOD +#/Muon/Run2022D-22Sep2023-v1/NANOAOD /Muon/Run2022E-22Sep2023-v1/NANOAOD /Muon/Run2022F-22Sep2023-v2/NANOAOD /Muon/Run2022G-22Sep2023-v1/NANOAOD -/MuonEG/Run2022C-22Sep2023-v1/NANOAOD -/MuonEG/Run2022D-22Sep2023-v1/NANOAOD +#/MuonEG/Run2022C-22Sep2023-v1/NANOAOD +#/MuonEG/Run2022D-22Sep2023-v1/NANOAOD /MuonEG/Run2022E-22Sep2023-v1/NANOAOD /MuonEG/Run2022F-22Sep2023-v1/NANOAOD /MuonEG/Run2022G-22Sep2023-v1/NANOAOD -/EGamma/Run2022C-22Sep2023-v1/NANOAOD -/EGamma/Run2022D-22Sep2023-v1/NANOAOD +#/EGamma/Run2022C-22Sep2023-v1/NANOAOD +#/EGamma/Run2022D-22Sep2023-v1/NANOAOD /EGamma/Run2022E-22Sep2023-v1/NANOAOD /EGamma/Run2022F-22Sep2023-v1/NANOAOD /EGamma/Run2022G-22Sep2023-v2/NANOAOD diff --git a/interface/H4LTools.h b/interface/H4LTools.h index c658da9..0e43ab1 100644 --- a/interface/H4LTools.h +++ b/interface/H4LTools.h @@ -136,7 +136,7 @@ class H4LTools { void SetGenParts(float GenPart_pt_, int GenPart_genPartIdxMother_, int GenPart_pdgId_){ GenPart_pt.push_back(GenPart_pt_); GenPart_genPartIdxMother.push_back(GenPart_genPartIdxMother_); - GenPart_pdgId.push_back(GenPart_genPartIdxMother_); + GenPart_pdgId.push_back(GenPart_pdgId_); } void SetObjectNum(unsigned nElectron_,unsigned nMuon_,unsigned nJet_,unsigned nFsrPhoton_){ nElectron = nElectron_; @@ -305,7 +305,7 @@ class H4LTools { Jet_pt.clear();Jet_phi.clear();Jet_eta.clear();Jet_mass.clear();Jet_btagDeepC.clear(); 
Jet_jetId.clear();Jet_puId.clear(); FsrPhoton_dROverEt2.clear();FsrPhoton_phi.clear();FsrPhoton_eta.clear();FsrPhoton_pt.clear();FsrPhoton_relIso03.clear(); - GenPart_pt.clear(); + GenPart_pt.clear();GenPart_pdgId.clear();GenPart_genPartIdxMother.clear(); Z_pt.clear();Z_eta.clear();Z_phi.clear(); Z_mass.clear();Z_noFSR_pt.clear(); Z_noFSR_eta.clear(); Z_noFSR_phi.clear(); Z_noFSR_mass.clear(); Z_lepindex1.clear(); Z_lepindex2.clear(); Zlist.clear(); @@ -450,7 +450,7 @@ H4LTools::H4LTools(int year, bool isMC_){ int H4LTools::motherID(int Genidx){ int ID=0; - while(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=2212 || abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])!=21 || abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])>6){ + while(abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])>6){ if(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=GenPart_pdgId[Genidx]){ ID=GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]; return ID; } @@ -458,12 +458,12 @@ int H4LTools::motherID(int Genidx){ Genidx=GenPart_genPartIdxMother[Genidx]; } } - return 2212; + return 0; } int H4LTools::motheridx(int Genidx){ int ID=0; - while(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=2212 || abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])!=21 || abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])>6){ + while(abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])>6){ if(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=GenPart_pdgId[Genidx]){ ID=GenPart_genPartIdxMother[Genidx]; return ID; } diff --git a/scripts/utils/color_style.py b/scripts/utils/color_style.py new file mode 100644 index 0000000..dd1103e --- /dev/null +++ b/scripts/utils/color_style.py @@ -0,0 +1,14 @@ +import os +# Group of Different functions for different styles +os.system("") +class style(): + BLACK = '\033[30m' + RED = '\033[31m' + GREEN = '\033[32m' + YELLOW = '\033[33m' + BLUE = '\033[34m' + MAGENTA = '\033[35m' + CYAN = '\033[36m' + WHITE = '\033[37m' + UNDERLINE = '\033[4m' + RESET = '\033[0m' diff 
--git a/scripts/utils/condorJobHelper.py b/scripts/utils/condorJobHelper.py
new file mode 100644
index 0000000..8fc5282
--- /dev/null
+++ b/scripts/utils/condorJobHelper.py
@@ -0,0 +1,71 @@
+import os
+import sys
+
+class condorJobHelper(object):
+    """docstring for condorJobHelper"""
+    def __init__(self, fileName="test",
+                 listOfFilesToTransfer="",
+                 request_memory=0,
+                 request_cpus=0,
+                 logFilePath = 'test',
+                 logFileName = 'test',
+                 Arguments="",
+                 JobFlavour="longlunch",
+                 Queue=1):
+        self.fileName = fileName
+        self.listOfFilesToTransfer = listOfFilesToTransfer
+        self.request_memory = request_memory
+        self.request_cpus = request_cpus
+        self.logFilePath = logFilePath
+        self.logFileName = logFileName
+        self.Arguments = Arguments
+        self.JobFlavour = JobFlavour
+        self.Queue = Queue
+
+    def jdlFileHeaderCreater(self):
+        outJdl = open(self.fileName+'.jdl','w')
+        outJdl.write('Executable = '+self.fileName+'.sh')
+        outJdl.write('\n'+'Universe = vanilla')
+        outJdl.write('\n'+'Notification = ERROR')
+        outJdl.write('\n'+'Should_Transfer_Files = YES')
+        outJdl.write('\n'+'WhenToTransferOutput = ON_EXIT')
+        outJdl.write('\n'+'Transfer_Input_Files = '+self.fileName+'.sh, ' + self.listOfFilesToTransfer)
+        outJdl.write('\n'+'x509userproxy = $ENV(X509_USER_PROXY)')
+        outJdl.write('\n'+'+JobFlavour = "'+self.JobFlavour+'"')
+        if self.request_memory != 0: outJdl.write('\n'+'request_memory = '+str(self.request_memory))
+        if self.request_cpus != 0: outJdl.write('\n'+'request_cpus = '+ str(self.request_cpus))
+        return self.fileName+'.jdl'
+
+    def jdlFileAppendLogInfo(self):
+        outJdl = open(self.fileName+'.jdl','a')
+        outJdl.write('\n'+'Output = '+self.logFilePath+os.sep+self.logFileName+'_$(Cluster)_$(Process).stdout')
+        outJdl.write('\n'+'Error = '+self.logFilePath+os.sep+self.logFileName+'_$(Cluster)_$(Process).err')
+        outJdl.write('\n'+'Log = '+self.logFilePath+os.sep+self.logFileName+'_$(Cluster)_$(Process).log')
+        outJdl.write('\n'+'Arguments = $(Cluster) $(Process) 
'+self.Arguments)
+        outJdl.write('\n'+'Queue '+str(self.Queue))
+        outJdl.close()
+
+    def shFileHeaderCreater(self):
+        outScript = open(self.fileName+".sh","w");
+        outScript.write('#!/bin/bash')
+        outScript.write('\n'+'echo "Starting job on " `date`')
+        outScript.write('\n'+'echo "Running on: `uname -a`"')
+        outScript.write('\n'+'echo "System software: `cat /etc/redhat-release`"')
+        outScript.write('\n'+'source /cvmfs/cms.cern.ch/cmsset_default.sh')
+        outScript.write('\n'+'echo "'+'#'*51+'"')
+        outScript.write('\n'+'echo "# List of Input Arguments: "')
+        outScript.write('\n'+'echo "'+'#'*51+'"')
+        outScript.write('\n'+'echo "Input Arguments (CluserID): $1" ')
+        outScript.write('\n'+'echo "Input Arguments (ProcessID): $2" ')
+        for x in range(3,len(self.Arguments)+3):
+            outScript.write('\n'+'echo "Input Arguments: $'+str(x)+'" ')
+        outScript.write('\n'+'echo "'+'#'*51+'"')
+        outScript.write('\n'+'')
+        outScript.close()
+        return self.fileName+'.sh'
+
+    def jdlAndShFileCreater(self):
+        jdlFile = self.jdlFileHeaderCreater()
+        self.jdlFileAppendLogInfo()
+        shFile = self.shFileHeaderCreater()
+        return jdlFile, shFile
diff --git a/scripts/utils/fileshelper.py b/scripts/utils/fileshelper.py
new file mode 100644
index 0000000..d311ee7
--- /dev/null
+++ b/scripts/utils/fileshelper.py
@@ -0,0 +1,88 @@
+import os
+from datetime import datetime
+
+CURRENT_DATETIME = datetime.now()
+
+class FileHelper:
+    """
+    A class to assist in creating directories and paths for log and store areas.
+ """ + def __init__(self, log_path, store_area, if_eos=False): + self.if_eos = if_eos + self.log_path = log_path + self.store_area = store_area + self.dir_name = ( + str(CURRENT_DATETIME.year)[-2:] + + str(format(CURRENT_DATETIME.month, '02d')) + + str(format(CURRENT_DATETIME.day, '02d')) + + "_" + + str(format(CURRENT_DATETIME.hour, '02d')) + + str(format(CURRENT_DATETIME.minute, '02d')) + + str(format(CURRENT_DATETIME.second, '02d')) + ) + self.eos_string = '/eos/uscms' + print("==> Time stamp: {}".format(self.dir_name)) + + def create_log_dir_with_date(self): + """ + Create a directory to store log files. + Returns: + string -- Path of the created directory + """ + log_dir_name = os.path.join(self.log_path, self.dir_name) + if not os.path.exists(log_dir_name): + os.makedirs(log_dir_name) + print("==> Created directory for log files: {}".format(log_dir_name)) + return log_dir_name + + def create_store_area(self, path): + """ + Create a directory in the store area. + Arguments: + path {string} -- Name of the directory with path + Returns: + string -- Path of the created directory in the store area + """ + # Reference:https://cernbox.docs.cern.ch/desktop/other-access-methods/eos_xrootd/ + os.system('eos root://eosuser.cern.ch mkdir -p {path}'.format(path = path)) + print("==> Created directory at eos path: {path}".format(path = path)) + # Add a check to see if the directory was created + print("==> Checking if the directory was created...") + if self.if_eos: + if os.path.exists(self.eos_string + path): + print("==> Directory was created.") + else: + print("==> Directory was not created.") + exit(1) + return path + + def create_store_dir_with_date(self, *additional_strings): + """ + Create the store area directory with a date. 
+ Arguments: + additional_strings {tuple} -- Additional directory names to append + Returns: + string -- Path of the created directory + """ + path = self.create_store_area(self.store_area) + for additional_string in additional_strings: + if additional_string: + path = self.create_store_area(os.path.join(path, additional_string)) + path = self.create_store_area(os.path.join(path, self.dir_name)) + return path + + def create_dir_with_date(self): + """ + Create both log and store area directories. + Returns: + string -- Path of the created log directory + string -- Path of the created store area directory + """ + log_dir_name = os.path.join(self.log_path, self.dir_name) + store_area_dir_name = os.path.join(self.store_area, self.dir_name) + os.makedirs(log_dir_name, exist_ok=True) + self.create_store_area(self.store_area) + self.create_store_area(store_area_dir_name) + print("==> Created directory for log files: {log_dir_name}".format(log_dir_name = log_dir_name)) + print("==> Created directory at eos path: {store_area_dir_name}".format(store_area_dir_name = store_area_dir_name)) + return log_dir_name, store_area_dir_name diff --git a/scripts/utils/infoCreaterGit.py b/scripts/utils/infoCreaterGit.py new file mode 100644 index 0000000..3f9dcd0 --- /dev/null +++ b/scripts/utils/infoCreaterGit.py @@ -0,0 +1,41 @@ +import os + +class BasicInfoCreater: + """ + A class to create Git logs and patches for job submissions. 
+ """ + + GITPATCH = 'gitDiff.patch' + + def __init__(self, log_file_name="summary.dat", summary=""): + self.CMSSWDirPath = os.environ['CMSSW_BASE'] + self.CMSSWRel = self.CMSSWDirPath.split("/")[-1] + self.logFileName = log_file_name + self.summary = summary + + def generate_git_log(self): + git_log = ( + """CMSSW Version used: {}\n + Current directory path: {}\n + Summary for current setup: {}\n""".format(self.CMSSWRel, self.CMSSWDirPath, self.summary) + ) + + with open(self.logFileName, "w") as out_script: + out_script.write(git_log) + out_script.write("\n\n============\n== Latest commit summary \n\n\n") + os.system("git log -1 --pretty=tformat:' Commit: %h %n Date: %ad %n Relative time: %ar %n Commit Message: %s' >> {}".format(self.logFileName)) + out_script.write("\n\n============\n\n") + os.system("git log -1 --format='%H' >> {}".format(self.logFileName)) + + def generate_git_patch(self): + os.system('git diff > {}'.format(self.GITPATCH)) + + def generate_git_patch_and_log(self): + self.generate_git_patch() + self.generate_git_log() + + def send_git_log_and_patch_to_eos(self, output_folder): + print("Copying {} to path: {}".format(self.logFileName, output_folder)) + os.system('cp -f {} {}/{}'.format(self.logFileName, output_folder, self.logFileName)) + print("Copying {} to path: {}".format(self.GITPATCH, output_folder)) + os.system('cp -f {} {}/{}'.format(self.GITPATCH, output_folder, self.GITPATCH)) diff --git a/scripts/utils/makeTarFile.py b/scripts/utils/makeTarFile.py new file mode 100644 index 0000000..1a9ad46 --- /dev/null +++ b/scripts/utils/makeTarFile.py @@ -0,0 +1,34 @@ +import tarfile +import os + +EXCLUDE_FILES = [".tmp", ".log", ".stdout", ".stderr"] + +def filter_function(tarinfo): + """ + Helper function for creating the tarball. + + This function filters out unwanted files to be added to the tarball. + + Arguments: + tarinfo {TarInfo} -- TarInfo object + + Returns: + bool -- True if the file should be included, False otherwise. 
+ """ + if os.path.splitext(tarinfo.name)[1] in EXCLUDE_FILES: + return None + else: + return tarinfo + +def make_tarfile(source_dir, output_filename): + """ + Create a tarball from a given directory. + + Arguments: + source_dir {string} -- Name of the directory to be tarballed. + output_filename {string} -- Output file name for the tarball. + """ + with tarfile.open(output_filename, "w:gz") as tar: + print("make_tarfile:: Started creating tar file...") + tar.add(source_dir, arcname=os.path.basename(source_dir), filter=filter_function) + print("make_tarfile:: Done...") From ed581164431aa6ff26b9a0e601a48ea9efb1a455 Mon Sep 17 00:00:00 2001 From: yuji Date: Tue, 21 Oct 2025 14:57:49 +0200 Subject: [PATCH 5/8] yaml version changed --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 280a21c..e192425 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,7 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. # External package: yaml-cpp git clone git@github.com:jbeder/yaml-cpp.git external/yaml-cpp + git checkout 28f93bd cd external/yaml-cpp/ git apply ../yamlcpp_pkg_py2to3.patch mkdir build @@ -55,7 +56,7 @@ nanoAOD skiming code for H->ZZ->2l2Q studies. ```bash cd $CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/afs/cern.ch/work/r/rasharma/h2l2nu/checkNewSetup_15July2024/CMSSW_14_0_2/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 + export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CMSSW_BASE/src/PhysicsTools/NanoAODTools/python/postprocessing/analysis/nanoAOD_skim/JHUGenMELA/MELA/data/el9_amd64_gcc12 # NOTE: The above export command is needed to run just before running the post_proc.py script. Otherwise, it will give error. 
voms-proxy-init --voms cms --valid 168:00 python3 post_proc.py From fff29a9613f068d02fb818f009d407f2eb7e1ea9 Mon Sep 17 00:00:00 2001 From: yuji Date: Wed, 29 Oct 2025 11:40:25 +0100 Subject: [PATCH 6/8] genMatching fixed --- H4LCppModule.py | 9 ++++++++- interface/H4LTools.h | 14 +++++++++----- src/H4LTools.cc | 36 +++++++++++++++++++++++++++--------- 3 files changed, 44 insertions(+), 15 deletions(-) diff --git a/H4LCppModule.py b/H4LCppModule.py index ef1f120..c34db24 100644 --- a/H4LCppModule.py +++ b/H4LCppModule.py @@ -334,7 +334,7 @@ def analyze(self, event): genparts = Collection(event, "GenPart") genjets = Collection(event, "GenJet") for xg in genparts: - self.worker.SetGenParts(xg.pt, xg.genPartIdxMother, xg.pdgId) + self.worker.SetGenParts(xg.pt,xg.eta,xg.phi,xg.mass,xg.status,xg.genPartIdxMother, xg.pdgId) for xm in muons: self.worker.SetMuonsGen(xm.genPartIdx) for xe in electrons: @@ -446,6 +446,13 @@ def analyze(self, event): if (foundZZCandidate): self.passZZEvts += 1 + if (passedZ1LSelection): + if (abs(lep_id[2])==13 and lep_matchedR03_PdgId[2]==23): + print(lep_matchedR03_PdgId[2], lep_matchedR03_MomId[2], lep_matchedR03_MomMomId[2]) + if (abs(lep_id[2])==13 and lep_matchedR03_MomId[2]==23): + print(lep_matchedR03_PdgId[2], lep_matchedR03_MomId[2], lep_matchedR03_MomMomId[2]) + if (abs(lep_id[2])==13 and lep_matchedR03_MomMomId[2]==23): + print(lep_matchedR03_PdgId[2], lep_matchedR03_MomId[2], lep_matchedR03_MomMomId[2]) pTZ1 = self.worker.Z1.Pt() etaZ1 = self.worker.Z1.Eta() diff --git a/interface/H4LTools.h b/interface/H4LTools.h index 0e43ab1..dfadb76 100644 --- a/interface/H4LTools.h +++ b/interface/H4LTools.h @@ -133,8 +133,12 @@ class H4LTools { FsrPhoton_electronIdx.push_back(FsrPhoton_electronIdx_); FsrPhoton_muonIdx.push_back(FsrPhoton_muonIdx_); } - void SetGenParts(float GenPart_pt_, int GenPart_genPartIdxMother_, int GenPart_pdgId_){ + void SetGenParts(float GenPart_pt_, float GenPart_eta_,float GenPart_phi_,float GenPart_mass_,int 
GenPart_status_,int GenPart_genPartIdxMother_, int GenPart_pdgId_){ GenPart_pt.push_back(GenPart_pt_); + GenPart_eta.push_back(GenPart_eta_); + GenPart_phi.push_back(GenPart_phi_); + GenPart_mass.push_back(GenPart_mass_); + GenPart_status.push_back(GenPart_status_); GenPart_genPartIdxMother.push_back(GenPart_genPartIdxMother_); GenPart_pdgId.push_back(GenPart_pdgId_); } @@ -225,7 +229,7 @@ class H4LTools { bool flag4e; bool flag4mu; bool flag2e2mu; - + int GetLepGenMatchedID(TLorentzVector Lep); void LeptonSelection(); void findZ1LCandidate(); std::vector looseEle,looseMu,bestEle,bestMu, tighteleforjetidx, tightmuforjetidx; @@ -305,7 +309,7 @@ class H4LTools { Jet_pt.clear();Jet_phi.clear();Jet_eta.clear();Jet_mass.clear();Jet_btagDeepC.clear(); Jet_jetId.clear();Jet_puId.clear(); FsrPhoton_dROverEt2.clear();FsrPhoton_phi.clear();FsrPhoton_eta.clear();FsrPhoton_pt.clear();FsrPhoton_relIso03.clear(); - GenPart_pt.clear();GenPart_pdgId.clear();GenPart_genPartIdxMother.clear(); + GenPart_pt.clear();GenPart_eta.clear();GenPart_phi.clear();GenPart_mass.clear();GenPart_status.clear();GenPart_pdgId.clear();GenPart_genPartIdxMother.clear(); Z_pt.clear();Z_eta.clear();Z_phi.clear(); Z_mass.clear();Z_noFSR_pt.clear(); Z_noFSR_eta.clear(); Z_noFSR_phi.clear(); Z_noFSR_mass.clear(); Z_lepindex1.clear(); Z_lepindex2.clear(); Zlist.clear(); @@ -394,8 +398,8 @@ class H4LTools { std::vector FsrPhoton_dROverEt2,FsrPhoton_phi,FsrPhoton_pt,FsrPhoton_relIso03,FsrPhoton_eta,FsrPhoton_muonIdx,FsrPhoton_electronIdx; - std::vector GenPart_pt; - std::vector GenPart_pdgId, GenPart_genPartIdxMother; + std::vector GenPart_pt, GenPart_eta, GenPart_phi, GenPart_mass; + std::vector GenPart_pdgId,GenPart_status, GenPart_genPartIdxMother; unsigned nElectron,nMuon,nJet,nGenPart,nFsrPhoton; diff --git a/src/H4LTools.cc b/src/H4LTools.cc index 6078061..7eceff4 100644 --- a/src/H4LTools.cc +++ b/src/H4LTools.cc @@ -361,6 +361,23 @@ std::vector H4LTools::MuonFsrPhi(){ } return lepPhi; }*/ +int 
H4LTools::GetLepGenMatchedID(TLorentzVector Lep){ + int matchidx = -1; + float minDistance = 99; + for(int k=0; k0.3) continue; + if (distance Date: Wed, 29 Oct 2025 11:41:23 +0100 Subject: [PATCH 7/8] genMatching fixed --- H4LCppModule.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/H4LCppModule.py b/H4LCppModule.py index c34db24..74bab88 100644 --- a/H4LCppModule.py +++ b/H4LCppModule.py @@ -446,13 +446,6 @@ def analyze(self, event): if (foundZZCandidate): self.passZZEvts += 1 - if (passedZ1LSelection): - if (abs(lep_id[2])==13 and lep_matchedR03_PdgId[2]==23): - print(lep_matchedR03_PdgId[2], lep_matchedR03_MomId[2], lep_matchedR03_MomMomId[2]) - if (abs(lep_id[2])==13 and lep_matchedR03_MomId[2]==23): - print(lep_matchedR03_PdgId[2], lep_matchedR03_MomId[2], lep_matchedR03_MomMomId[2]) - if (abs(lep_id[2])==13 and lep_matchedR03_MomMomId[2]==23): - print(lep_matchedR03_PdgId[2], lep_matchedR03_MomId[2], lep_matchedR03_MomMomId[2]) pTZ1 = self.worker.Z1.Pt() etaZ1 = self.worker.Z1.Eta() From f8679b518a39ca0e03b0bcb6d4494689217dc141 Mon Sep 17 00:00:00 2001 From: yuji Date: Wed, 29 Oct 2025 19:04:39 +0100 Subject: [PATCH 8/8] genMatching fixed --- interface/H4LTools.h | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/interface/H4LTools.h b/interface/H4LTools.h index dfadb76..df769fb 100644 --- a/interface/H4LTools.h +++ b/interface/H4LTools.h @@ -453,6 +453,7 @@ H4LTools::H4LTools(int year, bool isMC_){ } int H4LTools::motherID(int Genidx){ + if (Genidx==-1) return 0; int ID=0; while(abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])>6){ if(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=GenPart_pdgId[Genidx]){ @@ -466,6 +467,7 @@ int H4LTools::motherID(int Genidx){ } int H4LTools::motheridx(int Genidx){ + if (Genidx==-1) return -1; int ID=0; while(abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])>6){ if(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=GenPart_pdgId[Genidx]){ @@ -475,9 +477,10 @@ int H4LTools::motheridx(int 
Genidx){ Genidx=GenPart_genPartIdxMother[Genidx]; } } - return 0; + return -1; } int H4LTools::mothermotherID(int Genidx){ + if (Genidx==-1) return 0; int ID=0; while(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=2212 || abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])!=21 || abs(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]])>6){ if(GenPart_pdgId[GenPart_genPartIdxMother[Genidx]]!=GenPart_pdgId[Genidx] && GenPart_pdgId[GenPart_genPartIdxMother[GenPart_genPartIdxMother[Genidx]]]!=GenPart_pdgId[Genidx] && GenPart_pdgId[GenPart_genPartIdxMother[GenPart_genPartIdxMother[Genidx]]]!=GenPart_pdgId[GenPart_genPartIdxMother[Genidx]] ){