diff --git a/Templates/CombineCards/VGen/Customize/PtV_template.txt b/Templates/CombineCards/VGen/Customize/PtV_template.txt new file mode 100644 index 00000000..a30c559f --- /dev/null +++ b/Templates/CombineCards/VGen/Customize/PtV_template.txt @@ -0,0 +1,30 @@ +ptV0to3_QCDscale_muR_${w_sample} shape 1 +ptV3to5_QCDscale_muR_${w_sample} shape 1 +ptV5to7_QCDscale_muR_${w_sample} shape 1 +ptV7to9_QCDscale_muR_${w_sample} shape 1 +ptV9to12_QCDscale_muR_${w_sample} shape 1 +ptV12to15_QCDscale_muR_${w_sample} shape 1 +ptV15to20_QCDscale_muR_${w_sample} shape 1 +ptV20to27_QCDscale_muR_${w_sample} shape 1 +ptV27to40_QCDscale_muR_${w_sample} shape 1 +ptV40toInf_QCDscale_muR_${w_sample} shape 1 +ptV0to3_QCDscale_muF_${w_sample} shape 1 +ptV3to5_QCDscale_muF_${w_sample} shape 1 +ptV5to7_QCDscale_muF_${w_sample} shape 1 +ptV7to9_QCDscale_muF_${w_sample} shape 1 +ptV9to12_QCDscale_muF_${w_sample} shape 1 +ptV12to15_QCDscale_muF_${w_sample} shape 1 +ptV15to20_QCDscale_muF_${w_sample} shape 1 +ptV20to27_QCDscale_muF_${w_sample} shape 1 +ptV27to40_QCDscale_muF_${w_sample} shape 1 +ptV40toInf_QCDscale_muF_${w_sample} shape 1 +ptV0to3_QCDscale_muRmuF_${w_sample} shape 1 +ptV3to5_QCDscale_muRmuF_${w_sample} shape 1 +ptV5to7_QCDscale_muRmuF_${w_sample} shape 1 +ptV7to9_QCDscale_muRmuF_${w_sample} shape 1 +ptV9to12_QCDscale_muRmuF_${w_sample} shape 1 +ptV12to15_QCDscale_muRmuF_${w_sample} shape 1 +ptV15to20_QCDscale_muRmuF_${w_sample} shape 1 +ptV20to27_QCDscale_muRmuF_${w_sample} shape 1 +ptV27to40_QCDscale_muRmuF_${w_sample} shape 1 +ptV40toInf_QCDscale_muRmuF_${w_sample} shape 1 diff --git a/Templates/CombineCards/VGen/Customize/muscale_template.txt b/Templates/CombineCards/VGen/Customize/muscale_template.txt new file mode 100644 index 00000000..80f055b8 --- /dev/null +++ b/Templates/CombineCards/VGen/Customize/muscale_template.txt @@ -0,0 +1 @@ +CMS_scale_m shape 1 diff --git a/Templates/CombineCards/VGen/Customize/pdfHessian_template.txt b/Templates/CombineCards/VGen/Customize/pdfHessian_template.txt new file mode 100644 index 00000000..7a069fb2 --- /dev/null +++ b/Templates/CombineCards/VGen/Customize/pdfHessian_template.txt @@ -0,0 +1,100 @@ +pdf1 shape 1 +pdf2 shape 1 +pdf3 shape 1 +pdf4 shape 1 +pdf5 shape 1 +pdf6 shape 1 +pdf7 shape 1 +pdf8 shape 1 +pdf9 shape 1 +pdf10 shape 1 +pdf11 shape 1 +pdf12 shape 1 +pdf13 shape 1 +pdf14 shape 1 +pdf15 shape 1 +pdf16 shape 1 +pdf17 shape 1 +pdf18 shape 1 +pdf19 shape 1 +pdf20 shape 1 +pdf21 shape 1 +pdf22 shape 1 +pdf23 shape 1 +pdf24 shape 1 +pdf25 shape 1 +pdf26 shape 1 +pdf27 shape 1 +pdf28 shape 1 +pdf29 shape 1 +pdf30 shape 1 +pdf31 shape 1 +pdf32 shape 1 +pdf33 shape 1 +pdf34 shape 1 +pdf35 shape 1 +pdf36 shape 1 +pdf37 shape 1 +pdf38 shape 1 +pdf39 shape 1 +pdf40 shape 1 +pdf41 shape 1 +pdf42 shape 1 +pdf43 shape 1 +pdf44 shape 1 +pdf45 shape 1 +pdf46 shape 1 +pdf47 shape 1 +pdf48 shape 1 +pdf49 shape 1 +pdf50 shape 1 +pdf51 shape 1 +pdf52 shape 1 +pdf53 shape 1 +pdf54 shape 1 +pdf55 shape 1 +pdf56 shape 1 +pdf57 shape 1 +pdf58 shape 1 +pdf59 shape 1 +pdf60 shape 1 +pdf61 shape 1 +pdf62 shape 1 +pdf63 shape 1 +pdf64 shape 1 +pdf65 shape 1 +pdf66 shape 1 +pdf67 shape 1 +pdf68 shape 1 +pdf69 shape 1 +pdf70 shape 1 +pdf71 shape 1 +pdf72 shape 1 +pdf73 shape 1 +pdf74 shape 1 +pdf75 shape 1 +pdf76 shape 1 +pdf77 shape 1 +pdf78 shape 1 +pdf79 shape 1 +pdf80 shape 1 +pdf81 shape 1 +pdf82 shape 1 +pdf83 shape 1 +pdf84 shape 1 +pdf85 shape 1 +pdf86 shape 1 +pdf87 shape 1 +pdf88 shape 1 +pdf89 shape 1 +pdf90 shape 1 +pdf91 shape 1 +pdf92 shape 1 +pdf93 shape 1 
+pdf94 shape 1 +pdf95 shape 1 +pdf96 shape 1 +pdf97 shape 1 +pdf98 shape 1 +pdf99 shape 1 +pdf100 shape 1 diff --git a/Templates/CombineCards/VGen/Customize/pdf_template.txt b/Templates/CombineCards/VGen/Customize/pdf_template.txt new file mode 100644 index 00000000..260f0cab --- /dev/null +++ b/Templates/CombineCards/VGen/Customize/pdf_template.txt @@ -0,0 +1 @@ +pdfNNPDF31_${w_sample} shape 1 diff --git a/Templates/CombineCards/VGen/Customize/scale_template.txt b/Templates/CombineCards/VGen/Customize/scale_template.txt new file mode 100644 index 00000000..0e54e493 --- /dev/null +++ b/Templates/CombineCards/VGen/Customize/scale_template.txt @@ -0,0 +1,3 @@ +QCDscale_muR_${w_sample} shape 1 +QCDscale_muF_${w_sample} shape 1 +QCDscale_muRmuF_${w_sample} shape 1 diff --git a/Templates/CombineCards/VGen/WGen_template_mn.txt b/Templates/CombineCards/VGen/WGen_template_mn.txt index 6db80b2e..8d7119fe 100644 --- a/Templates/CombineCards/VGen/WGen_template_mn.txt +++ b/Templates/CombineCards/VGen/WGen_template_mn.txt @@ -16,139 +16,9 @@ process ${w_sample} process 1 rate ${w_yield} ------------ -mWShift100MeV shapeNoConstraint 1 +mWBWShift100MeV shapeNoConstraint 1 lumi2016_13TeV lnN 1.025 CMS_scale_m shape 1 -ptV0to3_QCDscale_muR_${w_sample} shape 1 -ptV3to5_QCDscale_muR_${w_sample} shape 1 -ptV5to7_QCDscale_muR_${w_sample} shape 1 -ptV7to9_QCDscale_muR_${w_sample} shape 1 -ptV9to12_QCDscale_muR_${w_sample} shape 1 -ptV12to15_QCDscale_muR_${w_sample} shape 1 -ptV15to20_QCDscale_muR_${w_sample} shape 1 -ptV20to27_QCDscale_muR_${w_sample} shape 1 -ptV27to40_QCDscale_muR_${w_sample} shape 1 -ptV40toInf_QCDscale_muR_${w_sample} shape 1 -ptV0to3_QCDscale_muF_${w_sample} shape 1 -ptV3to5_QCDscale_muF_${w_sample} shape 1 -ptV5to7_QCDscale_muF_${w_sample} shape 1 -ptV7to9_QCDscale_muF_${w_sample} shape 1 -ptV9to12_QCDscale_muF_${w_sample} shape 1 -ptV12to15_QCDscale_muF_${w_sample} shape 1 -ptV15to20_QCDscale_muF_${w_sample} shape 1 -ptV20to27_QCDscale_muF_${w_sample} shape 1 -ptV27to40_QCDscale_muF_${w_sample} shape 1 -ptV40toInf_QCDscale_muF_${w_sample} shape 1 -ptV0to3_QCDscale_muRmuF_${w_sample} shape 1 -ptV3to5_QCDscale_muRmuF_${w_sample} shape 1 -ptV5to7_QCDscale_muRmuF_${w_sample} shape 1 -ptV7to9_QCDscale_muRmuF_${w_sample} shape 1 -ptV9to12_QCDscale_muRmuF_${w_sample} shape 1 -ptV12to15_QCDscale_muRmuF_${w_sample} shape 1 -ptV15to20_QCDscale_muRmuF_${w_sample} shape 1 -ptV20to27_QCDscale_muRmuF_${w_sample} shape 1 -ptV27to40_QCDscale_muRmuF_${w_sample} shape 1 -ptV40toInf_QCDscale_muRmuF_${w_sample} shape 1 -pdf1 shape 1 -pdf2 shape 1 -pdf3 shape 1 -pdf4 shape 1 -pdf5 shape 1 -pdf6 shape 1 -pdf7 shape 1 -pdf8 shape 1 -pdf9 shape 1 -pdf10 shape 1 -pdf11 shape 1 -pdf12 shape 1 -pdf13 shape 1 -pdf14 shape 1 -pdf15 shape 1 -pdf16 shape 1 -pdf17 shape 1 -pdf18 shape 1 -pdf19 shape 1 -pdf20 shape 1 -pdf21 shape 1 -pdf22 shape 1 -pdf23 shape 1 -pdf24 shape 1 -pdf25 shape 1 -pdf26 shape 1 -pdf27 shape 1 -pdf28 shape 1 -pdf29 shape 1 -pdf30 shape 1 -pdf31 shape 1 -pdf32 shape 1 -pdf33 shape 1 -pdf34 shape 1 -pdf35 shape 1 -pdf36 shape 1 -pdf37 shape 1 -pdf38 shape 1 -pdf39 shape 1 -pdf40 shape 1 -pdf41 shape 1 -pdf42 shape 1 -pdf43 shape 1 -pdf44 shape 1 -pdf45 shape 1 -pdf46 shape 1 -pdf47 shape 1 -pdf48 shape 1 -pdf49 shape 1 -pdf50 shape 1 -pdf51 shape 1 -pdf52 shape 1 -pdf53 shape 1 -pdf54 shape 1 -pdf55 shape 1 -pdf56 shape 1 -pdf57 shape 1 -pdf58 shape 1 -pdf59 shape 1 -pdf60 shape 1 -pdf61 shape 1 -pdf62 shape 1 -pdf63 shape 1 -pdf64 shape 1 -pdf65 shape 1 -pdf66 shape 1 -pdf67 shape 1 -pdf68 
shape 1 -pdf69 shape 1 -pdf70 shape 1 -pdf71 shape 1 -pdf72 shape 1 -pdf73 shape 1 -pdf74 shape 1 -pdf75 shape 1 -pdf76 shape 1 -pdf77 shape 1 -pdf78 shape 1 -pdf79 shape 1 -pdf80 shape 1 -pdf81 shape 1 -pdf82 shape 1 -pdf83 shape 1 -pdf84 shape 1 -pdf85 shape 1 -pdf86 shape 1 -pdf87 shape 1 -pdf88 shape 1 -pdf89 shape 1 -pdf90 shape 1 -pdf91 shape 1 -pdf92 shape 1 -pdf93 shape 1 -pdf94 shape 1 -pdf95 shape 1 -pdf96 shape 1 -pdf97 shape 1 -pdf98 shape 1 -pdf99 shape 1 -pdf100 shape 1 #QCDscale_muR_${w_sample} shape 1 #QCDscale_muF_${w_sample} shape 1 #QCDscale_muRmuF_${w_sample} shape 1 diff --git a/Templates/CombineCards/VGen/WGen_template_mp.txt b/Templates/CombineCards/VGen/WGen_template_mp.txt index 0707b476..d160e138 100644 --- a/Templates/CombineCards/VGen/WGen_template_mp.txt +++ b/Templates/CombineCards/VGen/WGen_template_mp.txt @@ -16,142 +16,6 @@ process ${w_sample} process 1 rate ${w_yield} ------------ -mWShift100MeV shapeNoConstraint 1 +mWBWShift100MeV shapeNoConstraint 1 lumi2016_13TeV lnN 1.025 -CMS_scale_m shape 1 -ptV0to3_QCDscale_muR_${w_sample} shape 1 -ptV3to5_QCDscale_muR_${w_sample} shape 1 -ptV5to7_QCDscale_muR_${w_sample} shape 1 -ptV7to9_QCDscale_muR_${w_sample} shape 1 -ptV9to12_QCDscale_muR_${w_sample} shape 1 -ptV12to15_QCDscale_muR_${w_sample} shape 1 -ptV15to20_QCDscale_muR_${w_sample} shape 1 -ptV20to27_QCDscale_muR_${w_sample} shape 1 -ptV27to40_QCDscale_muR_${w_sample} shape 1 -ptV40toInf_QCDscale_muR_${w_sample} shape 1 -ptV0to3_QCDscale_muF_${w_sample} shape 1 -ptV3to5_QCDscale_muF_${w_sample} shape 1 -ptV5to7_QCDscale_muF_${w_sample} shape 1 -ptV7to9_QCDscale_muF_${w_sample} shape 1 -ptV9to12_QCDscale_muF_${w_sample} shape 1 -ptV12to15_QCDscale_muF_${w_sample} shape 1 -ptV15to20_QCDscale_muF_${w_sample} shape 1 -ptV20to27_QCDscale_muF_${w_sample} shape 1 -ptV27to40_QCDscale_muF_${w_sample} shape 1 -ptV40toInf_QCDscale_muF_${w_sample} shape 1 -ptV0to3_QCDscale_muRmuF_${w_sample} shape 1 -ptV3to5_QCDscale_muRmuF_${w_sample} shape 1 -ptV5to7_QCDscale_muRmuF_${w_sample} shape 1 -ptV7to9_QCDscale_muRmuF_${w_sample} shape 1 -ptV9to12_QCDscale_muRmuF_${w_sample} shape 1 -ptV12to15_QCDscale_muRmuF_${w_sample} shape 1 -ptV15to20_QCDscale_muRmuF_${w_sample} shape 1 -ptV20to27_QCDscale_muRmuF_${w_sample} shape 1 -ptV27to40_QCDscale_muRmuF_${w_sample} shape 1 -ptV40toInf_QCDscale_muRmuF_${w_sample} shape 1 -pdf1 shape 1 -pdf2 shape 1 -pdf3 shape 1 -pdf4 shape 1 -pdf5 shape 1 -pdf6 shape 1 -pdf7 shape 1 -pdf8 shape 1 -pdf9 shape 1 -pdf10 shape 1 -pdf11 shape 1 -pdf12 shape 1 -pdf13 shape 1 -pdf14 shape 1 -pdf15 shape 1 -pdf16 shape 1 -pdf17 shape 1 -pdf18 shape 1 -pdf19 shape 1 -pdf20 shape 1 -pdf21 shape 1 -pdf22 shape 1 -pdf23 shape 1 -pdf24 shape 1 -pdf25 shape 1 -pdf26 shape 1 -pdf27 shape 1 -pdf28 shape 1 -pdf29 shape 1 -pdf30 shape 1 -pdf31 shape 1 -pdf32 shape 1 -pdf33 shape 1 -pdf34 shape 1 -pdf35 shape 1 -pdf36 shape 1 -pdf37 shape 1 -pdf38 shape 1 -pdf39 shape 1 -pdf40 shape 1 -pdf41 shape 1 -pdf42 shape 1 -pdf43 shape 1 -pdf44 shape 1 -pdf45 shape 1 -pdf46 shape 1 -pdf47 shape 1 -pdf48 shape 1 -pdf49 shape 1 -pdf50 shape 1 -pdf51 shape 1 -pdf52 shape 1 -pdf53 shape 1 -pdf54 shape 1 -pdf55 shape 1 -pdf56 shape 1 -pdf57 shape 1 -pdf58 shape 1 -pdf59 shape 1 -pdf60 shape 1 -pdf61 shape 1 -pdf62 shape 1 -pdf63 shape 1 -pdf64 shape 1 -pdf65 shape 1 -pdf66 shape 1 -pdf67 shape 1 -pdf68 shape 1 -pdf69 shape 1 -pdf70 shape 1 -pdf71 shape 1 -pdf72 shape 1 -pdf73 shape 1 -pdf74 shape 1 -pdf75 shape 1 -pdf76 shape 1 -pdf77 shape 1 -pdf78 shape 1 -pdf79 shape 1 -pdf80 
shape 1 -pdf81 shape 1 -pdf82 shape 1 -pdf83 shape 1 -pdf84 shape 1 -pdf85 shape 1 -pdf86 shape 1 -pdf87 shape 1 -pdf88 shape 1 -pdf89 shape 1 -pdf90 shape 1 -pdf91 shape 1 -pdf92 shape 1 -pdf93 shape 1 -pdf94 shape 1 -pdf95 shape 1 -pdf96 shape 1 -pdf97 shape 1 -pdf98 shape 1 -pdf99 shape 1 -pdf100 shape 1 -#QCDscale_muR_${w_sample} shape 1 -#QCDscale_muF_${w_sample} shape 1 -#QCDscale_muRmuF_${w_sample} shape 1 ${card_append} - - diff --git a/Utilities/python/CombineCardTools.py b/Utilities/python/CombineCardTools.py index 17d7af1d..a7401cca 100644 --- a/Utilities/python/CombineCardTools.py +++ b/Utilities/python/CombineCardTools.py @@ -1,8 +1,8 @@ import logging import re -import ConfigureJobs -import HistTools -import OutputTools +from . import ConfigureJobs +from . import HistTools +from . import OutputTools from prettytable import PrettyTable import os import ROOT @@ -32,6 +32,7 @@ def __init__(self): self.theoryVariations = {} self.extraCardVars = "" self.cardGroups = "" + self.customizeCards = [] self.addOverflow = False def setPlotGroups(self, xsecMap): @@ -49,6 +50,12 @@ def setCorrelateScaleUnc(self, correlate): def setRemoveZeros(self, removeZeros): self.removeZeros = removeZeros + def addCustomizeCard(self, customize): + if not os.path.isfile(customize): + raise ValueError("Did not find customize cards %s" % customize) + self.customizeCards.append(customize) + return len(open(customize).readlines()) + def setUnrolled(self, binsx, binsy): self.isUnrolledFit = True self.unrolledBinsX = binsx @@ -217,7 +224,7 @@ def combineChannels(self, group, processName, central=True): fitVariable = self.getFitVariable(group.GetName()) if central: variations.insert(0, "") - for label, channels in self.channelsToCombine.iteritems(): + for label, channels in self.channelsToCombine.items(): if label not in self.yields: self.yields[label] = {} for var in variations: @@ -324,6 +331,7 @@ def loadHistsForProcess(self, processName, scaleNorm=1, expandedTheory=True): if self.isUnrolledFit: pdfFunction = pdfFunction.replace("get", "getTransformed3D") args = args[0:1] + [HistTools.makeUnrolledHist, [self.unrolledBinsX, self.unrolledBinsY]] + args[1:] +# print(args) updatePdfs = getattr(HistTools, pdfFunction)(*args) pdfHists += updatePdfs @@ -448,7 +456,7 @@ def writeMetaInfo(self): def writeCards(self, chan, nuisances, label="", outlabel="", extraArgs={}): chan_dict = self.yields[chan].copy() chan_dict.update(extraArgs) - for key, value in extraArgs.iteritems(): + for key, value in extraArgs.items(): if "yield:" in value: chan_dict[key] = chan_dict[value.replace("yield:", "")] chan_dict["nuisances"] = nuisances @@ -458,10 +466,12 @@ def writeCards(self, chan, nuisances, label="", outlabel="", extraArgs={}): outputCard = self.templateName.split("/")[-1].format(channel=chan, label=label) outputCard = outputCard.replace("template", outlabel) outputCard = outputCard.replace("__", "_") - ConfigureJobs.fillTemplatedFile(self.templateName.format(channel=chan, label=label), + templates = [self.templateName] + self.customizeCards + ConfigureJobs.fillTemplatedFile( + [x.format(channel=chan, label=label) for x in templates], "/".join([self.outputFolder, outputCard]), chan_dict ) chan_dict.pop("card_append") - print chan_dict + print (chan_dict) diff --git a/Utilities/python/ConfigHistFactory.py b/Utilities/python/ConfigHistFactory.py new file mode 120000 index 00000000..a66581eb --- /dev/null +++ b/Utilities/python/ConfigHistFactory.py @@ -0,0 +1 @@ 
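The reworked card writing above (CombineCardTools.addCustomizeCard, which registers a customize card and returns its line count, plus writeCards passing the base template followed by every registered customize card to ConfigureJobs.fillTemplatedFile) amounts to concatenating several string.Template files into one datacard with a single placeholder substitution. A minimal standalone sketch of that pattern follows; the paths and sample name in the usage line are illustrative only, not taken from a real run:

```python
import os
import string


def fill_templated_file(template_files, out_file_name, template_dict):
    """Substitute ${...} placeholders in each template file and concatenate the results."""
    pieces = []
    for template in template_files:
        if not os.path.isfile(template):
            raise ValueError("Template file %s is not a valid file!" % template)
        with open(template) as template_file:
            pieces.append(string.Template(template_file.read()).substitute(template_dict))
    with open(out_file_name, "w") as out_file:
        out_file.write("".join(pieces))


# Hypothetical usage: fill only the pt(V)-split scale block for one sample
# (the PtV customize card uses a single ${w_sample} placeholder).
fill_templated_file(
    ["Templates/CombineCards/VGen/Customize/PtV_template.txt"],
    "ptv_nuisances.txt",
    {"w_sample": "wpmunu_minnlo_prod"},
)
```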
+/afs/cern.ch/work/m/mumuhamm/WBoson/CMSSW_11_0_0/src/Data_Manager/AnalysisDatasetManager/Utilities/python/ConfigHistFactory.py \ No newline at end of file diff --git a/Utilities/python/ConfigHistTools.py b/Utilities/python/ConfigHistTools.py new file mode 120000 index 00000000..53ef3c34 --- /dev/null +++ b/Utilities/python/ConfigHistTools.py @@ -0,0 +1 @@ +/afs/cern.ch/work/m/mumuhamm/WBoson/CMSSW_11_0_0/src/Data_Manager/AnalysisDatasetManager/Utilities/python/ConfigHistTools.py \ No newline at end of file diff --git a/Utilities/python/ConfigureJobs.py b/Utilities/python/ConfigureJobs.py index 1de2fe5e..b9dcc8e9 100644 --- a/Utilities/python/ConfigureJobs.py +++ b/Utilities/python/ConfigureJobs.py @@ -10,6 +10,7 @@ import socket import logging import logging +import os #try: import configparser #except: @@ -50,7 +51,9 @@ def getChannels(analysis='WZ'): def getManagerName(): config_name = "" try: - config_name = "Templates/config.%s" % os.getlogin() + scriptdir = os.path.dirname(os.path.realpath(__file__)) + path = "/".join(scriptdir.split("/")[:-2]) + config_name = "%s/Templates/config.%s" % (path, os.getlogin()) except OSError: pass default_name = "AnalysisDatasetManager" @@ -69,7 +72,9 @@ def getManagerName(): def getManagerPath(): config_name = "" try: - config_name = "Templates/config.%s" % os.getlogin() + scriptdir = os.path.dirname(os.path.realpath(__file__)) + path = "/".join(scriptdir.split("/")[:-2]) + config_name = "%s/Templates/config.%s" % (path, os.getlogin()) except OSError: pass if not os.path.isfile(config_name): @@ -83,7 +88,7 @@ def getManagerPath(): if "dataset_manager_path" not in config['Setup']: raise ValueError("dataset_manager_path not specified in config file %s" % config_name) - return config['Setup']['dataset_manager_path'] + "/" + return os.path.expanduser(config['Setup']['dataset_manager_path']) def getCombinePath(): config = configparser.ConfigParser() @@ -182,15 +187,20 @@ def getListOfHDFSFiles(file_path): # TODO: Would be good to switch the order of the last two arguments # completely deprecate manager_path without breaking things def getListOfFiles(filelist, selection, manager_path="", analysis=""): - if manager_path is "": + if manager_path == "": manager_path = getManagerPath() + print("The main manager path = %s" % manager_path) data_path = "%s/%s/FileInfo" % (manager_path, getManagerName()) + print("The data path = %s " % data_path) group_path = "%s/AnalysisDatasetManager/PlotGroups" % manager_path + print("The group path = %s" % group_path) data_info = UserInput.readAllInfo("/".join([data_path, "data/*"])) + #print(data_info) mc_info = UserInput.readAllInfo("/".join([data_path, "montecarlo/*"])) + #print(mc_info) analysis_info = UserInput.readInfo("/".join([data_path, analysis, selection])) \ if analysis != "" else [] - valid_names = (data_info.keys() + mc_info.keys()) if not analysis_info else analysis_info.keys() + valid_names = (list(data_info.keys()) + list(mc_info.keys())) if not analysis_info else list(analysis_info.keys()) group_names = UserInput.readAllInfo("%s/%s.py" %(group_path, analysis)) if analysis else dict() names = [] for name in filelist: @@ -202,7 +212,7 @@ def getListOfFiles(filelist, selection, manager_path="", analysis=""): elif "WZxsec2016" in name: dataset_file = manager_path + \ "%s/FileInfo/WZxsec2016/%s.json" % (getManagerPath(), selection) - allnames = json.load(open(dataset_file)).keys() + allnames = list(json.load(open(dataset_file)).keys()) if "nodata" in name: nodata = [x for x in allnames if "data" not in x] names += 
nodata @@ -227,7 +237,7 @@ def getListOfFiles(filelist, selection, manager_path="", analysis=""): continue else: names += [name] - if not names or len(filter(lambda x: x != '', names)) == 0: + if not names or len(list(filter(lambda x: x != '', names))) == 0: raise RuntimeError("No processes found matching pattern '%s'" % filelist) return [str(i) for i in names] @@ -245,15 +255,23 @@ def getXrdRedirector(filepath=None): return usredir return globalredir -def fillTemplatedFile(template_file_name, out_file_name, template_dict): - with open(template_file_name, "r") as templateFile: - source = string.Template(templateFile.read()) - result = source.substitute(template_dict) - with open(out_file_name, "w") as outFile: - outFile.write(result) +def fillTemplatedFile(template_files, out_file_name, template_dict): + result = "" + for template in template_files: + if not os.path.isfile(template): + print(template) + raise ValueError("Template file %s is not a valid file!" % template) + with open(template, "r") as templateFile: + source = string.Template(templateFile.read()) + filled = source.substitute(template_dict) + result += filled + with open(out_file_name, "w") as outFile: + outFile.write(result) + print('=====================================print the results related to datacard production======================================') + print(result) def getListOfFilesWithXSec(filelist, manager_path="", selection="ntuples"): - if manager_path is "": + if manager_path == "": manager_path = getManagerPath() data_path = "%s/%s/FileInfo" % (manager_path, getManagerName()) files = getListOfFiles(filelist, selection, manager_path) @@ -275,7 +293,7 @@ def getListOfFilesWithXSec(filelist, manager_path="", selection="ntuples"): return info def getListOfFilesWithPath(filelist, analysis, selection, das=True, manager_path=""): - if manager_path is "": + if manager_path == "": manager_path = getManagerPath() data_path = "%s/%s/FileInfo" % (manager_path, getManagerName()) files = getListOfFiles(filelist, selection, manager_path, analysis) @@ -337,7 +355,7 @@ def getConfigFileName(config_file_name): config_file_name) def getInputFilesPath(sample_name, selection, analysis, manager_path=""): - if manager_path is "": + if manager_path == "": manager_path = getManagerPath() if ".root" in sample_name: logging.info("Using simple file %s" % sample_name) diff --git a/Utilities/python/HistTools.py b/Utilities/python/HistTools.py index 0af6b4f6..529f1d0a 100644 --- a/Utilities/python/HistTools.py +++ b/Utilities/python/HistTools.py @@ -46,7 +46,7 @@ def makeUnrolledHist(init_2D_hist, xbins, ybins, name="", overflow=False): ybinned_hist = ybinned_hist.Rebin(len(xbins)-1, hist_name+"_rebin", xbins) hists_half_rolled.append(ybinned_hist) - if name is "": + if name == "": name = init_2D_hist.GetName().replace("2D", "unrolled") unrolled_hist = ROOT.TH1D(name, "Unrolled", nbins, 0, nbins) unrolled_hist.SetDirectory(init_2D_hist.GetDirectory()) diff --git a/Utilities/python/OutputTools.py b/Utilities/python/OutputTools.py index eb2ae1f2..f1cad245 100644 --- a/Utilities/python/OutputTools.py +++ b/Utilities/python/OutputTools.py @@ -42,6 +42,5 @@ def writeOutputListItem(item, directory): directory.cd() item.Write() else: - print "Couldn't write output item:" - print repr(item) + logging.warning("Couldn't write output item: %s " % repr(item)) directory.cd() diff --git a/Utilities/python/SelectorTools.py b/Utilities/python/SelectorTools.py index 7ce24210..9ff4f84c 100755 --- a/Utilities/python/SelectorTools.py +++ 
b/Utilities/python/SelectorTools.py @@ -2,7 +2,7 @@ import ROOT import glob import datetime -import ConfigureJobs, OutputTools +from . import ConfigureJobs, OutputTools import sys import os import multiprocessing @@ -136,7 +136,7 @@ def setFileList(self, list_of_files, nPerJob, jobNum): raise ValueError("%s is not a valid file." % list_of_files) filelist = [f.split("#")[0].strip() for f in open(list_of_files).readlines()] # Remove empty/commented lines - filelist = filter(lambda x: len(x) > 2, filelist) + filelist = list(filter(lambda x: len(x) > 2, filelist)) nPerJob = int(nPerJob) if nPerJob < 1: raise ValueError("Number of files per job must be >= 1.") @@ -199,9 +199,9 @@ def setDatasets(self, datalist): def expandDatasetFilePaths(self, nsplits): nFiles = 0 - for dataset, file_path in self.datasets.iteritems(): + for dataset, file_path in self.datasets.items(): maxPerSet = self.maxFiles/len(self.datasets) - if dataset == self.datasets.keys()[-1]: + if dataset == list(self.datasets.keys())[-1]: maxPerSet = self.maxFiles-nFiles files = [] for f in file_path: @@ -216,7 +216,7 @@ def applySelector(self): if self.numCores > 1: self.processParallelByDataset(self.datasets, chan) else: - for dataset, file_path in self.datasets.iteritems(): + for dataset, file_path in self.datasets.items(): self.processDataset(dataset, file_path, chan) if len(self.channels) > 1 and self.numCores > 1: tempfiles = [self.outfile_name.replace(".root", "_%s.root" % c) for c in self.channels] @@ -293,7 +293,10 @@ def writeOutput(self, output_list, chan, processes, dataset, addSumweights): else: dataset_list.Add(sumweights_hist.Clone()) OutputTools.writeOutputListItem(dataset_list, self.current_file) - map(lambda x: x.Delete(), dataset_list) + #print("why the dataset list is %s is invalid? 
" % dataset_list) + #map(lambda x: x.Delete(), dataset_list) + for f in dataset_list: + f.Delete() del dataset_list del output_list @@ -325,26 +328,27 @@ def getTreeName(self, chan): return ("%s/ntuple" % channel) if self.ntupleType == "UWVV" else "Events" def combineParallelFiles(self, tempfiles, chan): - tempfiles = filter(os.path.isfile, tempfiles) + tempfiles = list(filter(os.path.isfile, tempfiles)) outfile = self.outfile_name if chan != "Inclusive": outfile = self.outfile_name.replace(".root", "_%s.root" % chan) rval = subprocess.call(["hadd", "-k", "-f", "-j", str(self.numCores), outfile] + tempfiles) if rval == 0: - map(os.remove, tempfiles) + for f in tempfiles: os.remove(f) else: raise RuntimeError("Failed to collect data from parallel run") def processParallelByDataset(self, datasets, chan): self.expandDatasetFilePaths(self.numCores) - expanded_datasets = [[d, f, chan] for d, files in datasets.iteritems() for f in files] + expanded_datasets = [[d, f, chan] for d, files in datasets.items() for f in files] logging.debug(expanded_datasets) p = multiprocessing.Pool(processes=self.numCores) - tempfiles = glob.glob(self.tempfileName().replace("MainProcess", "PoolWorker*")) - map(os.remove, tempfiles) + tempfiles = glob.glob(self.tempfileName().replace("MainProcess", "*PoolWorker*")) + for f in tempfiles: + os.remove(f) p.map(self, expanded_datasets) # Store arrays in temp files, since it can get way too big to keep around in memory - tempfiles = glob.glob(self.tempfileName().replace("MainProcess", "PoolWorker*")) + tempfiles = glob.glob(self.tempfileName().replace("MainProcess", "*PoolWorker*")) p.close() self.combineParallelFiles(tempfiles, chan) @@ -391,8 +395,8 @@ def processFile(self, filename, addSumweights, chan, filenum=1): # You can use filenum to index the files and sum separately, but it's not necessary def fillSumweightsHist(self, rtfile, filenum=1): sumWeightsType = "fromTree" - weightSignOnly = filter(lambda x: "wSignOnly" in x.GetName(), self.inputs) - wSuppress = filter(lambda x: "wSuppress" in x.GetName(), self.inputs) + weightSignOnly = list(filter(lambda x: "wSignOnly" in x.GetName(), self.inputs)) + wSuppress = list(filter(lambda x: "wSuppress" in x.GetName(), self.inputs)) weightSignOnly = weightSignOnly[0].GetVal() if weightSignOnly else False wSuppress = wSuppress[0].GetVal() if wSuppress else 0 diff --git a/Utilities/python/UserInput.py b/Utilities/python/UserInput.py index 586a0eb9..021cc743 100644 --- a/Utilities/python/UserInput.py +++ b/Utilities/python/UserInput.py @@ -9,6 +9,7 @@ import ROOT import imp import os +import logging def getDefaultParser(allow_from_file=True): parser = argparse.ArgumentParser() @@ -91,14 +92,14 @@ def readJson(json_file_name): try: json_info = json.load(json_file) except ValueError as err: - print "Error reading JSON file %s. The error message was:" % json_file_name + logigng.error("Problem reading JSON file %s. 
The error message was:" % json_file_name) print(err) return json_info # Depends on AnalysisDatasetManagerModule def getHistInfo(analysis, input_hists, noConfig=False): if noConfig: - print "INFO: assuming histogram information is specified in selector" + logging.info("Assuming histogram information is specified in selector") return (input_hists, []) manager_path = ConfigureJobs.getManagerPath() diff --git a/Utilities/scripts/setupWGenCombine.py b/Utilities/scripts/setupWGenCombine.py index cfa694e4..c81c7490 100644 --- a/Utilities/scripts/setupWGenCombine.py +++ b/Utilities/scripts/setupWGenCombine.py @@ -1,9 +1,11 @@ +#!/bin/usr/env python from python import ConfigureJobs,CombineCardTools,UserInput import sys import ROOT import logging import array import argparse +import os ROOT.gROOT.SetBatch(True) @@ -12,11 +14,11 @@ help="Print debug info") parser.add_argument("--mc2hes", action='store_true', help="Convert MC errors to hessian") -parser.add_argument("-c", "--central", type=str, default="wlnu_jetbinned_nlo_cp5", +parser.add_argument("-c", "--central", type=str, default="wpmunu_nloew", help="Sample to use as central value") parser.add_argument("--files", type=lambda x: [i.strip() for i in x.split(",")], default=[], help="Samples to add to output file") -parser.add_argument("-d", "--data", type=str, default="wlnu_nlo", +parser.add_argument("-d", "--data", type=str, default="wpmunu_nloew", help="Sample to use as dummy data") parser.add_argument("-a", "--append", type=str, default="", help="Append to output folder name") @@ -32,7 +34,7 @@ help="don't add PDF uncertainties") parser.add_argument("--ssd", action='store_true', help="Write to /data/kelong, not /eos/user") -parser.add_argument("--noPtVSplit", action='store_true', +parser.add_argument("--splitPtV", action='store_true', help="Don't split scale uncertainties by pt(V)") parser.add_argument("--allHessianVars", action='store_true', help="store all hessian variations") @@ -49,7 +51,8 @@ cardtool = CombineCardTools.CombineCardTools() -manager_path = ConfigureJobs.getManagerPath() +manager_path = ConfigureJobs.getManagerPath() +#manager_path = "/afs/cern.ch/work/m/mumuhamm/WBoson/CMSSW_11_0_0/src/Data_Manager" sys.path.append("/".join([manager_path, "AnalysisDatasetManager", "Utilities/python"])) @@ -60,7 +63,7 @@ ) #plot_groups = ["wlnu_lo", "wlnu_lo_cp5", "wlnu_nlo", "wlnu_jetbinned_nlo", "wlnu_jetbinned_nlo_cp5", ] -plot_groups = args.files if args.files else ["wpmunu_minnlo_nnlopslike_photos", "wpmunu_nnlops_photos", "wpmunu_nnlops_nlow"] +plot_groups = args.files if args.files else ["wpmunu_minnlo_prod", "wpmunu_nloew", "wpmunu_nlo_qcd"] plotGroupsMap = {name : config_factory.getPlotGroupMembers(name) for name in plot_groups} xsecs = ConfigureJobs.getListOfFilesWithXSec([f for files in plotGroupsMap.values() for f in files]) @@ -82,7 +85,7 @@ #cardtool.setUnrolled([-2.5+0.5*i for i in range(0,11)], range(26, 56, 3)) cardtool.setProcesses(plotGroupsMap) cardtool.setChannels(args.channels) -print "Channels are", args.channels +print ("Channels are", args.channels) cardtool.setCrosSectionMap(xsecs) variations = [] if args.theoryOnly else ["CMS_scale_m"] @@ -91,7 +94,7 @@ cardtool.setNormalizedVariations(normVariations) folder_name = "_".join([args.fitvar,args.append]) if args.append != "" else args.fitvar -basefolder = "/data/kelong/" if args.ssd else "/eos/user/k/kelong" +basefolder = "/data/ali/" if args.ssd else "/eos/user/m/mumuhamm/www" cardtool.setOutputFolder(basefolder+"/CombineStudies/WGen/%s" % folder_name) 
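In the block that follows, the number of nuisance parameters written into each card is no longer hard-coded (previously 273 per channel) but accumulated from the customize cards that are actually appended, since addCustomizeCard returns the line count of the file it registers. A rough standalone illustration of just that counting step; the flag and directory names below mirror the script's options but are assumptions for the sketch:

```python
import os


def count_card_lines(path):
    # One nuisance declaration per non-empty line in a customize card.
    with open(path) as card:
        return sum(1 for line in card if line.strip())


def count_nuisances(split_ptv=False, theory_only=False, no_pdf=False, all_hessian=False,
                    customize_dir="Templates/CombineCards/VGen/Customize"):
    """Simplified mirror of the nnu bookkeeping in setupWGenCombine.py (counting only)."""
    nnu = 2  # nuisances already present in the base WGen template
    if split_ptv:
        nnu += count_card_lines(os.path.join(customize_dir, "PtV_template.txt"))      # 30 pt(V)-binned scale variations
    if not theory_only:
        nnu += count_card_lines(os.path.join(customize_dir, "muscale_template.txt"))  # CMS_scale_m
    if not no_pdf:
        pdf_card = "pdfHessian_template.txt" if all_hessian else "pdf_template.txt"   # 100 Hessian variations or 1 envelope
        nnu += count_card_lines(os.path.join(customize_dir, pdf_card))
    else:
        nnu += count_card_lines(os.path.join(customize_dir, "scale_template.txt"))    # inclusive muR/muF/muRmuF
    return nnu
```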
cardtool.setLumi(args.lumi) @@ -167,7 +170,7 @@ if not args.noPdf: cardtool.addTheoryVar(process, 'pdf_mc' if "cp5" not in process else "pdf_hessian", range(10,111), central=0) - if not args.noPtVSplit: + if args.splitPtV: for pair in ptbinPairs: varName = 'ptV%ito%i' % pair varName = varName.replace("100", "Inf") @@ -178,11 +181,26 @@ cardtool.loadHistsForProcess(process, expandedTheory=args.allHessianVars) cardtool.writeProcessHistsToOutput(process) -nuissance_map = {"mn" : 273, "mp" : 273, "m" : 273 } + scriptdir = os.path.dirname(os.path.realpath(__file__)) + path = "/".join(scriptdir.split("/")[:-2]+["Templates", "CombineCards", "VGen"]) + + nnu = 2 + if args.splitPtV: + nnu += cardtool.addCustomizeCard(path+"/Customize/PtV_template.txt") + if not args.theoryOnly: + nnu += cardtool.addCustomizeCard(path+"/Customize/muscale_template.txt") + + if not args.noPdf: + nnu += cardtool.addCustomizeCard(path+"/Customize/pdfHessian_template.txt") \ + if args.allHessianVars else cardtool.addCustomizeCard(path+"/Customize/pdf_template.txt") + else: + nnu += cardtool.addCustomizeCard(path+"/Customize/scale_template.txt") + + nuissance_map = {"mn" : nnu, "mp" : nnu, "m" : nnu} for i, chan in enumerate(args.channels): data = args.data if "," not in args.data else args.data.split(",")[i] central = args.central if "," not in args.central else args.data.split(",")[i] - cardtool.setTemplateFileName("Templates/CombineCards/VGen/WGen_template_{channel}.txt") + cardtool.setTemplateFileName("%s/WGen_template_{channel}.txt" % path) logging.info("Writting cards for channel %s" % chan) cardtool.writeCards(chan, nuissance_map[chan], extraArgs={"data_name" : data, diff --git a/interface/NanoGenSelectorBase.h b/interface/NanoGenSelectorBase.h index b4b345ce..a532d7f2 100644 --- a/interface/NanoGenSelectorBase.h +++ b/interface/NanoGenSelectorBase.h @@ -71,6 +71,9 @@ public : bool doPreFSR_ = false; bool doBareLeptons_ = false; float ratio_mass; + float leppT_ratio; + float lep1pT_ratio; + float lep2pT_ratio; float refWeight = 1; TH1D* mcWeights_; diff --git a/interface/SelectorBase.h b/interface/SelectorBase.h index 96a19998..798bebbd 100644 --- a/interface/SelectorBase.h +++ b/interface/SelectorBase.h @@ -59,6 +59,7 @@ enum Systematic { recoilCorrectionStat8Up, recoilCorrectionStat8Down, recoilCorrectionStat9Up, recoilCorrectionStat9Down, BareLeptons, PreFSRLeptons, BornParticles, LHEParticles, + BareLeptons_mWShift100MeVUp, BareLeptons_mWShift100MeVDown,BareLeptons_muonScaleUp, BareLeptons_muonScaleDown, mWShift100MeVUp, mWShift50MeVUp, mWShift25MeVUp, mWShift20MeVUp, mWShift10MeVUp, mWShift100MeVDown, mWShift50MeVDown, mWShift25MeVDown, mWShift20MeVDown, mWShift10MeVDown, mZShift100MeVUp, mZShift50MeVUp, mZShift25MeVUp, mZShift20MeVUp, mZShift10MeVUp, diff --git a/src/NanoGenSelectorBase.cc b/src/NanoGenSelectorBase.cc index 4dfeaf39..5706b77e 100644 --- a/src/NanoGenSelectorBase.cc +++ b/src/NanoGenSelectorBase.cc @@ -32,11 +32,15 @@ void NanoGenSelectorBase::Init(TTree *tree) std::cout << "INFO: doLHE = " << doLHE_ << " doPrefsr " << doPreFSR_ << std::endl; std::cout << "INFO: doBareLeptons = "< 0.4; }), photons.end()); + return reco::deltaR(p, lep) > 3.0; }), photons.end()); //for(unsigned int i=0;iGetVal(); TParameter* muonVar = (TParameter*) GetInputList()->FindObject("muonVar"); doMuonVar_ = muonVar != nullptr && muonVar->GetVal(); - + //Have to read this here as well, otherwise you don't know until after calling NanoGenSelectorBase::Init + TParameter* barePart = (TParameter*) 
GetInputList()->FindObject("bare"); + doBareLeptons_ = barePart != nullptr && barePart->GetVal(); + if (doMassVar_) { systematics_[mWShift50MeVUp] = "mWBWShift50MeVUp"; systematics_[mWShift50MeVDown] = "mWBWShift50MeVDown"; systematics_[mWShift100MeVUp] = "mWBWShift100MeVUp"; systematics_[mWShift100MeVDown] = "mWBWShift100MeVDown"; + if (doBareLeptons_) { + systematics_[BareLeptons_muonScaleUp] = "bare_CMS_scale_mUp"; + systematics_[BareLeptons_muonScaleDown] = "bare_CMS_scale_mDown"; + } } if (doMuonVar_) { @@ -55,7 +62,7 @@ void WGenSelector::Init(TTree *tree) nLeptons_ = 1; nNeutrinos_ = 1; doPhotons_ = true; - doBareLeptons_ = true; + // Chose by MC sample if (name_.find("nnlops") != std::string::npos) { MV_GEN_ = 80398.0; @@ -86,11 +93,11 @@ void WGenSelector::LoadBranchesNanoAOD(Long64_t entry, SystPair variation) { TRandom3 gauss; ptl_smear = l.pt()*gauss.Gaus(1, 0.01); } - else if (variation.first == muonScaleUp) { + else if (variation.first == muonScaleUp || variation.first == BareLeptons_muonScaleUp) { leptons.at(0).setP4(makeGenParticle(l.pdgId(), l.status(), l.pt()*1.001, l.eta(), l.phi(), l.mass()).polarP4()); SetComposite(); } - else if (variation.first == muonScaleDown) { + else if (variation.first == muonScaleDown || variation.first == BareLeptons_muonScaleDown) { leptons.at(0).setP4(makeGenParticle(l.pdgId(), l.status(), l.pt()*1./1.001, l.eta(), l.phi(), l.mass()).polarP4()); SetComposite(); } @@ -101,6 +108,8 @@ void WGenSelector::LoadBranchesNanoAOD(Long64_t entry, SystPair variation) { ptVlhe = wCand.pt(); mVlhe = wCand.mass()*1000.; ratio_mass = wCand.mass(); + auto& mylep = leptons.at(0); + leppT_ratio =mylep.pt(); } else if (variation.first == LHEParticles) { // define at LHE level if it exists @@ -110,6 +119,7 @@ void WGenSelector::LoadBranchesNanoAOD(Long64_t entry, SystPair variation) { else if (variation.first == BareLeptons) { ptVlhe = wCand.pt(); mVlhe = wCand.mass()*1000.; + //ratio_mass = wCand.mass(); } else if (variation.first == mWShift10MeVUp) weight = cenWeight*breitWignerWeight(10.); @@ -128,9 +138,9 @@ void WGenSelector::LoadBranchesNanoAOD(Long64_t entry, SystPair variation) { } else if (variation.first == mWShift50MeVDown) weight = cenWeight*breitWignerWeight(-50.); - else if (variation.first == mWShift100MeVUp) + else if (variation.first == mWShift100MeVUp || variation.first == BareLeptons_mWShift100MeVUp) weight = cenWeight*breitWignerWeight(100.); - else if (variation.first == mWShift100MeVDown) + else if (variation.first == mWShift100MeVDown || variation.first == BareLeptons_mWShift100MeVDown) weight = cenWeight*breitWignerWeight(-100.); if (leptons.size() > 0 && std::abs(leptons.at(0).pdgId()) == 11) { @@ -310,7 +320,6 @@ void WGenSelector::FillHistogramsByName(Long64_t entry, std::string& toAppend, S ((variation.first == ptV40toInf || variation.first == ptV40toInf_lhe) && ptVlhe < 40. 
)) { return; } - SafeHistFill(histMap1D_, concatenateNames("mW", toAppend), channel_, variation.first, wCand.mass(), weight); SafeHistFill(histMap1D_, concatenateNames("yW", toAppend), channel_, variation.first, wCand.Rapidity(), weight); SafeHistFill(histMap1D_, concatenateNames("ptW", toAppend), channel_, variation.first, wCand.pt(), weight); @@ -341,10 +350,17 @@ void WGenSelector::FillHistogramsByName(Long64_t entry, std::string& toAppend, S } if (variation.first == BareLeptons) { - //ponerse las pilas, the (variation.first == BareLeptons) refraining the histograms to fill + //ponerse las pilas, the (variation.first == BareLeptons) refraining the histograms to fill + //Call the channel central just to avoid appending the name "barelep". + //These histograms should only be built for the barelepton case, should be understood that they always refer + //to the barlep channel implicitly ratio_mass /= wCand.mass(); - SafeHistFill(histMap1D_, "Ratio_Wmass", channel_, variation.first, ratio_mass, weight); - SafeHistFill(histMap1D_, concatenateNames("nGammaAssoc",toAppend), channel_, variation.first, photons.size(), weight); + leppT_ratio /= lep.pt(); + float reciproc_rpT = 1.0/leppT_ratio; + float reciproc_rm = 1.0/ratio_mass; + SafeHistFill(histMap1D_, "Ratio_leppT", channel_, Central, reciproc_rpT, weight); + SafeHistFill(histMap1D_, "Ratio_Wmass", channel_, Central, reciproc_rm, weight); + SafeHistFill(histMap1D_, concatenateNames("nGammaAssoc",toAppend), channel_, Central, photons.size(), weight); auto compareByPt = [](const reco::GenParticle& a, const reco::GenParticle& b) { return a.pt() < b.pt(); }; auto compareByDRLead = [lep] (const reco::GenParticle& a, const reco::GenParticle& b) { @@ -356,9 +372,9 @@ void WGenSelector::FillHistogramsByName(Long64_t entry, std::string& toAppend, S - SafeHistFill(histMap1D_, "dRlgamma_minassoc", channel_, variation.first, photons.size() > 0 ? reco::deltaR(*gclose, lep) : 0., weight); - SafeHistFill(histMap1D_, "dRlgamma_maxptassoc", channel_, variation.first, photons.size() > 0 ? reco::deltaR(*maxPtg, lep) : 0., weight); - SafeHistFill(histMap1D_, "ptg_closeassoc", channel_, variation.first, photons.size() > 0 ? gclose->pt() : 0., weight); - SafeHistFill(histMap1D_, "ptgmax_assoc", channel_, variation.first, photons.size() > 0 ? maxPtg->pt() : 0., weight); + SafeHistFill(histMap1D_, "dRlgamma_minassoc", channel_, Central, photons.size() > 0 ? reco::deltaR(*gclose, lep) : 0., weight); + SafeHistFill(histMap1D_, "dRlgamma_maxptassoc", channel_, Central, photons.size() > 0 ? reco::deltaR(*maxPtg, lep) : 0., weight); + SafeHistFill(histMap1D_, "ptg_closeassoc", channel_, Central, photons.size() > 0 ? gclose->pt() : 0., weight); + SafeHistFill(histMap1D_, "ptgmax_assoc", channel_, Central, photons.size() > 0 ? 
maxPtg->pt() : 0., weight); } } diff --git a/src/ZGenSelector.cc b/src/ZGenSelector.cc index ebef23cf..1e1c389a 100644 --- a/src/ZGenSelector.cc +++ b/src/ZGenSelector.cc @@ -18,7 +18,7 @@ void ZGenSelector::Init(TTree *tree) histMap1D_[{"CutFlow", Unknown, Central}] = {}; std::vector basehists1D = {"CutFlow", "ZMass", "yZ", "ptZ", "phiZ", "ptl1", "etal1", "phil1", "ptl2", "etal2", "phil2", "ptj1", "ptj2", "ptj3", "etaj1", "etaj2", "etaj3", "phij1", "phij2", "phij3", "nJets", - "MET", "HT",}; + "MET", "HT","Ratio_Zmass","Ratio_lep1pT","Ratio_lep2pT", "dRlgamma_maxptassoc1","dRlgamma_maxptassoc2", "dRlgamma_minassoc1","dRlgamma_minassoc2", "ptg_closeassoc1","ptg_closeassoc2", "ptg_maxassoc", "nGammaAssoc","ptgmax_assoc",}; hists1D_ = basehists1D; //std::vector partonicChans = {"uu_dd", "uubar_ddbar", "ug_dg", "ubarg_dbarg", "gg", "other"}; //for (auto& chan : partonicChans) { @@ -29,9 +29,10 @@ void ZGenSelector::Init(TTree *tree) weighthists1D_ = {"CutFlow", "ZMass", "yZ", "ptZ", "phiZ", "ptl1", "etal1", "ptl2", "etal2", "ptj1", "ptj2", "ptj3", "etaj1", "etaj2", "etaj3", "nJets", - "MET", "HT", }; + "MET", "HT","Ratio_Zmass","Ratio_lep1pT","Ratio_lep2pT", "dRlgamma_maxptassoc1","dRlgamma_maxptassoc2", "dRlgamma_minassoc1","dRlgamma_minassoc2", "ptg_closeassoc1","ptg_closeassoc2", "ptg_maxassoc", "nGammaAssoc","ptgmax_assoc", }; nLeptons_ = 2; - + doPhotons_ = true; + doBareLeptons_ = true; TParameter* massVar = (TParameter*) GetInputList()->FindObject("massVar"); doMassVar_ = massVar != nullptr && massVar->GetVal(); @@ -62,12 +63,24 @@ void ZGenSelector::Init(TTree *tree) void ZGenSelector::LoadBranchesNanoAOD(Long64_t entry, std::pair variation) { NanoGenSelectorBase::LoadBranchesNanoAOD(entry, variation); - if (variation.first == Central) + if (variation.first == Central){ cenWeight = weight; + ptVlhe = zCand.pt(); + mVlhe = zCand.mass()*1000.; + ratio_mass = zCand.mass(); + auto& mylep1 = leptons.at(0); + auto& mylep2 = leptons.at(1); + lep1pT_ratio =mylep1.pt(); + lep2pT_ratio =mylep2.pt(); + } else if (variation.first == LHEParticles) { ptVlhe = zCand.pt(); mVlhe = zCand.mass()*1000.; } + else if (variation.first == BareLeptons) { + ptVlhe = zCand.pt(); + mVlhe = zCand.mass()*1000.; + } else if (variation.first == mZShift10MeVUp) weight = cenWeight*breitWignerWeight(10.); else if (variation.first == mZShift10MeVDown) @@ -243,6 +256,44 @@ void ZGenSelector::FillHistograms(Long64_t entry, std::pair 0 ? reco::deltaR(*gclose1, lep1) : 0., weight); + SafeHistFill(histMap1D_, "dRlgamma_minassoc2", channel_, variation.first, photons.size() > 0 ? reco::deltaR(*gclose2, lep2) : 0., weight); + SafeHistFill(histMap1D_, "dRlgamma_maxptassoc1", channel_, variation.first, photons.size() > 0 ? reco::deltaR(*maxPtg, lep1) : 0., weight); + SafeHistFill(histMap1D_, "dRlgamma_maxptassoc2", channel_, variation.first, photons.size() > 0 ? reco::deltaR(*maxPtg, lep2) : 0., weight); + SafeHistFill(histMap1D_, "ptg_closeassoc1", channel_, variation.first, photons.size() > 0 ? gclose1->pt() : 0., weight); + SafeHistFill(histMap1D_, "ptg_closeassoc2", channel_, variation.first, photons.size() > 0 ? gclose2->pt() : 0., weight); + SafeHistFill(histMap1D_, "ptgmax_assoc", channel_, variation.first, photons.size() > 0 ? maxPtg->pt() : 0., weight); + } + + // Should check how slow this is. For now it's off return; @@ -279,5 +330,11 @@ void ZGenSelector::FillHistograms(Long64_t entry, std::pair
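The mWBWShift systematics exercised in WGenSelector (and the mZShift analogues in ZGenSelector) are pure event weights, weight = cenWeight * breitWignerWeight(shift). The implementation of breitWignerWeight is not part of this diff; one common way to realize such a pole-mass shift is to take the ratio of relativistic Breit-Wigner line shapes evaluated at the generated boson mass. The Python sketch below uses the MV_GEN_ value set in WGenSelector.cc for the pole mass and an assumed width of roughly the PDG W width; it is an illustration of the technique, not the selector's actual code:

```python
def breit_wigner_weight(m_gen, m_pole, width, shift):
    """Weight moving an event generated with pole mass m_pole to a line shape with
    pole m_pole + shift (all values in MeV). Illustrative only; the selector's
    breitWignerWeight() may use a different convention or normalization."""
    def bw(m, m0):
        # Unnormalized relativistic Breit-Wigner density; sufficient for a shape reweighting sketch.
        return 1.0 / ((m * m - m0 * m0) ** 2 + m0 * m0 * width * width)
    return bw(m_gen, m_pole + shift) / bw(m_gen, m_pole)


# e.g. the mWBWShift100MeVUp variation for an event generated at mW = 80.3 GeV,
# with MV_GEN_ = 80398.0 MeV from WGenSelector.cc and an assumed width of 2085 MeV:
w_up = breit_wigner_weight(80300.0, 80398.0, 2085.0, +100.0)
```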