diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f67c831a0..4a73fa005 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,6 @@ env: OUTFILE_LOWPU_EE: "mz_lowPU_ee.hdf5" OUTFILE_LOWPU_MUMU: "mz_lowPU_mumu.hdf5" DATAPATH: "/scratch/shared/NanoAOD/" - DATAPATH_LOWPU: "/scratch/shared/NanoAOD/LowPU/" NOMINAL_FAKE_SMOOTHING: "hybrid" # A workflow run is made up of one or more jobs that can run sequentially or in parallel @@ -315,7 +314,7 @@ jobs: - name: bsm rabbit setup run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run_python.sh scripts/rabbit/setupRabbit.py - -i $HIST_FILE --lumiScale $LUMI_SCALE --addBSMMixing WtoNMu_5 0.01 --breitwignerWMassWeights + -i $HIST_FILE --lumiScale $LUMI_SCALE --addBSMMixing WtoNMuMass5 0.01 --breitwignerWMassWeights --postfix bsm -o $WREMNANTS_OUTDIR - name: bsm rabbit fit @@ -330,19 +329,19 @@ jobs: run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_limit.py $WREMNANTS_OUTDIR/WMass_eta_pt_charge_bsm/WMass.hdf5 -o $WREMNANTS_OUTDIR/WMass_eta_pt_charge_bsm/ - -t -1 --asymptoticLimits WtoNMu_5_mixing --modes gaussian + -t -1 --asymptoticLimits WtoNMuMass5_mixing --modes gaussian - name: bsm plot postfit variations run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists.py '$WREMNANTS_OUTDIR/WMass_eta_pt_charge_bsm/fitresults.hdf5' -o $WEB_DIR/$PLOT_DIR/BSM -m Project -m Normalize --title CMS --subtitle Preliminary --titlePos 0 --yscale '1.3' --result asimov --lowerLegCols 3 --rrange 0.98 1.02 - --varNames massShiftW100MeV 'WtoNMu_5_mixing' --varColors red blue + --varNames massShiftW100MeV 'WtoNMuMass5_mixing' --varColors red blue # - name: bsm plot parameter correlations # run: >- # scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists_cov.py '$WREMNANTS_OUTDIR/WMass_eta_pt_charge_bsm/fitresults.hdf5' - # -o $WEB_DIR/$PLOT_DIR/BSM --params 'WtoNMu_5' massShiftW100MeV + # -o $WEB_DIR/$PLOT_DIR/BSM --params 'WtoNMuMass5' massShiftW100MeV # --title CMS --subtitle Preliminary --titlePos 0 --config 'utilities/styles/styles.py' --correlation --showNumbers # - name: bsm plot pulls and impacts @@ -354,7 +353,7 @@ jobs: # - name: bsm likelihood scan # run: >- # scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_likelihood_scan.py '$WREMNANTS_OUTDIR/WMass_eta_pt_charge_bsm/fitresults.hdf5' - # -o $WEB_DIR/$PLOT_DIR/BSM --params 'WtoNMu_5' --title CMS --subtitle Preliminary --titlePos 0 --config 'utilities/styles/styles.py' + # -o $WEB_DIR/$PLOT_DIR/BSM --params 'WtoNMuMass5' --title CMS --subtitle Preliminary --titlePos 0 --config 'utilities/styles/styles.py' w-plotting: # The type of runner that the job will run on @@ -537,7 +536,7 @@ jobs: - name: lowpu w mu analysis run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run_python.sh scripts/histmakers/mw_lowPU.py - --dataPath $DATAPATH_LOWPU -o $WREMNANTS_OUTDIR -j $NTHREADS --forceDefaultName --unfolding --unfoldingLevels postfsr + --dataPath $DATAPATH -o $WREMNANTS_OUTDIR -j $NTHREADS --forceDefaultName --unfolding --unfoldingLevels postfsr - name: lowpu w mu plot ptW run: >- @@ -567,7 +566,7 @@ jobs: - name: lowpu w e analysis run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run_python.sh scripts/histmakers/mw_lowPU.py - --dataPath $DATAPATH_LOWPU -o $WREMNANTS_OUTDIR -j $NTHREADS --forceDefaultName --flavor e --unfolding --unfoldingLevels postfsr + --dataPath $DATAPATH -o $WREMNANTS_OUTDIR -j $NTHREADS 
--forceDefaultName --flavor e --unfolding --unfoldingLevels postfsr - name: lowpu w e plot ptW run: >- @@ -639,7 +638,7 @@ jobs: - name: lowpu z mumu analysis run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run_python.sh scripts/histmakers/mz_lowPU.py --unfolding --unfoldingLevels postfsr - --dataPath $DATAPATH_LOWPU -o $WREMNANTS_OUTDIR -j $NTHREADS --forceDefaultName + --dataPath $DATAPATH -o $WREMNANTS_OUTDIR -j $NTHREADS --forceDefaultName - name: lowpu z mumu plot ptll yll run: >- @@ -659,7 +658,7 @@ jobs: - name: lowpu z ee analysis run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run_python.sh scripts/histmakers/mz_lowPU.py --unfolding --unfoldingLevels postfsr - --dataPath $DATAPATH_LOWPU -o $WREMNANTS_OUTDIR -j $NTHREADS --forceDefaultName --flavor ee + --dataPath $DATAPATH -o $WREMNANTS_OUTDIR -j $NTHREADS --forceDefaultName --flavor ee - name: lowpu z ee plot ptll yll run: >- @@ -1165,7 +1164,7 @@ jobs: - name: dilepton plotting yll run: >- scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run_python.sh scripts/plotting/makeDataMCStackPlot.py - --yscale 1.3 --baseName nominal_yll --nominalRef nominal_yll --hists yll --fineGroups -o $WEB_DIR -f $PLOT_DIR -p z $HIST_FILE + --yscale 1.3 --baseName nominal_ptll_yll --nominalRef nominal_ptll_yll --hists yll --fineGroups -o $WEB_DIR -f $PLOT_DIR -p z $HIST_FILE - name: dilepton plotting cosThetaStarll run: >- diff --git a/scripts/corrections/make_muon_response_maps.py b/scripts/corrections/make_muon_response_maps.py index 03435d712..6140da9f0 100644 --- a/scripts/corrections/make_muon_response_maps.py +++ b/scripts/corrections/make_muon_response_maps.py @@ -20,12 +20,12 @@ hist_response_smeared = None procs = [] -procs.append("ZmumuPostVFP") -procs.append("ZtautauPostVFP") -procs.append("WplusmunuPostVFP") -procs.append("WminusmunuPostVFP") -procs.append("WplustaunuPostVFP") -procs.append("WminustaunuPostVFP") +procs.append("Zmumu_2016PostVFP") +procs.append("Ztautau_2016PostVFP") +procs.append("Wplusmunu_2016PostVFP") +procs.append("Wminusmunu_2016PostVFP") +procs.append("Wplustaunu_2016PostVFP") +procs.append("Wminustaunu_2016PostVFP") with h5py.File(infile, "r") as f: diff --git a/scripts/corrections/make_ptv_unfolding_corr.py b/scripts/corrections/make_ptv_unfolding_corr.py index 32259ac93..7b43ad2ee 100644 --- a/scripts/corrections/make_ptv_unfolding_corr.py +++ b/scripts/corrections/make_ptv_unfolding_corr.py @@ -58,7 +58,7 @@ logger = logging.setup_logger("make_ptv_unfolding_corr", 4 if args.debug else 3) -genh = input_tools.read_and_scale(args.genFile, "ZmumuPostVFP", "nominal_gen") +genh = input_tools.read_and_scale(args.genFile, "Zmumu_2016PostVFP", "nominal_gen") unfolded_res = pickle.load(open(args.unfoldingFile, "rb")) unfolded_datah = unfolded_res["results"]["pmaskedexp"]["chan_13TeV"]["Z"][ diff --git a/scripts/corrections/make_theory_corr.py b/scripts/corrections/make_theory_corr.py index 9b3d8248f..483063a4d 100644 --- a/scripts/corrections/make_theory_corr.py +++ b/scripts/corrections/make_theory_corr.py @@ -114,6 +114,14 @@ def parse_args(): action="store_true", help="Normalize the corrections", ) + parser.add_argument( + "--eras", + type=str, + nargs="+", + choices=common.supported_eras, + help="Data set to process", + default=["2016PostVFP", "2017", "2018"], + ) args = parser.parse_args() return args @@ -147,9 +155,6 @@ def read_corr(procName, generator, corrFiles, axes, smooth=None): fo_func = getattr(input_tools, f"read_matched_scetlib_{fo_generator}_hist") - zero_nons_bins = 
( - 0 if "nnlojet" not in fo_generator else hist.tag.Slicer()[0:2] - ) # TODO: Should probably be more general... smooth_args = {} if smooth == "fo_sing": @@ -160,7 +165,7 @@ def read_corr(procName, generator, corrFiles, axes, smooth=None): fo_files[0], axes, charge=charge, - zero_nons_bins=zero_nons_bins, + zero_nons_bins=slice(0j, 1j), # set bins qT < 1GeV to 0 **smooth_args, ) else: @@ -212,8 +217,8 @@ def main(): } if args.proc == "z": - eventgen_procs = ["ZmumuPostVFP"] - filesByProc = {"ZmumuPostVFP": args.corrFiles} + eventgen_procs = ["Zmumu"] # , "DYJetsToMuMuMass10to50"] + filesByProc = {"Zmumu": args.corrFiles} else: wpfiles = list( filter( @@ -232,32 +237,33 @@ def main(): if len(wpfiles) != len(wmfiles): if args.duplicateWminus: logger.warning("Using W- correction as a proxy for W+!") - filesByProc = { - "WplusmunuPostVFP": wmfiles, - "WminusmunuPostVFP": wmfiles, - } + filesByProc = {"Wplusmunu": wmfiles, "Wminusmunu": wmfiles} else: raise ValueError( f"Expected equal number of files for W+ and W-, found {len(wpfiles)} (Wp) and {len(wmfiles)} (Wm)" ) else: - filesByProc = {"WplusmunuPostVFP": wpfiles, "WminusmunuPostVFP": wmfiles} + filesByProc = {"Wplusmunu": wpfiles, "Wminusmunu": wmfiles} if args.proc == "w": - eventgen_procs = ["WplusmunuPostVFP", "WminusmunuPostVFP"] + eventgen_procs = ["Wplusmunu", "Wminusmunu"] elif args.proc == "bsm": eventgen_procs = [ - "WtoNMu_MN-5-V-0p001", - "WtoNMu_MN-10-V-0p001", - "WtoNMu_MN-30-V-0p001", - "WtoNMu_MN-50-V-0p001", + "WtoNMuMN5V0p001", + "WtoNMuMN10V0p001", + "WtoNMuMN30V0p001", + "WtoNMuMN50V0p001", ] minnloh = hh.sumHists( [ input_tools.read_mu_hist_combine_tau( - args.minnloFile, proc, args.minnloh, combine_with_tau=args.proc != "bsm" + args.minnloFile, + proc, + args.minnloh, + eras=args.eras, + combine_with_tau=args.proc != "bsm", ) for proc in eventgen_procs ] @@ -354,8 +360,8 @@ def main(): generator = args.generator if args.postfix: - generator += args.postfix - outfile = f"{args.outpath}/{generator}Corr{args.proc.upper()}.pkl.lz4" + generator += f"_{args.postfix}" + outfile = f"{args.outpath}/{generator}_Corr{args.proc.upper()}.pkl.lz4" meta_dict = {} for f in [args.minnloFile] + args.corrFiles: @@ -409,6 +415,10 @@ def main(): "absY": "$|y^{{{final_state}}}|$", } + outdir = output_tools.make_plot_dir( + *args.plotdir.rsplit("/", 1), eoscp=args.eoscp + ) + for charge in minnloh.axes["charge"].centers: if args.duplicateWminus and charge == 1: continue @@ -420,48 +430,76 @@ def main(): proc = "Wp" if charge.imag > 0 else "Wm" final_state = "\\ell^{+}\\nu" if charge.imag > 0 else "\\ell^{-}\\nu" - fig, ax = plt.subplots(figsize=(6, 6)) - corrh[{"vars": 0, "charge": charge, "Q": 0}].plot(ax=ax, cmin=0.5, cmax=1.5) - - outdir = output_tools.make_plot_dir( - *args.plotdir.rsplit("/", 1), eoscp=args.eoscp - ) - plot_name = f"corr2D_{generator}_MiNNLO_{proc}" - plot_tools.save_pdf_and_png(outdir, plot_name) - output_tools.write_index_and_log( - outdir, plot_name, args=args, analysis_meta_info=meta_dict - ) + for imass, mass_edges in enumerate(minnloh.axes["Q"]): + if len(minnloh.axes["Q"].centers) > 1: + suffix = f"_{int(mass_edges[0])}to{int(mass_edges[1])}GeV" + extra_text_base = [ + f"{int(mass_edges[0])} < Q < {int(mass_edges[1])} GeV" + ] + else: + suffix = "" + extra_text_base = [] + + for ivar, var in enumerate(corrh.axes["vars"]): + + if len(corrh.axes["vars"]) > 1: + suffix += f"_{var}" + extra_text = [*extra_text_base, var] + else: + extra_text = extra_text_base + + if "vars" in minnloh.axes.name: + iminnloh = 
minnloh[{"Q": imass, "charge": charge, "vars": ivar}] + else: + iminnloh = minnloh[{"Q": imass, "charge": charge}] + + inumh = numh[{"Q": imass, "charge": charge, "vars": ivar}] + icorrh = corrh[{"Q": imass, "charge": charge, "vars": ivar}] + + fig, ax = plt.subplots(figsize=(6, 6)) + icorrh.plot(ax=ax, cmin=0.5, cmax=1.5) + + plot_name = f"corr2D_{generator}_MiNNLO_{proc}{suffix}" + plot_tools.save_pdf_and_png(outdir, plot_name) + output_tools.write_index_and_log( + outdir, plot_name, args=args, analysis_meta_info=meta_dict + ) - for varm, varn in zip(minnloh.axes.name[:-1], numh.axes.name[:-2]): - fig = plot_tools.makePlotWithRatioToRef( - [ - minnloh[{"charge": charge}].project(varm), - numh[{"vars": 0, "charge": charge}].project(varn), - ], - [ - "MiNNLO", - generator, - ], - colors=["orange", "mediumpurple"], - linestyles=[ - "solid", - "dashed", - ], - xlabel=xlabel[varm].format(final_state=final_state), - ylabel="Events/bin", - rlabel="x/MiNNLO", - legtext_size=24, - rrange=[0.8, 1.2], - yscale=1.1, - xlim=None, - binwnorm=1.0, - baseline=True, - ) - plot_name = f"{varm}_{generator}_MiNNLO_{proc}" - plot_tools.save_pdf_and_png(outdir, plot_name) - output_tools.write_index_and_log( - outdir, plot_name, args=args, analysis_meta_info=meta_dict - ) + for varm, varn in zip(iminnloh.axes.name, inumh.axes.name): + fig = plot_tools.makePlotWithRatioToRef( + [ + iminnloh.project(varm), + inumh.project(varn), + ], + [ + "MiNNLO", + generator.replace("_", " ").replace("FineBins ", ""), + ], + colors=["orange", "mediumpurple"], + linestyles=[ + "solid", + "dashed", + ], + xlabel=xlabel[varm].format(final_state=final_state), + ylabel="Events/bin", + rlabel="x/MiNNLO", + legtext_size=24, + nlegcols=1, + rrange=[0.8, 1.2], + yscale=1.1, + xlim=None, + binwnorm=1.0, + baseline=True, + extra_text=extra_text, + extra_text_loc=(0.5, 0.7) if varm == "qT" else (0.1, 0.2), + ) + plot_name = f"{varm}_{generator}_MiNNLO_{proc}{suffix}" + plot_tools.save_pdf_and_png(outdir, plot_name) + output_tools.write_index_and_log( + outdir, plot_name, args=args, analysis_meta_info=meta_dict + ) + + break # only plot first variation if output_tools.is_eosuser_path(args.plotdir) and args.eoscp: output_tools.copy_to_eos(outdir, args.plotdir) diff --git a/scripts/corrections/make_theory_corr_by_helicity.py b/scripts/corrections/make_theory_corr_by_helicity.py index 916ae19fc..7f4f91bfe 100644 --- a/scripts/corrections/make_theory_corr_by_helicity.py +++ b/scripts/corrections/make_theory_corr_by_helicity.py @@ -96,11 +96,15 @@ raise ValueError("Must specify at least one correction file") processes = ( - ["ZmumuPostVFP"] if args.proc == "z" else ["WplusmunuPostVFP", "WminusmunuPostVFP"] + ["Zmumu_2016PostVFP"] + if args.proc == "z" + else ["Wplusmunu_2016PostVFP", "Wminusmunu_2016PostVFP"] ) binning = { - "qT": common.ptV_corr_binning, + "qT": ( + common.ptZgen_binning_corr if args.proc == "z" else common.ptWgen_binning_corr + ), "absY": [0 + 0.5 * i for i in range(9)] + [5.0], } binning["absy"] = binning["absY"] diff --git a/scripts/corrections/make_theory_corr_ew.py b/scripts/corrections/make_theory_corr_ew.py index 0ad499658..fd64a68d2 100644 --- a/scripts/corrections/make_theory_corr_ew.py +++ b/scripts/corrections/make_theory_corr_ew.py @@ -63,9 +63,9 @@ charge_dict = {"Zmumu": 0, "Wplusmunu": 1, "Wminusmunu": 0} procs_dict = { - "Zmumu": "ZmumuPostVFP", - "Wminusmunu": "WminusmunuPostVFP", - "Wplusmunu": "WplusmunuPostVFP", + "Zmumu": "Zmumu_2016PostVFP", + "Wminusmunu": "Wminusmunu_2016PostVFP", + "Wplusmunu": 
"Wplusmunu_2016PostVFP", } project = args.project diff --git a/scripts/corrections/make_theory_corr_ew_powhegFO.py b/scripts/corrections/make_theory_corr_ew_powhegFO.py index 38d690593..4abb49bf9 100644 --- a/scripts/corrections/make_theory_corr_ew_powhegFO.py +++ b/scripts/corrections/make_theory_corr_ew_powhegFO.py @@ -81,7 +81,7 @@ # integrate over pt and phistar h = h[{"ptVlhe": hist.sum, "phiStarlhe": hist.sum}] -h = hh.rebinHist(h, "absYVlhe", common.absYV_binning) +h = hh.rebinHist(h, "absYVlhe", common.absYZgen_binning_corr) hcorr = hist.Hist(*h.axes) # safe default diff --git a/scripts/corrections/plot_theory_corr.py b/scripts/corrections/plot_theory_corr.py index 630c65985..bc263c3a7 100644 --- a/scripts/corrections/plot_theory_corr.py +++ b/scripts/corrections/plot_theory_corr.py @@ -31,7 +31,7 @@ parser.add_argument( "--datasets", nargs="*", - default=["ZmumuPostVFP"], + default=["Zmumu_2016PostVFP"], help="Apply corrections from indicated generator. First will be nominal correction.", ) parser.add_argument( diff --git a/scripts/histmakers/histmaker_template.py b/scripts/histmakers/histmaker_template.py index 30ba1c62a..706cc13c4 100644 --- a/scripts/histmakers/histmaker_template.py +++ b/scripts/histmakers/histmaker_template.py @@ -22,7 +22,6 @@ filt=args.filterProcs, excl=args.excludeProcs, base_path=args.dataPath, - mode=analysis_label, era=args.era, ) diff --git a/scripts/histmakers/mw_lowPU.py b/scripts/histmakers/mw_lowPU.py index 551bf3533..2d7004266 100644 --- a/scripts/histmakers/mw_lowPU.py +++ b/scripts/histmakers/mw_lowPU.py @@ -44,22 +44,20 @@ ################################### flavor = args.flavor # mu, e -if flavor == "mu": - sigProcs = ["Wminusmunu", "Wplusmunu"] - base_group = "Wmunu" -else: - sigProcs = ["Wminusenu", "Wplusenu"] - base_group = "Wenu" +base_group = f"W{flavor}nu" datasets = getDatasets( maxFiles=args.maxFiles, filt=args.filterProcs, excl=list( - set(args.excludeProcs + ["singlemuon"] if flavor == "e" else ["singleelectron"]) + set( + args.excludeProcs + [f"HighEGJet{args.era}"] + if flavor == "e" + else [f"SingleMuon{args.era}"] + ) ), base_path=args.dataPath, extended="msht20an3lo" not in args.pdfs, - mode=analysis_label, era=args.era, nanoVersion="v12", ) @@ -69,6 +67,7 @@ for d in datasets: logger.info(f"Dataset {d.name}") + mtw_min = 40 # for Wmass (roughly half the boson mass) # lepton cuts @@ -140,7 +139,9 @@ "transverseMass", ] ## was transverseMass -theory_helpers_procs = theory_corrections.make_theory_helpers(args) +theory_helpers_procs = theory_corrections.make_theory_helpers( + args.pdfs, args.theoryCorr +) axis_ptVgen = theory_helpers_procs["W"]["qcdScale"].hist.axes["ptVgen"] axis_chargeVgen = theory_helpers_procs["W"]["qcdScale"].hist.axes["chargeVgen"] @@ -175,7 +176,7 @@ # extra axes which can be used to label tensor_axes theory_corrs = [*args.theoryCorr, *args.ewTheoryCorr] corr_helpers = theory_corrections.load_corr_helpers( - [d.name for d in datasets if d.name in common.vprocs_lowpu], theory_corrs + [d.name for d in datasets if d.name in common.vprocs], theory_corrs ) # recoil initialization @@ -191,7 +192,7 @@ def build_graph(df, dataset): isQCDMC = dataset.group == "QCD" theory_helpers = None - if dataset.name in common.vprocs_lowpu: + if dataset.name in common.vprocs: theory_helpers = theory_helpers_procs[dataset.name[0]] if dataset.is_data: @@ -204,7 +205,7 @@ def build_graph(df, dataset): axes = nominal_axes cols = nominal_cols - if args.unfolding and dataset.name in sigProcs: + if args.unfolding and dataset.group == 
base_group: df = unfolding_tools.define_gen_level( df, dataset.name, args.unfoldingLevels, mode=analysis_label ) @@ -442,7 +443,7 @@ def build_graph(df, dataset): df, results, dataset, - common.vprocs_lowpu, + common.vprocs, leps_uncorr, leps_corr, cols_fakerate=columns_fakerate, @@ -517,7 +518,7 @@ def build_graph(df, dataset): ) ) - if dataset.name in common.vprocs_lowpu: + if dataset.name in common.vprocs: df = syst_tools.add_theory_hists( results, df, @@ -640,7 +641,7 @@ def build_graph(df, dataset): if not args.noRecoil and args.recoilUnc: df = recoilHelper.add_recoil_unc_W(df, results, dataset, c, a, n) - if args.unfolding and args.poiAsNoi and dataset.name in sigProcs: + if args.unfolding and args.poiAsNoi and dataset.group == base_group: for level in args.unfoldingLevels: noiAsPoiHistName = Datagroups.histName( "nominal", syst=f"{level}_yieldsUnfolding" @@ -658,7 +659,7 @@ def build_graph(df, dataset): ) ) - if dataset.name in sigProcs: + if dataset.group == base_group: # dummy lepton momentum scale netabins = 1 nweights = 21 diff --git a/scripts/histmakers/mw_with_mu_eta_pt.py b/scripts/histmakers/mw_with_mu_eta_pt.py index 78b6ace16..e1593d610 100644 --- a/scripts/histmakers/mw_with_mu_eta_pt.py +++ b/scripts/histmakers/mw_with_mu_eta_pt.py @@ -429,8 +429,8 @@ unfolder_z = unfolding_tools.UnfolderZ( reco_axes_edges={ - "ptll": common.get_dilepton_ptV_binning(), - "yll": common.yll_10quantiles_binning, + "ptll": common.ptZ_binning, + "yll": common.yll_20quantiles_binning, }, unfolding_axes_names=["ptVGen", "absYVGen", "helicitySig"], unfolding_levels=args.unfoldingLevels, @@ -442,7 +442,9 @@ if args.fitresult: unfolding_corr_helper = unfolding_tools.reweight_to_fitresult(args.fitresult) -theory_helpers_procs = theory_corrections.make_theory_helpers(args, procs=["Z", "W"]) +theory_helpers_procs = theory_corrections.make_theory_helpers( + args.pdfs, args.theoryCorr, procs=["Z", "W"] +) if args.theoryAgnostic: theoryAgnostic_axes, theoryAgnostic_cols = differential.get_theoryAgnostic_axes( @@ -679,15 +681,14 @@ def build_graph(df, dataset): logger.info(f"build graph for dataset: {dataset.name}") results = [] - isW = dataset.name in common.wprocs + isW = dataset.group in ["Wmunu", "Wtaunu"] isBSM = dataset.name.startswith("WtoNMu") - isWmunu = isBSM or dataset.name in [ - "WplusmunuPostVFP", - "WminusmunuPostVFP", + isWmunu = isBSM or dataset.group in ["Wmunu"] + isZ = dataset.group in [ + "Zmumu", + "Ztautau", ] - - isZ = dataset.name in common.zprocs - isZveto = isZ or dataset.name in ["DYJetsToMuMuMass10to50PostVFP"] + isZveto = isZ or dataset.group in ["DYlowMass"] isWorZ = isW or isZ isTop = dataset.group == "Top" isQCDMC = dataset.group == "QCD" @@ -886,8 +887,8 @@ def build_graph(df, dataset): cols = [*nominal_cols, *unfolding_cols[level]] break - elif dataset.name == "ZmumuPostVFP": - if args.unfolding and dataset.name == "ZmumuPostVFP": + elif dataset.name == "Zmumu_2016PostVFP": + if args.unfolding and dataset.name == "Zmumu_2016PostVFP": df = unfolder_z.add_gen_histograms( args, df, results, dataset, corr_helpers, theory_helpers ) @@ -1827,7 +1828,7 @@ def build_graph(df, dataset): ) ) - elif dataset.name == "ZmumuPostVFP": + elif dataset.name == "Zmumu_2016PostVFP": unfolder_z.add_poi_as_noi_histograms( df, results, @@ -1844,13 +1845,13 @@ def build_graph(df, dataset): "nominal_weight", ] # assume to have same coeffs for plus and minus (no reason for it not to be the case) - if dataset.name in ["WplusmunuPostVFP", "WplustaunuPostVFP"]: + if dataset.name in 
["Wplusmunu_2016PostVFP", "Wplustaunu_2016PostVFP"]: helpers_class = muRmuFPolVar_helpers_plus process_name = "W" - elif dataset.name in ["WminusmunuPostVFP", "WminustaunuPostVFP"]: + elif dataset.name in ["Wminusmunu_2016PostVFP", "Wminustaunu_2016PostVFP"]: helpers_class = muRmuFPolVar_helpers_minus process_name = "W" - elif dataset.name in ["ZmumuPostVFP", "ZtautauPostVFP"]: + elif dataset.name in ["Zmumu_2016PostVFP", "Ztautau_2016PostVFP"]: helpers_class = muRmuFPolVar_helpers_Z process_name = "Z" else: diff --git a/scripts/histmakers/mw_with_mu_eta_pt_VETOEFFI.py b/scripts/histmakers/mw_with_mu_eta_pt_VETOEFFI.py index 78d1d1787..90d69c2b2 100644 --- a/scripts/histmakers/mw_with_mu_eta_pt_VETOEFFI.py +++ b/scripts/histmakers/mw_with_mu_eta_pt_VETOEFFI.py @@ -147,7 +147,7 @@ def build_graph(df, dataset): logger.info(f"build graph for dataset: {dataset.name}") results = [] isW = dataset.name in common.wprocs - isWmunu = dataset.name in ["WplusmunuPostVFP", "WminusmunuPostVFP"] + isWmunu = dataset.name in ["Wplusmunu_2016PostVFP", "Wminusmunu_2016PostVFP"] isZ = dataset.name in common.zprocs isWorZ = isW or isZ isTop = dataset.group == "Top" diff --git a/scripts/histmakers/mz_dilepton.py b/scripts/histmakers/mz_dilepton.py index 36884d92e..0a9ce5a72 100644 --- a/scripts/histmakers/mz_dilepton.py +++ b/scripts/histmakers/mz_dilepton.py @@ -115,7 +115,9 @@ parser = parsing.set_parser_default( parser, "aggregateGroups", ["Diboson", "Top", "Wtaunu", "Wmunu"] ) -parser = parsing.set_parser_default(parser, "excludeProcs", ["QCD"]) +parser = parsing.set_parser_default( + parser, "excludeProcs", ["QCD", "WtoNMu", "DYlowMass"] +) parser = parsing.set_parser_default( parser, "pt", common.get_default_ptbins(analysis_label) ) @@ -155,17 +157,17 @@ axis_ptV_thag = theoryAgnostic_axes[0] dilepton_ptV_binning = axis_ptV_thag.edges else: - dilepton_ptV_binning = common.get_dilepton_ptV_binning(args.finePtBinning) + dilepton_ptV_binning = common.ptZ_binning if not args.finePtBinning else range(200) if "yll" in args.axes: - # use 10 quantiles in case "yll" is used as nominal axis - edges_yll = common.yll_10quantiles_binning + # use 20 quantiles in case "yll" is used as nominal axis + edges_yll = common.yll_20quantiles_binning edges_absYll = edges_yll[len(edges_yll) // 2 :] axis_yll = hist.axis.Variable(edges_yll, name="yll") axis_absYll = hist.axis.Variable(edges_absYll, name="absYll", underflow=False) else: - axis_yll = hist.axis.Regular(20, -2.5, 2.5, name="yll") - axis_absYll = hist.axis.Regular(10, 0.0, 2.5, name="absYll", underflow=False) + axis_yll = hist.axis.Regular(100, -2.5, 2.5, name="yll") + axis_absYll = hist.axis.Regular(50, 0.0, 2.5, name="absYll", underflow=False) # available axes for dilepton validation plots all_axes = { @@ -312,13 +314,13 @@ overflow=False, ) - quantile_file = f"{common.data_dir}/angularCoefficients/mz_dilepton_scetlib_dyturboCorr_maxFiles_m1_alphaSunfoldingBinning_csQuantiles.hdf5" + quantile_file = f"{common.data_dir}/angularCoefficients/mz_dilepton_scetlib_dyturbo_CT18Z_N3p0LL_N2LO_Corr_maxFiles_m1_csQuantiles.hdf5" quantile_helper_csVars = make_quantile_helper( quantile_file, ["cosThetaStarll", "phiStarll"], ["ptll", "absYll"], name="nominal_csQuantiles", - processes=["ZmumuPostVFP"], + processes=["Zmumu_2016PostVFP"], n_quantiles=[n_quantiles], ) @@ -361,7 +363,9 @@ for p, grp in (("W", common.wprocs), ("Z", common.zprocs)) if any(d.name in grp for d in datasets) ] -theory_helpers_procs = theory_corrections.make_theory_helpers(args, procs=procs) 
+theory_helpers_procs = theory_corrections.make_theory_helpers(
+    args.pdfs, args.theoryCorr, procs=procs
+)

 # extra axes which can be used to label tensor_axes
 if args.binnedScaleFactors:
@@ -554,7 +558,7 @@ def build_graph(df, dataset):
         ]
         cols = [*cols, "run"]

-    if args.unfolding and dataset.name == "ZmumuPostVFP":
+    if args.unfolding and dataset.name == "Zmumu_2016PostVFP":
         df = unfolder_z.add_gen_histograms(
             args, df, results, dataset, corr_helpers, theory_helpers=theory_helpers
         )
@@ -993,7 +997,7 @@ def build_graph(df, dataset):
             )
             results.append(hNValidPixelHitsNonTrig)

-    if args.unfolding and args.poiAsNoi and dataset.name == "ZmumuPostVFP":
+    if args.unfolding and args.poiAsNoi and dataset.name == "Zmumu_2016PostVFP":
         unfolder_z.add_poi_as_noi_histograms(
             df,
             results,
@@ -1015,9 +1019,8 @@ def build_graph(df, dataset):

     if not args.noAuxiliaryHistograms:
         for obs in [
-            "ptll",
+            ["ptll", "yll"],
             "mll",
-            "yll",
             "cosThetaStarll",
             "phiStarll",
             "etaPlus",
@@ -1025,15 +1028,18 @@ def build_graph(df, dataset):
             "ptPlus",
             "ptMinus",
         ]:
+            if isinstance(obs, str):
+                obs = [obs]
+            obs_name = f"nominal_{'_'.join(obs)}"
+            obs_axes = [all_axes[o] for o in obs]
+
             if dataset.is_data:
-                results.append(df.HistoBoost(f"nominal_{obs}", [all_axes[obs]], [obs]))
+                results.append(df.HistoBoost(obs_name, obs_axes, obs))
             else:
                 results.append(
-                    df.HistoBoost(
-                        f"nominal_{obs}", [all_axes[obs]], [obs, "nominal_weight"]
-                    )
+                    df.HistoBoost(obs_name, obs_axes, [*obs, "nominal_weight"])
                 )
-                if isWorZ:
+                if isWorZ and not args.onlyMainHistograms:
                     df = syst_tools.add_theory_hists(
                         results,
                         df,
                         args,
                         dataset.name,
                         corr_helpers,
                         theory_helpers,
-                        [all_axes[obs]],
-                        [obs],
-                        base_name=f"nominal_{obs}",
+                        obs_axes,
+                        obs,
+                        base_name=obs_name,
                         for_wmass=False,
                     )
@@ -1055,18 +1061,19 @@ def build_graph(df, dataset):
                         f"nominal_{obs}", [all_axes[obs]], [obs, "nominal_weight"]
                     )
                 )
-                df = syst_tools.add_theory_hists(
-                    results,
-                    df,
-                    args,
-                    dataset.name,
-                    corr_helpers,
-                    theory_helpers,
-                    [all_axes[obs]],
-                    [obs],
-                    base_name=f"nominal_{obs}",
-                    for_wmass=False,
-                )
+                if not args.onlyMainHistograms:
+                    df = syst_tools.add_theory_hists(
+                        results,
+                        df,
+                        args,
+                        dataset.name,
+                        corr_helpers,
+                        theory_helpers,
+                        [all_axes[obs]],
+                        [obs],
+                        base_name=f"nominal_{obs}",
+                        for_wmass=False,
+                    )

     # test plots
     if args.validationHists:
diff --git a/scripts/histmakers/mz_lowPU.py b/scripts/histmakers/mz_lowPU.py
index 09a204644..adec375d0 100644
--- a/scripts/histmakers/mz_lowPU.py
+++ b/scripts/histmakers/mz_lowPU.py
@@ -19,8 +19,6 @@
 )
 parser = parsing.set_parser_default(parser, "era", "2017H")

-analysis_label = Datagroups.analysisLabel(os.path.basename(__file__))
-
 args = parser.parse_args()

 logger = logging.setup_logger(__file__, args.verbose, args.noColorLogger)
@@ -45,8 +43,7 @@
 ###################################
 flavor = args.flavor  # mumu, ee

-sigProcs = ["Zmumu"] if flavor == "mumu" else ["Zee"]
-base_group = sigProcs[0]
+base_group = f"Z{flavor}"

 # dilepton invariant mass cuts
 mass_min = 60
@@ -61,17 +58,17 @@
     filt=args.filterProcs,
     excl=list(
         set(
-            args.excludeProcs + ["singlemuon"] if flavor == "ee" else ["singleelectron"]
+            args.excludeProcs
+            + ([f"SingleMuon{args.era}"] if flavor == "ee" else [f"HighEGJet{args.era}"])
         )
     ),
     base_path=args.dataPath,
     extended="msht20an3lo" not in args.pdfs,
-    mode=analysis_label,
     era=args.era,
     nanoVersion="v12",
 )

-
 for d in datasets:
     logger.info(f"Dataset {d.name}")

@@ -108,7 +105,9 @@
 axes_mt = [axis_mt]
 cols_mt = ["transverseMass"]
-theory_helpers_procs = theory_corrections.make_theory_helpers(args) +theory_helpers_procs = theory_corrections.make_theory_helpers( + args.pdfs, args.theoryCorr +) axis_ptVgen = theory_helpers_procs["Z"]["qcdScale"].hist.axes["ptVgen"] axis_chargeVgen = theory_helpers_procs["Z"]["qcdScale"].hist.axes["chargeVgen"] @@ -139,7 +138,7 @@ theory_corrs = [*args.theoryCorr, *args.ewTheoryCorr] corr_helpers = theory_corrections.load_corr_helpers( - [d.name for d in datasets if d.name in common.vprocs_lowpu], theory_corrs + [d.name for d in datasets if d.name in common.vprocs], theory_corrs ) # recoil initialization @@ -154,11 +153,8 @@ def build_graph(df, dataset): results = [] - isW = dataset.name in common.wprocs_lowpu - isZ = dataset.name in common.zprocs_lowpu - theory_helpers = None - if dataset.name in common.vprocs_lowpu: + if dataset.name in common.vprocs: theory_helpers = theory_helpers_procs[dataset.name[0]] if dataset.is_data: @@ -172,7 +168,7 @@ def build_graph(df, dataset): axes = nominal_axes cols = nominal_cols - if args.unfolding and dataset.name in sigProcs: + if args.unfolding and dataset.group == base_group: df = unfolder_z.add_gen_histograms( args, df, results, dataset, corr_helpers, theory_helpers ) @@ -538,7 +534,7 @@ def build_graph(df, dataset): ) ) - if dataset.name in common.vprocs_lowpu: + if dataset.name in common.vprocs: df = syst_tools.add_theory_hists( results, df, @@ -662,7 +658,7 @@ def build_graph(df, dataset): if not args.noRecoil and args.recoilUnc: df = recoilHelper.add_recoil_unc_Z(df, results, dataset, c, a, n) - if args.unfolding and args.poiAsNoi and dataset.name in sigProcs: + if args.unfolding and args.poiAsNoi and dataset.group == base_group: unfolder_z.add_poi_as_noi_histograms( df, results, diff --git a/scripts/histmakers/mz_wlike_with_mu_eta_pt.py b/scripts/histmakers/mz_wlike_with_mu_eta_pt.py index 7f761f094..64dc42f9a 100644 --- a/scripts/histmakers/mz_wlike_with_mu_eta_pt.py +++ b/scripts/histmakers/mz_wlike_with_mu_eta_pt.py @@ -292,7 +292,7 @@ ) theory_helpers_procs = theory_corrections.make_theory_helpers( - args, corrs=["qcdScale", "alphaS", "pdf"] + args.pdfs, args.theoryCorr, corrs=["qcdScale", "alphaS", "pdf"] ) # extra axes which can be used to label tensor_axes @@ -1310,7 +1310,10 @@ def build_graph(df, dataset): "nominal_weight", ] # assume to have same coeffs for plus and minus (no reason for it not to be the case) - if dataset.name == "ZmumuPostVFP" or dataset.name == "ZtautauPostVFP": + if ( + dataset.name == "Zmumu_2016PostVFP" + or dataset.name == "Ztautau_2016PostVFP" + ): helpers_class = muRmuFPolVar_helpers_Z process_name = "Z" for coeffKey in helpers_class.keys(): @@ -1335,7 +1338,7 @@ def build_graph(df, dataset): storage=hist.storage.Double(), ) ) - if args.unfolding and dataset.name == "ZmumuPostVFP": + if args.unfolding and dataset.name == "Zmumu_2016PostVFP": for level in args.unfoldingLevels: noiAsPoiHistName = Datagroups.histName( "nominal", syst=f"{level}_yieldsUnfolding" diff --git a/scripts/histmakers/w_z_gen_dists.py b/scripts/histmakers/w_z_gen_dists.py index e2ce3f17f..c2d4a2091 100644 --- a/scripts/histmakers/w_z_gen_dists.py +++ b/scripts/histmakers/w_z_gen_dists.py @@ -46,12 +46,6 @@ action="store_true", help="Use unfolding binning to produce the gen results", ) -parser.add_argument( - "--useCorrByHelicityBinning", - action="store_true", - help="Use finer absY binning to produce the gen results." 
- "Used in particular to produce the smooth PDF corrections.", -) parser.add_argument( "--singleLeptonHists", action="store_true", @@ -98,9 +92,9 @@ help="Add axis to store info if the event has an outgoing charm quark", ) parser.add_argument( - "--finePtVBinning", + "--fineBinning", action="store_true", - help="Use 1 GeV binning for ptVgen (e.g., for theory corrections)", + help="Use 0.5 GeV binning for ptVgen and 0.05 in absYVgen (e.g., for theory corrections)", ) parser.add_argument( "--centralBosonPDFWeight", @@ -108,8 +102,8 @@ help="Apply PDF reweighting using boson parameterized corrections", ) - parser = parsing.set_parser_default(parser, "filterProcs", common.vprocs) +parser = parsing.set_parser_default(parser, "era", "13TeVGen") args = parser.parse_args() if not args.theoryCorrections: @@ -126,10 +120,10 @@ extended="msht20an3lo" not in args.pdfs, nanoVersion="v9", base_path=args.dataPath, - mode=analysis_label, + era=args.era, ) -logger.debug(f"Will process samples {[d.name for d in datasets]}") +logger.info(f"Will process samples {[d.name for d in datasets]}") axis_ygen = hist.axis.Regular(10, -5.0, 5.0, name="y") col_rapidity = "yVgen" if args.signedY else "absYVgen" @@ -162,8 +156,29 @@ overflow=False, ) +# fine mass bins for studies +# axis_massZgen = hist.axis.Regular( +# 120, 0, 120.0, name="massVgen", underflow=True, overflow=True +# ) +# axis_massZgen = hist.axis.Variable( +# [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 13000], +# name="massVgen", +# underflow=False, +# overflow=False, +# ) + +axis_massWgen = hist.axis.Variable([4.0, 13000.0], name="massVgen") +axis_massZgen = hist.axis.Regular(1, 60.0, 120.0, name="massVgen") + theory_corrs = [*args.theoryCorr, *args.ewTheoryCorr] -corr_helpers = theory_corrections.load_corr_helpers(common.vprocs, theory_corrs) +procsWithTheoryCorr = [d.name for d in datasets if d.name in common.vprocs] +if len(procsWithTheoryCorr) and len(theory_corrs): + corr_helpers = theory_corrections.load_corr_helpers( + procsWithTheoryCorr, theory_corrs + ) +else: + corr_helpers = {} + corrs = [] if args.helicity and args.propagatePDFstoHelicity: @@ -171,7 +186,7 @@ if args.centralBosonPDFWeight: corrs.append("pdf_central") theory_helpers_procs = theory_corrections.make_theory_helpers( - args, procs=["Z", "W"], corrs=corrs + args.pdfs, args.theoryCorr, procs=["Z", "W"], corrs=corrs ) @@ -197,34 +212,23 @@ def build_graph(df, dataset): else: theory_helpers = {} - if args.addCharmAxis: - axis_massWgen = hist.axis.Variable( - [4.0, 13000.0], name="massVgen", underflow=True, overflow=False - ) - else: - axis_massWgen = hist.axis.Regular( - 120, 0, 120.0, name="massVgen", underflow=True, overflow=True - ) - - axis_massZgen = hist.axis.Regular(12, 60.0, 120.0, name="massVgen") - theoryAgnostic_axes, _ = differential.get_theoryAgnostic_axes( ptV_flow=True, absYV_flow=True, wlike="Z" in dataset.name ) axis_ptV_thag = theoryAgnostic_axes[0] axis_yV_thag = theoryAgnostic_axes[1] - if args.useUnfoldingBinning: + if args.useUnfoldingBinning and "Z" in dataset.name: unfolding_axes, unfolding_cols, unfolding_selections = ( differential.get_dilepton_axes( ["ptVGen", "absYVGen"], { - "ptll": common.get_dilepton_ptV_binning(fine=False), - "yll": (common.yll_10quantiles_binning), + "ptll": common.ptZ_binning, + "yll": common.yll_20quantiles_binning, }, "prefsr", add_out_of_acceptance_axis=False, - rebin_pt=not args.genPtBinningAsReco, + rebin_pt=None if args.genPtBinningAsReco else unfolding_tools.rebin_pt, ) ) axis_absYVgen = hist.axis.Variable( @@ 
-237,20 +241,6 @@ def build_graph(df, dataset): name="ptVgen", underflow=False, ) - - axis_massZgen = hist.axis.Regular(1, 60.0, 120.0, name="massVgen") - elif args.useCorrByHelicityBinning: - axis_absYVgen = hist.axis.Variable( - common.absYVgen_binning_corr, - name="absYVgen", - underflow=False, - ) - axis_ptVgen = hist.axis.Variable( - common.ptVgen_binning_corr, - name="ptVgen", - underflow=False, - ) - axis_massZgen = hist.axis.Regular(1, 60.0, 120.0, name="massVgen") elif args.useTheoryAgnosticBinning: axis_absYVgen = hist.axis.Variable( axis_yV_thag.edges, # same axis as theory agnostic norms @@ -263,31 +253,24 @@ def build_graph(df, dataset): underflow=False, ) else: + if args.fineBinning: + edges_ptV = np.append(np.arange(0, 100.5, 0.5), 13000.0) + edges_absYV = np.arange(0, 5.05, 0.05) + else: + edges_ptV = ( + common.ptZgen_binning_corr if isZ else common.ptWgen_binning_corr + ) + edges_absYV = ( + common.absYZgen_binning_corr if isZ else common.absYWgen_binning_corr + ) + axis_absYVgen = hist.axis.Variable( - [ - 0.0, - 0.25, - 0.5, - 0.75, - 1.0, - 1.25, - 1.5, - 1.75, - 2.0, - 2.25, - 2.5, - 2.75, - 3.0, - 3.25, - 3.5, - 4.0, - 5.0, - ], # this is the same binning as hists from theory corrections + edges_absYV, name="absYVgen", underflow=False, ) axis_ptVgen = hist.axis.Variable( - (*common.get_dilepton_ptV_binning(fine=args.finePtVBinning), 13000.0), + edges_ptV, name="ptVgen", underflow=False, ) @@ -311,7 +294,9 @@ def build_graph(df, dataset): df, dataset.name, corr_helpers, args, theory_helpers ) - if isZ: + if isZ or dataset.group in [ + "DYlowMass", + ]: nominal_axes = [ axis_massZgen, axis_rapidity, @@ -896,8 +881,8 @@ def build_graph(df, dataset): and "winhac" not in dataset.name and "LHEScaleWeight" in df.GetColumnNames() and "LHEPdfWeight" in df.GetColumnNames() + and not args.onlyMainHistograms ): - df = syst_tools.add_theory_hists( results, df, @@ -969,29 +954,20 @@ def build_graph(df, dataset): helicity_xsecs_out = {} for dataset in datasets: name = dataset.name - if "nominal_gen_helicity_xsecs_scale" not in resultdict[name]["output"]: - logger.warning( - f"Failed to find nominal_gen_helicity_xsecs_scale hist for proc {name}. Skipping!" - ) - continue for var in ["", "lhe", "hardProcess", "postShower", "postBeamRemnants"]: - if name not in [ - "ZmumuPostVFP", - "Zee_MiNNLO", - "Zmumu_MiNNLO", - "WplusmunuPostVFP", - "WminusmunuPostVFP", - ]: - continue - if var == "": suffix = "" else: suffix = f"_{var}" - helicity_xsecs = resultdict[name]["output"][ - f"nominal_gen_helicity_xsecs_scale{suffix}" - ].get() + histname = f"nominal_gen_helicity_xsecs_scale{suffix}" + if histname not in resultdict[name]["output"].keys(): + logger.warning( + f"Failed to find '{histname}' hist for proc '{name}'. Skipping!" + ) + continue + + helicity_xsecs = resultdict[name]["output"][histname].get() key = f"{name[0]}{suffix}" @@ -1014,17 +990,11 @@ def build_graph(df, dataset): "CT18alphaS002", "NNPDF40alphaS001", ]: - if name not in [ - "ZmumuPostVFP", - "Zee_MiNNLO", - "Zmumu_MiNNLO", - "WplusmunuPostVFP", - "WminusmunuPostVFP", - ]: - continue - histname = f"nominal_gen_helicity_nominal_gen_pdf{var}" if histname not in resultdict[name]["output"].keys(): + logger.warning( + f"Failed to find '{histname}' hist for proc '{name}'. Skipping!" 
+ ) continue helicity_xsecs = resultdict[name]["output"][histname].get() diff --git a/scripts/plotting/makeScetlibComparisons.py b/scripts/plotting/makeScetlibComparisons.py index 818e116ed..8300f400e 100644 --- a/scripts/plotting/makeScetlibComparisons.py +++ b/scripts/plotting/makeScetlibComparisons.py @@ -16,9 +16,9 @@ lookup = { "minnlo": { "dirs": { - "z": ("ZmumuPostVFP", "output"), - "wp": ("WplusmunuPostVFP", "output"), - "wm": ("WminusmunuPostVFP", "output"), + "z": ("Zmumu_2016PostVFP", "output"), + "wp": ("Wplusmunu_2016PostVFP", "output"), + "wm": ("Wminusmunu_2016PostVFP", "output"), }, "ptV": { "hist": "nominal_gen", diff --git a/scripts/plotting/make_postfit_vgen.py b/scripts/plotting/make_postfit_vgen.py index f3c4a274a..ef6da6a80 100644 --- a/scripts/plotting/make_postfit_vgen.py +++ b/scripts/plotting/make_postfit_vgen.py @@ -94,7 +94,11 @@ def load_hist(filename, fittype="postfit", helicity=False): unfolded_data = pickle.load(open(args.unfolded, "rb")) if args.unfolded else None -procs = ["WplusmunuPostVFP", "WminusmunuPostVFP"] if args.w else ["ZmumuPostVFP"] +procs = ( + ["Wplusmunu_2016PostVFP", "Wminusmunu_2016PostVFP"] + if args.w + else ["Zmumu_2016PostVFP"] +) hists_nom = [] hists_err = [] diff --git a/scripts/rabbit/feedRabbitTheory.py b/scripts/rabbit/feedRabbitTheory.py index 166bedaa5..8cfa9bf15 100644 --- a/scripts/rabbit/feedRabbitTheory.py +++ b/scripts/rabbit/feedRabbitTheory.py @@ -673,16 +673,16 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): lumi = 16800 inputs = input_tools.load_results_h5py(h5file) - weight_sum = inputs["WplusmunuPostVFP"]["weight_sum"] - xsec = inputs["WplusmunuPostVFP"]["dataset"]["xsec"] - h_Wp_lep_fiducial = inputs["WplusmunuPostVFP"]["output"][ + weight_sum = inputs["Wplusmunu_2016PostVFP"]["weight_sum"] + xsec = inputs["Wplusmunu_2016PostVFP"]["dataset"]["xsec"] + h_Wp_lep_fiducial = inputs["Wplusmunu_2016PostVFP"]["output"][ "nominal_gen_prefsrlep" ].get() h_Wp_lep_fiducial *= xsec * lumi / weight_sum - weight_sum = inputs["WminusmunuPostVFP"]["weight_sum"] - xsec = inputs["WminusmunuPostVFP"]["dataset"]["xsec"] - h_Wm_lep_fiducial = inputs["WminusmunuPostVFP"]["output"][ + weight_sum = inputs["Wminusmunu_2016PostVFP"]["weight_sum"] + xsec = inputs["Wminusmunu_2016PostVFP"]["dataset"]["xsec"] + h_Wm_lep_fiducial = inputs["Wminusmunu_2016PostVFP"]["output"][ "nominal_gen_prefsrlep" ].get() h_Wm_lep_fiducial *= xsec * lumi / weight_sum @@ -784,17 +784,17 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): ) as h5file: results = input_tools.load_results_h5py(h5file) lumi = 16800 - h_Wp = results["WplusmunuPostVFP"]["output"][ + h_Wp = results["Wplusmunu_2016PostVFP"]["output"][ "prefsr_pdfAlphaSByHelicity" ].get() - weight_sum = results["WplusmunuPostVFP"]["weight_sum"] - xsec = results["WplusmunuPostVFP"]["dataset"]["xsec"] + weight_sum = results["Wplusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wplusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wp *= xsec * lumi / weight_sum - h_Wm = results["WminusmunuPostVFP"]["output"][ + h_Wm = results["Wminusmunu_2016PostVFP"]["output"][ "prefsr_pdfAlphaSByHelicity" ].get() - weight_sum = results["WminusmunuPostVFP"]["weight_sum"] - xsec = results["WminusmunuPostVFP"]["dataset"]["xsec"] + weight_sum = results["Wminusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wminusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wm *= xsec * lumi / weight_sum h_W = hh.addHists(h_Wp, h_Wm) h_W = h_W.project("absEtaGen", "ptGen", "qGen", "vars") @@ -825,7 +825,7 
@@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): args.predAiFile.replace("w_z_helicity_xsecs", "w_z_gen_dists"), "r" ) as ff: inputs = input_tools.load_results_h5py(ff) - alpha_vars_hels = inputs["ZmumuPostVFP"]["output"][ + alpha_vars_hels = inputs["Zmumu_2016PostVFP"]["output"][ "nominal_gen_helicity_nominal_gen_pdfCT18ZalphaS002" ].get() writer.add_systematic( @@ -861,8 +861,8 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): "r", ) as ff: inputs = input_tools.load_results_h5py(ff) - mass_vars_Wp = inputs["WplusmunuPostVFP"]["prefsr_massWeightW"].get() - mass_vars_Wm = inputs["WminusmunuPostVFP"]["prefsr_massWeightW"].get() + mass_vars_Wp = inputs["Wplusmunu_2016PostVFP"]["prefsr_massWeightW"].get() + mass_vars_Wm = inputs["Wminusmunu_2016PostVFP"]["prefsr_massWeightW"].get() mass_vars_W = hh.addHists(mass_vars_Wp, mass_vars_Wm) mass_vars_W = mass_vars_W.project( @@ -925,15 +925,17 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): results = input_tools.load_results_h5py(h5file) lumi = 16800 - h_Wp = results["WplusmunuPostVFP"]["output"]["prefsr_scetlib_dyturboCorr"].get() - weight_sum = results["WplusmunuPostVFP"]["weight_sum"] - xsec = results["WplusmunuPostVFP"]["dataset"]["xsec"] + h_Wp = results["Wplusmunu_2016PostVFP"]["output"][ + "prefsr_scetlib_dyturboCorr" + ].get() + weight_sum = results["Wplusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wplusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wp *= xsec * lumi / weight_sum - h_Wm = results["WminusmunuPostVFP"]["output"][ + h_Wm = results["Wminusmunu_2016PostVFP"]["output"][ "prefsr_scetlib_dyturboCorr" ].get() - weight_sum = results["WminusmunuPostVFP"]["weight_sum"] - xsec = results["WminusmunuPostVFP"]["dataset"]["xsec"] + weight_sum = results["Wminusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wminusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wm *= xsec * lumi / weight_sum h_W = hh.addHists(h_Wp, h_Wm) generator_vars["W"] = {} @@ -941,26 +943,34 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): "absEtaGen", "ptGen", "qGen", "vars" ) - h_Wp = results["WplusmunuPostVFP"]["output"]["prefsr_pdfMSHT20mcrange"].get() - weight_sum = results["WplusmunuPostVFP"]["weight_sum"] - xsec = results["WplusmunuPostVFP"]["dataset"]["xsec"] + h_Wp = results["Wplusmunu_2016PostVFP"]["output"][ + "prefsr_pdfMSHT20mcrange" + ].get() + weight_sum = results["Wplusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wplusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wp *= xsec * lumi / weight_sum - h_Wm = results["WminusmunuPostVFP"]["output"]["prefsr_pdfMSHT20mcrange"].get() - weight_sum = results["WminusmunuPostVFP"]["weight_sum"] - xsec = results["WminusmunuPostVFP"]["dataset"]["xsec"] + h_Wm = results["Wminusmunu_2016PostVFP"]["output"][ + "prefsr_pdfMSHT20mcrange" + ].get() + weight_sum = results["Wminusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wminusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wm *= xsec * lumi / weight_sum h_W = hh.addHists(h_Wp, h_Wm) generator_vars["W"][f"{args.predGenerator}MSHT20mcrange"] = h_W.project( "absEtaGen", "ptGen", "qGen", "pdfVar" ) - h_Wp = results["WplusmunuPostVFP"]["output"]["prefsr_pdfMSHT20mbrange"].get() - weight_sum = results["WplusmunuPostVFP"]["weight_sum"] - xsec = results["WplusmunuPostVFP"]["dataset"]["xsec"] + h_Wp = results["Wplusmunu_2016PostVFP"]["output"][ + "prefsr_pdfMSHT20mbrange" + ].get() + weight_sum = results["Wplusmunu_2016PostVFP"]["weight_sum"] + xsec = 
results["Wplusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wp *= xsec * lumi / weight_sum - h_Wm = results["WminusmunuPostVFP"]["output"]["prefsr_pdfMSHT20mbrange"].get() - weight_sum = results["WminusmunuPostVFP"]["weight_sum"] - xsec = results["WminusmunuPostVFP"]["dataset"]["xsec"] + h_Wm = results["Wminusmunu_2016PostVFP"]["output"][ + "prefsr_pdfMSHT20mbrange" + ].get() + weight_sum = results["Wminusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wminusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wm *= xsec * lumi / weight_sum h_W = hh.addHists(h_Wp, h_Wm) generator_vars["W"][f"{args.predGenerator}MSHT20mbrange"] = h_W.project( @@ -1131,13 +1141,13 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): "r", ) as ff: inputs = input_tools.load_results_h5py(ff) - pdf_vars = inputs["ZmumuPostVFP"]["output"][ + pdf_vars = inputs["Zmumu_2016PostVFP"]["output"][ "nominal_gen_helicity_pdfCT18Z" ].get() - pdf_vars_Wp = inputs["WplusmunuPostVFP"]["output"][ + pdf_vars_Wp = inputs["Wplusmunu_2016PostVFP"]["output"][ "nominal_gen_helicity_pdfCT18Z" ].get() - pdf_vars_Wm = inputs["WminusmunuPostVFP"]["output"][ + pdf_vars_Wm = inputs["Wminusmunu_2016PostVFP"]["output"][ "nominal_gen_helicity_pdfCT18Z" ].get() pdf_vars_W = hh.addHists(pdf_vars_Wp, pdf_vars_Wm) @@ -1226,17 +1236,17 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): ) as h5file: results = input_tools.load_results_h5py(h5file) lumi = 16800 - h_Wp = results["WplusmunuPostVFP"]["output"][ + h_Wp = results["Wplusmunu_2016PostVFP"]["output"][ "prefsr_pdfCT18ZUncertByHelicity" ].get() - weight_sum = results["WplusmunuPostVFP"]["weight_sum"] - xsec = results["WplusmunuPostVFP"]["dataset"]["xsec"] + weight_sum = results["Wplusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wplusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wp *= xsec * lumi / weight_sum - h_Wm = results["WminusmunuPostVFP"]["output"][ + h_Wm = results["Wminusmunu_2016PostVFP"]["output"][ "prefsr_pdfCT18ZUncertByHelicity" ].get() - weight_sum = results["WminusmunuPostVFP"]["weight_sum"] - xsec = results["WminusmunuPostVFP"]["dataset"]["xsec"] + weight_sum = results["Wminusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wminusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wm *= xsec * lumi / weight_sum h_W = hh.addHists(h_Wp, h_Wm) h_W = h_W.project("absEtaGen", "ptGen", "qGen", "pdfVar") @@ -1368,13 +1378,17 @@ def apply_coarse_correction(fine_hist, coarse_corr, check_align=True): ) as h5file: results = input_tools.load_results_h5py(h5file) lumi = 16800 - h_Wp = results["WplusmunuPostVFP"]["output"]["prefsr_qcdScaleByHelicity"].get() - weight_sum = results["WplusmunuPostVFP"]["weight_sum"] - xsec = results["WplusmunuPostVFP"]["dataset"]["xsec"] + h_Wp = results["Wplusmunu_2016PostVFP"]["output"][ + "prefsr_qcdScaleByHelicity" + ].get() + weight_sum = results["Wplusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wplusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wp *= xsec * lumi / weight_sum - h_Wm = results["WminusmunuPostVFP"]["output"]["prefsr_qcdScaleByHelicity"].get() - weight_sum = results["WminusmunuPostVFP"]["weight_sum"] - xsec = results["WminusmunuPostVFP"]["dataset"]["xsec"] + h_Wm = results["Wminusmunu_2016PostVFP"]["output"][ + "prefsr_qcdScaleByHelicity" + ].get() + weight_sum = results["Wminusmunu_2016PostVFP"]["weight_sum"] + xsec = results["Wminusmunu_2016PostVFP"]["dataset"]["xsec"] h_Wm *= xsec * lumi / weight_sum h_W = hh.addHists(h_Wp, h_Wm) diff --git a/scripts/rabbit/setupRabbit.py b/scripts/rabbit/setupRabbit.py index 
49efd6ca7..2af984666 100644
--- a/scripts/rabbit/setupRabbit.py
+++ b/scripts/rabbit/setupRabbit.py
@@ -180,7 +180,13 @@ def make_parser(parser=None):
         type=str,
         default=None,
         help="Add BSM as independent process, not propagating the effect into the fakes",
-        choices=["WtoNMu_0", "WtoNMu_5", "WtoNMu_10", "WtoNMu_30", "WtoNMu_50"],
+        choices=[
+            "WtoNMuMass5",
+            "WtoNMuMass10",
+            "WtoNMuMass30",
+            "WtoNMuMass50",
+            "WtoMuNuSMEFT",
+        ],
     )
     parser.add_argument(
         "--addBSMMixing",
diff --git a/scripts/utilities/run_histmakers.sh b/scripts/utilities/run_histmakers.sh
index 0e63e35d2..40526d70d 100644
--- a/scripts/utilities/run_histmakers.sh
+++ b/scripts/utilities/run_histmakers.sh
@@ -15,13 +15,13 @@ shift

 if [ "$MODE" == "wmass" ]; then
     HISTMAKER="mw_with_mu_eta_pt"
-    separateProcs=("WminusmunuPostVFP" "WplusmunuPostVFP" "WminustaunuPostVFP" "WplustaunuPostVFP")
+    separateProcs=("Wminusmunu_2016PostVFP" "Wplusmunu_2016PostVFP" "Wminustaunu_2016PostVFP" "Wplustaunu_2016PostVFP")
 elif [ "$MODE" == "wlike" ]; then
     HISTMAKER="mz_wlike_with_mu_eta_pt"
-    separateProcs=("ZmumuPostVFP" "ZtautauPostVFP")
+    separateProcs=("Zmumu_2016PostVFP" "Ztautau_2016PostVFP")
 elif [ "$MODE" == "dilepton" ]; then
     HISTMAKER="mz_dilepton"
-    separateProcs=("ZmumuPostVFP" "ZtautauPostVFP")
+    separateProcs=("Zmumu_2016PostVFP" "Ztautau_2016PostVFP")
 fi

 OUTPUT_FILE=$OUTPUT_DIR/${HISTMAKER}_${POSTFIX}.hdf5
diff --git a/utilities/common.py b/utilities/common.py
index 6aa4e42ba..5d7f4b71f 100644
--- a/utilities/common.py
+++ b/utilities/common.py
@@ -8,15 +8,33 @@
 wremnants_dir = f"{pathlib.Path(__file__).parent}/../wremnants"
 data_dir = f"{pathlib.Path(__file__).parent}/../wremnants-data/data/"

+BR_Z_LEP = 3 * 0.0336  # PDG
+BR_Z_Nu = 3 * 0.067
+BR_Z_Q = 1 - (BR_Z_LEP + BR_Z_Nu)
+BR_W_LEP = 3 * 0.1086  # PDG
 BR_TAUToMU = 0.1739
 BR_TAUToE = 0.1782
-# cross sections in pb at sqrt(s)=13Tev
-xsec_DYJetsToMuMu = 2001.9
-xsec_WplusJetsToMuNu = 11765.9
-xsec_WminusJetsToMuNu = 8703.87
-xsec_DYJetsToMuMuMass10to50 = 6997.0
 Z_TAU_TO_LEP_RATIO = 1.0 - (1.0 - BR_TAUToMU - BR_TAUToE) ** 2

+# cross sections in pb at sqrt(s) = 13 TeV (TODO: add source information)
+xsec_DYJetsToLL = 2001.9
+xsec_WplusJetsToLNu = 11765.9
+xsec_WminusJetsToLNu = 8703.87
+xsec_DYJetsToLLMass10to50 = 6997.0
+
+xsec_WW = 118.7
+xsec_WZ = 47.13  # from https://twiki.cern.ch/twiki/bin/view/CMS/SummaryTable1G25ns
+xsec_ZZ = 16.523  # from https://twiki.cern.ch/twiki/bin/view/CMS/SummaryTable1G25ns
+
+# TODO replace by BR
+xsec_WWTo2L2Nu = 12.6  # xsec_WW * BR_W_LEP * BR_W_LEP
+xsec_WWTo1L1Nu = 52.146  # xsec_WW * BR_W_LEP * (1 - BR_W_LEP) * 2 (factor 2 because either W can decay to quarks)
+xsec_WZTo3LNu = 4.91  # 4.42965*1.109 (NLO-to-NNLO k-factor); the NLO XS should be checked, since it depends strongly on the dilepton mass cut
+xsec_WZTo2Q2L = 5.4341  # 4.9*1.109
+xsec_WZTo1L1Nu2Q = 11.781  # 10.71*1.10
+xsec_ZZTo2L2Nu = 0.60  # check xsec_ZZ * BR_Z_Nu * BR_Z_LEP * 2
+xsec_ZZTo2Q2L = 5.1  # check xsec_ZZ * BR_Z_Q * (BR_Z_LEP+BR_Z_Nu) * 2
+
 # ------------------------------------
 # GenXsecAnalyzer:
 # ------------------------------------
@@ -28,14 +46,76 @@
 # After filter: final equivalent lumi for 1M events (1/fb) = 1.780e+02 +- 2.178e-01
 xsec_GGtoMuMu = 5.619

-# BSM heavy neutrino samples
+# BSM heavy neutrino samples; the cross section is just a dummy number
 xsec_WtoNMu = 100

-wprocs = [
-    "WplusmunuPostVFP",
-    "WminusmunuPostVFP",
-    "WminustaunuPostVFP",
-    "WplustaunuPostVFP",
+## eras
+eras_run2 = ["2016PreVFP", "2016PostVFP", "2017", "2018"]
+
+supported_eras =
eras_run2 + [
+    "2017G",
+    "2017H",
+    "2023_PUAVE1",
+    "2023_PUAVE2",
+    "2023_PUAVE5",
+    "2023_PUAVE10",
+    "13TeVGen",
+]
+
+## Samples with sqrt(s) = 13 TeV
+# central MiNNLO samples with muon decay
+wprocs_mu_minnlo_run2 = [f"Wplusmunu_{e}" for e in eras_run2] + [
+    f"Wminusmunu_{e}" for e in eras_run2
+]
+zprocs_mu_minnlo_run2 = [
+    f"Zmumu_{e}" for e in eras_run2
+]  # + [f"DYJetsToMuMuMass10to50_{e}" for e in eras_run2]
+
+# central MiNNLO samples with muon or e decay
+wprocs_emu_minnlo_2017H = [
+    "Wplusmunu_2017H",
+    "Wminusmunu_2017H",
+    "Wplusenu_2017H",
+    "Wminusenu_2017H",
+]
+zprocs_emu_minnlo_2017H = ["Zmumu_2017H", "Zee_2017H"]
+vprocs_emu_minnlo_2017H = wprocs_emu_minnlo_2017H + zprocs_emu_minnlo_2017H
+
+wprocs_emu_minnlo = wprocs_mu_minnlo_run2 + wprocs_emu_minnlo_2017H
+zprocs_emu_minnlo = zprocs_mu_minnlo_run2 + zprocs_emu_minnlo_2017H
+vprocs_emu_minnlo = wprocs_emu_minnlo + zprocs_emu_minnlo
+
+# central MiNNLO samples with tau
+wprocs_tau_minnlo_run2 = [f"Wplustaunu_{e}" for e in eras_run2] + [
+    f"Wminustaunu_{e}" for e in eras_run2
+]
+zprocs_tau_minnlo_run2 = [f"Ztautau_{e}" for e in eras_run2] + [
+    f"Ztautau10to50_{e}" for e in eras_run2
+]
+
+wprocs_tau_minnlo_2017H = [
+    "Wplustaunu_2017H",
+    "Wminustaunu_2017H",
+]
+zprocs_tau_minnlo_2017H = [
+    "Ztautau_2017H",
+]
+
+wprocs_tau_minnlo = wprocs_tau_minnlo_run2 + wprocs_tau_minnlo_2017H
+zprocs_tau_minnlo = zprocs_tau_minnlo_run2 + zprocs_tau_minnlo_2017H
+vprocs_tau_minnlo = wprocs_tau_minnlo + zprocs_tau_minnlo
+
+wprocs_minnlo = wprocs_emu_minnlo + wprocs_tau_minnlo
+zprocs_minnlo = zprocs_emu_minnlo + zprocs_tau_minnlo
+vprocs_minnlo = wprocs_minnlo + zprocs_minnlo
+
+wprocs_2017H = wprocs_emu_minnlo_2017H + wprocs_tau_minnlo_2017H
+zprocs_2017H = zprocs_emu_minnlo_2017H + zprocs_tau_minnlo_2017H
+vprocs_2017H = wprocs_2017H + zprocs_2017H
+
+# alternative gen samples at sqrt(s) = 13 TeV
+wprocs_alt = [
     "Wplusmunu_MiNNLO-noqedisr",
     "Wminusmunu_MiNNLO-noqedisr",
     "Wplusmunu_horace-lo-photos",
@@ -56,14 +136,8 @@
     "Wminusmunu_winhac-nlo",
     "WplusCharmToMuNu",
     "WminusCharmToMuNu",
-    "WtoNMu_MN-5-V-0p001",
-    "WtoNMu_MN-10-V-0p001",
-    "WtoNMu_MN-30-V-0p001",
-    "WtoNMu_MN-50-V-0p001",
 ]
-zprocs = [
-    "ZmumuPostVFP",
-    "ZtautauPostVFP",
+zprocs_alt = [
     "ZmumuMiNLO",
     "ZmumuNNLOPS",
     "Zmumu_MiNNLO-noqedisr",
@@ -84,28 +158,48 @@
     "Zmumu_powheg-nloew",
 ]
-vprocs = wprocs + zprocs
-zprocs_recoil = ["ZmumuPostVFP"]
-wprocs_recoil = ["WplusmunuPostVFP", "WminusmunuPostVFP"]
-
-wprocs_lowpu = [
-    "Wminusmunu",
-    "Wminusenu",
-    "Wminustaunu",
-    "Wplusmunu",
-    "Wplusenu",
-    "Wplustaunu",
+wprocs_bsm = [
+    "WtoNMuMass5_2016PostVFP",
+    "WtoNMuMass10_2016PostVFP",
+    "WtoNMuMass30_2016PostVFP",
+    "WtoNMuMass50_2016PostVFP",
+    "WtoMuNuSMEFT_2016PostVFP",
+]
+
+## Samples with sqrt(s) = 5020 GeV
+wprocs_emu_minnlo_2017G = [
+    "Wplusmunu_2017G",
+    "Wminusmunu_2017G",
+    "Wplusenu_2017G",
+    "Wminusenu_2017G",
+]
+zprocs_emu_minnlo_2017G = ["Zmumu_2017G", "Zee_2017G"]
+vprocs_emu_minnlo_2017G = wprocs_emu_minnlo_2017G + zprocs_emu_minnlo_2017G
+
+wprocs_tau_minnlo_2017G = [
+    "Wplustaunu_2017G",
+    "Wminustaunu_2017G",
+]
+zprocs_tau_minnlo_2017G = [
+    "Ztautau_2017G",
 ]
-zprocs_lowpu = ["Zmumu", "Zee", "Ztautau"]
-vprocs_lowpu = wprocs_lowpu + zprocs_lowpu
+vprocs_tau_minnlo_2017G = wprocs_tau_minnlo_2017G + zprocs_tau_minnlo_2017G
+
+wprocs_minnlo_2017G = wprocs_emu_minnlo_2017G + wprocs_tau_minnlo_2017G
+zprocs_minnlo_2017G = zprocs_emu_minnlo_2017G + zprocs_tau_minnlo_2017G
+vprocs_minnlo_2017G = wprocs_minnlo_2017G +
zprocs_minnlo_2017G
+
+# all W and Z samples
+wprocs = wprocs_minnlo + wprocs_alt + wprocs_bsm + wprocs_minnlo_2017G
+zprocs = zprocs_minnlo + zprocs_alt + zprocs_minnlo_2017G
+vprocs = wprocs + zprocs
+
+zprocs_recoil = ["Zmumu_2016PostVFP"]
+wprocs_recoil = ["Wplusmunu_2016PostVFP", "Wminusmunu_2016PostVFP"]
+
 zprocs_recoil_lowpu = ["Zmumu", "Zee"]
 wprocs_recoil_lowpu = ["Wminusmunu", "Wminusenu", "Wplusmunu", "Wplusenu"]
-background_MCprocs = ["Top", "Diboson", "QCD", "DYlowMass"]
-zprocs_all = zprocs_lowpu + zprocs
-wprocs_all = wprocs_lowpu + wprocs
-vprocs_all = vprocs_lowpu + vprocs
-
 # input files for muon momentum scale nuisances
 calib_dir = f"{data_dir}/calibration/"
 closure_dir = f"{data_dir}/closure/"
@@ -136,9 +230,8 @@
     "M": 1e-6,
 }

-## 5% quantiles from aMC@NLO used in SMP-18-012
-# ptV_5quantiles_binning = [0.0, 1.971, 2.949, 3.838, 4.733, 5.674, 6.684, 7.781, 8.979, 10.303, 11.777, 13.435, 15.332, 17.525, 20.115, 23.245, 27.173, 32.414, 40.151, 53.858, 13000.0]
-## 10% quantiles from aMC@NLO used in SMP-18-012 with some rounding <== This one worked fine with toys
+## for W used in SMP-18-012
+# 10% quantiles from aMC@NLO used in SMP-18-012 with some rounding <== This one worked fine with toys
 ptV_10quantiles_binning = [
     0.0,
     2.95,
@@ -152,6 +245,30 @@
     40.15,
     13000.0,
 ]
+# 5% quantiles from aMC@NLO used in SMP-18-012
+ptV_20quantiles_binning = [
+    0.0,
+    1.971,
+    2.949,
+    3.838,
+    4.733,
+    5.674,
+    6.684,
+    7.781,
+    8.979,
+    10.303,
+    11.777,
+    13.435,
+    15.332,
+    17.525,
+    20.115,
+    23.245,
+    27.173,
+    32.414,
+    40.151,
+    53.858,
+    13000.0,
+]
 # Integer rounded version of the 5% quantiles; h[::hist.rebin(2)] gives the 10% quantiles
 ptV_binning = [
     0,
@@ -176,33 +293,80 @@
     54,
     13000,
 ]
-ptV_corr_binning = ptV_binning[:-4] + list(range(30, 110, 10))
-absYV_binning = [
+## for Z
+# approximate 2.5% quantiles, used in SMP-25-16, SMP-25-17 for the Z detector level fits
+ptZ_binning = [
     0,
-    0.25,
-    0.5,
-    0.75,
     1,
-    1.25,
     1.5,
-    1.75,
     2,
-    2.25,
     2.5,
-    2.75,
     3,
-    3.25,
     3.5,
-    3.75,
     4,
+    4.5,
+    5,
+    5.5,
+    6,
+    6.5,
+    7,
+    7.5,
+    8,
+    8.5,
+    9,
+    9.5,
+    10,
+    10.5,
+    11,
+    11.5,
+    12,
+    13,
+    14,
+    15,
+    16,
+    17,
+    18,
+    19,
+    20,
+    22,
+    24,
+    26,
+    28,
+    30,
+    33,
+    37,
+    44,
+    100,
 ]
-
+# for the Z, used in SMP-25-16, SMP-25-17
 yll_10quantiles_binning = [-2.5, -1.5, -1.0, -0.5, -0.25, 0, 0.25, 0.5, 1.0, 1.5, 2.5]
+yll_20quantiles_binning = [
+    -2.5,
+    -1.8,
+    -1.5,
+    -1.3,
+    -1.1,
+    -0.9,
+    -0.7,
+    -0.5,
+    -0.3,
+    -0.15,
+    0,
+    0.15,
+    0.3,
+    0.5,
+    0.7,
+    0.9,
+    1.1,
+    1.3,
+    1.5,
+    1.8,
+    2.5,
+]
-absYVgen_binning_corr = np.concatenate(
-    (np.arange(0, 2.6, 0.25), [2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 5.0])
-)
-ptVgen_binning_corr = [
+## for Ai based corrections and uncertainties (e.g.
-absYVgen_binning_corr = np.concatenate(
-    (np.arange(0, 2.6, 0.25), [2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 5.0])
-)
-ptVgen_binning_corr = [
+## for Ai based corrections and uncertainties (e.g. TheoryCorrections/ByHelicity/)
+# for the W, 40 quantiles
+ptWgen_binning_corr = [
     0,
     1,
     2,
@@ -245,7 +409,42 @@
     54,
     75,
     100,
-    1300,
+    13000,
+]
+absYWgen_binning_corr = [
+    0,
+    0.25,
+    0.5,
+    0.75,
+    1,
+    1.25,
+    1.5,
+    1.75,
+    2,
+    2.25,
+    2.5,
+    2.75,
+    3,
+    3.25,
+    3.5,
+    3.75,
+    4,
+    5,
+]
+# for the Z, based on reco binning, but including additional bins where reco binning is too coarse
+ptZgen_binning_corr = [*ptZ_binning[:-1], 54, 75, 100, 1300]
+absYZgen_binning_corr = [
+    *yll_20quantiles_binning[10:-1],
+    2.0,
+    2.25,
+    2.5,
+    2.75,
+    3,
+    3.25,
+    3.5,
+    3.75,
+    4,
+    5,
 ]
 
 # categorical axes in python bindings always have an overflow bin, so use a regular axis for the charge
@@ -349,56 +548,6 @@ def get_binning_fakes_relIso(high_iso_bins=False):
     return edges
 
 
-def get_dilepton_ptV_binning(fine=False):
-    return (
-        [
-            0,
-            1,
-            1.5,
-            2,
-            2.5,
-            3,
-            3.5,
-            4,
-            4.5,
-            5,
-            5.5,
-            6,
-            6.5,
-            7,
-            7.5,
-            8,
-            8.5,
-            9,
-            9.5,
-            10,
-            10.5,
-            11,
-            11.5,
-            12,
-            13,
-            14,
-            15,
-            16,
-            17,
-            18,
-            19,
-            20,
-            22,
-            24,
-            26,
-            28,
-            30,
-            33,
-            37,
-            44,
-            100,
-        ]
-        if not fine
-        else range(200)
-    )
-
-
 def get_default_ptbins(analysis_label, unfolding=False, gen=False):
     vals = [30, 26.0, 56.0] if analysis_label[0] == "w" else [34, 26.0, 60.0]
     if unfolding and gen:
diff --git a/utilities/io_tools/input_tools.py b/utilities/io_tools/input_tools.py
index 971c1e535..e226dc47a 100644
--- a/utilities/io_tools/input_tools.py
+++ b/utilities/io_tools/input_tools.py
@@ -196,11 +196,11 @@ def read_dyturbo_vars_hist(base_name, var_axis=None, axes=("Y", "qT"), charge=No
     # NOTE: kappaFO varies muR and muF together, muf varies only muF
     scales_map = {
         "pdf0": "mur1-muf1",
-        "kappaFO0.5-kappaf2.": "murH-muf1",
+        "kappaFO0.5-kappaf2.": "mur0p5-muf1",
         "kappaFO2.-kappaf0.5": "mur2-muf1",
-        "kappaf0.5": "mur1-mufH",
+        "kappaf0.5": "mur1-muf0p5",
         "kappaf2.": "mur1-muf2",
-        "kappaFO0.5": "murH-mufH",
+        "kappaFO0.5": "mur0p5-muf0p5",
         "kappaFO2.": "mur2-muf2",
     }
 
@@ -631,10 +631,33 @@ def read_matched_scetlib_hist(
     hnonsing = hh.addHists(-1 * hfo_sing, hfo, flow=False, by_ax_name=False)
 
     if "qT" in hfo.axes.name and zero_nons_bins is not None:
+
+        def translate_slice(ax, s):
+            if not isinstance(s, slice):
+                return s
+            start = (
+                int(ax.index(s.start.imag) + s.start.real)
+                if isinstance(s.start, complex)
+                else s.start
+            )
+            stop = (
+                int(ax.index(s.stop.imag) + s.stop.real + 1)
+                if isinstance(s.stop, complex)
+                else s.stop
+            )
+
+            return slice(start, stop, s.step)
+
         slices = tuple(
-            zero_nons_bins if ax == "qT" else slice(None) for ax in hnonsing.axes.name
+            (
+                translate_slice(hnonsing.axes[ax], zero_nons_bins)
+                if ax == "qT"
+                else slice(None)
+            )
+            for ax in hnonsing.axes.name
         )
-        hnonsing.view()[slices] = np.zeros_like(hnonsing[{"qT": zero_nons_bins}])
+        hnonsing.values(flow=True)[slices] = 0
+        hnonsing.variances(flow=True)[slices] = 0
 
     # variations are driven by resummed result, collect common variations from nonsingular piece
     # if needed
@@ -846,13 +869,57 @@ def read_dyturbo_angular_coeffs(
     return h
 
 
-def read_mu_hist_combine_tau(minnlof, mu_sample, hist_name, combine_with_tau=True):
-    hmu = read_and_scale(minnlof, mu_sample, hist_name, apply_xsec=False)
-    sumw = read_sumw(minnlof, mu_sample)
-    xsec = read_xsec(minnlof, mu_sample)
+def read_mu_hist_combine_tau(
+    minnlof, mu_sample, hist_name, eras, combine_with_tau=True
+):
+    with h5py.File(minnlof, "r") as h5file:
+        results = load_results_h5py(h5file)
+        sumw = 0
+        xsec = 0
+        hmu = None
+
+        for era in eras:
+
+            mu_sample_era = f"{mu_sample}_{era}"
f"{mu_sample}_{era}" + if mu_sample_era not in results.keys(): + logger.warning(f"Sample {mu_sample_era} not found, continue without") + else: + sumw += read_sumw(minnlof, mu_sample_era) + hmu_era = load_and_scale( + results, mu_sample_era, hist_name, apply_xsec=False + ) + xsec_era = read_xsec(minnlof, mu_sample_era) + if xsec == 0: + xsec = xsec_era + elif xsec_era != xsec: + raise RuntimeError( + f"Incompatible cross sections for sample {mu_sample} across eras {eras}" + ) + + if hmu is None: + hmu = hmu_era + else: + hmu += hmu_era + + if combine_with_tau: + tau_sample_era = mu_sample_era.replace("mu", "tau") + if tau_sample_era not in results.keys(): + logger.warning( + f"Sample {tau_sample_era} not found, continue without" + ) + continue + + hmu_era = load_and_scale( + results, tau_sample_era, hist_name, apply_xsec=False + ) + if hmu is None: + hmu = hmu_era + else: + hmu += hmu_era + + sumw += read_sumw(minnlof, tau_sample_era) + + if xsec == 0: + raise RuntimeError("Got cross section of 0") - if combine_with_tau: - tau_sample = mu_sample.replace("mu", "tau") - hmu += read_and_scale(minnlof, tau_sample, hist_name, apply_xsec=False) - sumw += read_sumw(minnlof, tau_sample) - return hmu * xsec / sumw + return hmu * xsec / sumw diff --git a/utilities/parsing.py b/utilities/parsing.py index d22cfa4ef..e70890e62 100644 --- a/utilities/parsing.py +++ b/utilities/parsing.py @@ -268,18 +268,7 @@ def __call__(self, parser, namespace, values, option_string=None): "-e", "--era", type=str, - choices=[ - "2016PreVFP", - "2016PostVFP", - "2017", - "2017G", - "2017H", - "2018", - "2023_PUAVE1", - "2023_PUAVE2", - "2023_PUAVE5", - "2023_PUAVE10", - ], + choices=common.supported_eras, help="Data set to process", default="2016PostVFP", ) diff --git a/wremnants-data b/wremnants-data index 72e28e309..53838ccfd 160000 --- a/wremnants-data +++ b/wremnants-data @@ -1 +1 @@ -Subproject commit 72e28e309ea6a216b797e56090b54de9b58375e8 +Subproject commit 53838ccfdaf0e903eb48b63567b32f4c50ab0422 diff --git a/wremnants/combine_helpers.py b/wremnants/combine_helpers.py index 62762c2f8..22afe9c06 100644 --- a/wremnants/combine_helpers.py +++ b/wremnants/combine_helpers.py @@ -557,9 +557,8 @@ def add_bsm_mixing( if mixing > 0: # load bsm members - model, mass = bsm_name.split("_") bsm_member_info = datagroups.get_members_from_results( - startswith=[f"{model}_MN-{mass}-"] + startswith=f"{bsm_name}_{datagroups.era}" ) bsm_members = [Datagroup_member(k, v) for k, v in bsm_member_info.items()] @@ -603,22 +602,22 @@ def add_bsm_mixing( def add_bsm_process( datagroups, - bsm_process, + bsm_name, ): # add BSM sample as new process - model, mass = bsm_process.split("_") bsm_members = datagroups.get_members_from_results( - startswith=[f"{model}_MN-{mass}-"] + startswith=f"{bsm_name}_{datagroups.era}" ) if len(bsm_members) != 1: raise NotImplementedError( f"Expected exactly 1 BSM member, but got {len(bsm_members)}" ) + # since this group is created manually, the BSM is not added to the fakes (which is likely intented thing for BSM) datagroups.addGroup( - bsm_process, + bsm_name, members=bsm_members, ) - datagroups.unconstrainedProcesses.append(bsm_process) + datagroups.unconstrainedProcesses.append(bsm_name) # Get SM cross section xsec = 0 @@ -626,5 +625,5 @@ def add_bsm_process( xsec += m.xsec # scale BSM cross section to SM cross section - for m in datagroups.groups[bsm_process].members: + for m in datagroups.groups[bsm_name].members: m.xsec = xsec diff --git a/wremnants/combine_theoryAgnostic_helper.py 
diff --git a/wremnants/combine_theoryAgnostic_helper.py b/wremnants/combine_theoryAgnostic_helper.py
index a7a6a6d87..bcd0c0499 100644
--- a/wremnants/combine_theoryAgnostic_helper.py
+++ b/wremnants/combine_theoryAgnostic_helper.py
@@ -149,6 +149,11 @@ def slice_histogram(h):
     result = {}
 
+    # for backwards compatibility, TODO remove when no longer needed
+    scale_hists = {
+        k.replace("uPostVFP", "u_2016PostVFP"): v for k, v in scale_hists.items()
+    }
+
     for g in self.datagroups.procGroups["signal_samples"]:
         if sign != "":
             if sign is not None:
@@ -163,21 +168,21 @@
             else:
                 result[m.name] = lambda h: slice_histogram(h)
         else:
-            scale_hist = scale_hists["WplusmunuPostVFP"]
-            result["WplusmunuPostVFP"] = (
+            scale_hist = scale_hists["Wplusmunu_2016PostVFP"]
+            result["Wplusmunu_2016PostVFP"] = (
                 lambda h, scale_hist=scale_hist: apply_transformations(
                     h, scale_hist
                 )
             )
-            scale_hist = scale_hists["WminusmunuPostVFP"]
-            result["WminusmunuPostVFP"] = (
+            scale_hist = scale_hists["Wminusmunu_2016PostVFP"]
+            result["Wminusmunu_2016PostVFP"] = (
                 lambda h, scale_hist=scale_hist: apply_transformations(
                     h, scale_hist
                 )
            )
         else:
-            scale_hist = scale_hists["ZmumuPostVFP"]
-            result["ZmumuPostVFP"] = (
+            scale_hist = scale_hists["Zmumu_2016PostVFP"]
+            result["Zmumu_2016PostVFP"] = (
                 lambda h, scale_hist=scale_hist: apply_transformations(
                     h, scale_hist
                 )
diff --git a/wremnants/combine_theory_helper.py b/wremnants/combine_theory_helper.py
index 98a99f811..5cb30b5d5 100644
--- a/wremnants/combine_theory_helper.py
+++ b/wremnants/combine_theory_helper.py
@@ -905,7 +905,7 @@ def add_uncorrelated_np_uncertainties(self):
 
     def add_pdf_uncertainty(self, operation=None, scale=-1.0):
         pdf = self.datagroups.args_from_metadata("pdfs")[0]
-        pdfInfo = theory_tools.pdf_info_map("ZmumuPostVFP", pdf)
+        pdfInfo = theory_tools.pdf_info_map("Zmumu_2016PostVFP", pdf)
         pdfName = pdfInfo["name"]
         scale = (
             scale
@@ -980,7 +980,7 @@ def add_pdf_uncertainty(self, operation=None, scale=-1.0):
     def add_pdf_alphas_variation(self, noi=False):
         pdf = self.datagroups.args_from_metadata("pdfs")[0]
-        pdfInfo = theory_tools.pdf_info_map("ZmumuPostVFP", pdf)
+        pdfInfo = theory_tools.pdf_info_map("Zmumu_2016PostVFP", pdf)
         pdfName = pdfInfo["name"]
         as_range = pdfInfo["alphasRange"]
@@ -1000,7 +1000,7 @@ def add_pdf_alphas_variation(self, noi=False):
                 "Falling back to default alphaS corrections scetlib_dyturbo_CT18Z_N3p0LL_N2LO_pdfasCorr."
) pdf = "ct18z" - pdfInfo = theory_tools.pdf_info_map("ZmumuPostVFP", pdf) + pdfInfo = theory_tools.pdf_info_map("Zmumu_2016PostVFP", pdf) pdfName = pdfInfo["name"] as_range = pdfInfo["alphasRange"] as_range = theory_tools.pdfMap[pdf]["alphasRange"] diff --git a/wremnants/datasets/datagroups.py b/wremnants/datasets/datagroups.py index 59a7eb3f5..719b0abfc 100644 --- a/wremnants/datasets/datagroups.py +++ b/wremnants/datasets/datagroups.py @@ -333,22 +333,18 @@ def set_histselectors( and (len(mcCorr) == 0 or mcCorr[0] not in ["none", None]) ): # set QCD MC nonclosure corrections - if "QCDmuEnrichPt15PostVFP" not in self.results: + histname_qcd_mc = f"QCDmuEnrichPt15_{self.era}" + if histname_qcd_mc not in self.results: logger.warning( - "Dataset 'QCDmuEnrichPt15PostVFP' not in results, continue without fake correction" + f"Dataset '{histname_qcd_mc}' not in results, continue without fake correction" ) return - if ( - "unweighted" - not in self.results["QCDmuEnrichPt15PostVFP"]["output"] - ): + if "unweighted" not in self.results[histname_qcd_mc]["output"]: logger.warning( "Histogram 'unweighted' not found, continue without fake correction" ) return - hQCD = self.results["QCDmuEnrichPt15PostVFP"]["output"][ - "unweighted" - ].get() + hQCD = self.results[histname_qcd_mc]["output"]["unweighted"].get() self.groups[g].histselector.set_correction(hQCD, axes_names=mcCorr) else: self.groups[g].histselector = signalselector( @@ -1816,7 +1812,7 @@ def addPseudodataHistogramFakes( ) hist_fake = pseudodataGroups.groups[self.fakeName].hists[pseudodata] elif pseudodata == "mcClosure": - hist_fake = pseudodataGroups.results["QCDmuEnrichPt15PostVFP"][ + hist_fake = pseudodataGroups.results[f"QCDmuEnrichPt15_{self.era}"][ "output" ]["unweighted"].get() diff --git a/wremnants/datasets/datagroups2016.py b/wremnants/datasets/datagroups2016.py index 8b3665116..7bf61a503 100644 --- a/wremnants/datasets/datagroups2016.py +++ b/wremnants/datasets/datagroups2016.py @@ -51,9 +51,7 @@ def make_datagroups_2016( ) dg.addGroup( "DYlowMass", - members=dg.get_members_from_results( - startswith=["DYlowMass", "DYJetsToMuMuMass10to50"] - ), + members=dg.get_members_from_results(startswith=["DYJetsToMuMuMass10to50"]), ) dg.addGroup( "Top", diff --git a/wremnants/datasets/datasetDict_gen.py b/wremnants/datasets/datasetDict13TeVGen.py similarity index 94% rename from wremnants/datasets/datasetDict_gen.py rename to wremnants/datasets/datasetDict13TeVGen.py index f64c9715f..9f7107a6b 100644 --- a/wremnants/datasets/datasetDict_gen.py +++ b/wremnants/datasets/datasetDict13TeVGen.py @@ -1,7 +1,7 @@ from utilities.common import ( - xsec_DYJetsToMuMu, - xsec_WminusJetsToMuNu, - xsec_WplusJetsToMuNu, + xsec_DYJetsToLL, + xsec_WminusJetsToLNu, + xsec_WplusJetsToLNu, ) # winhac cross sections from: https://gitlab.cern.ch/cms-wmass/private/issue-tracking/-/issues/34#note_7052239 @@ -27,7 +27,7 @@ horace_v4 = False horace_v5 = False -genDataDict = { +dataDict_13TeVGen = { "ZmumuMiNLO": { "filepaths": [ "{BASE_PATH}/DYJetsToMuMu_TuneCP5_13TeV-powheg-NNLOPS-pythia8-photos/RunIISummer15wmLHEGS/221121_114507" @@ -94,7 +94,7 @@ "filepaths": [ "{BASE_PATH}/DYJetsToMuMu_H2ErratumFix_NoQEDISR_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Wplusmunu_horace-lo-photos": { @@ -149,7 +149,7 @@ "filepaths": [ "{BASE_PATH}/WplusJetsToMuNu_H2ErratumFix_NoQEDISR_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": xsec_WplusJetsToMuNu, + "xsec": xsec_WplusJetsToLNu, "group": 
"Wmunu", }, "WplusCharmToMuNu": { @@ -215,7 +215,7 @@ "filepaths": [ "{BASE_PATH}/WminusJetsToMuNu_H2ErratumFix_NoQEDISR_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": xsec_WminusJetsToMuNu, + "xsec": xsec_WminusJetsToLNu, "group": "Wmunu", }, "WminusCharmToMuNu": { @@ -228,7 +228,7 @@ } # renesance -genDataDict.update( +dataDict_13TeVGen.update( { "Zmumu_renesance-lo": { "filepaths": [ @@ -250,7 +250,7 @@ # NanoLHE # The Powheg EW LHE samples have negative weights but "genWeight" is always just 1, so we will use LHEWeight_originalXWGTUP instead. That also gives us the total cross section for each subsample, so we set that to 1 in this dict. # TODO copy these samples to central area when they are complete -genDataDict.update( +dataDict_13TeVGen.update( { "Zmumu_powheg-weak-low": { "filepaths": ["{BASE_PATH}/svn4049/46mll80"], @@ -271,27 +271,27 @@ ) if horace_v1: - genDataDict.update( + dataDict_13TeVGen.update( { "Zmumu_horace-v1-alpha-old-fsr-off-isr-pythia": { "filepaths": [ "{BASE_PATH}/Horace_v1/ZToMuMu_TuneCP5_13TeV-horace-alpha-old-fsr-off-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v1-born-fsr-photos-isr-pythia": { "filepaths": [ "{BASE_PATH}/Horace_v1/ZToMuMu_TuneCP5_13TeV-horace-born-fsr-photos-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v1-born-fsr-photoslow-isr-pythia": { "filepaths": [ "{BASE_PATH}/Horace_v1/ZToMuMu_TuneCP5_13TeV-horace-born-fsr-photoslow-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v1-lo-photos": { @@ -305,7 +305,7 @@ "filepaths": [ "{BASE_PATH}/Horace_v1/ZToMuMu_TuneCP5_13TeV-horace-born-fsr-pythia-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v1-nlo": { @@ -319,14 +319,14 @@ "filepaths": [ "{BASE_PATH}/Horace_v1/ZToMuMu_TuneCP5_13TeV-horace-exp-old-fsr-off-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, } ) if horace_v2: - genDataDict.update( + dataDict_13TeVGen.update( { "Zmumu_horace-v2-lo-photos": { "filepaths": [ @@ -346,14 +346,14 @@ "filepaths": [ "{BASE_PATH}/Horace_v2/ZToMuMu_TuneCP5_13TeV-horace-exp-old-fsr-off-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, } ) if horace_v3: - genDataDict.update( + dataDict_13TeVGen.update( { "Zmumu_horace-v3-lo-photos": { "filepaths": [ @@ -366,7 +366,7 @@ "filepaths": [ "{BASE_PATH}/Horace_v3/ZToMuMu_TuneCP5_13TeV-horace-exp-old-fsr-off-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v3-nlo": { @@ -387,7 +387,7 @@ "filepaths": [ "{BASE_PATH}/Horace_v3/WplusToMuNu_TuneCP5_13TeV-horace-exp-old-fsr-off-isr-pythia" ], - "xsec": xsec_WplusJetsToMuNu, + "xsec": xsec_WplusJetsToLNu, "group": "Wmunu", }, "Wplusmunu_horace-v3-nlo": { @@ -408,7 +408,7 @@ "filepaths": [ "{BASE_PATH}/Horace_v3/WminusToMuNu_TuneCP5_13TeV-horace-exp-old-fsr-off-isr-pythia" ], - "xsec": xsec_WminusJetsToMuNu, + "xsec": xsec_WminusJetsToLNu, "group": "Wmunu", }, "Wminusmunu_horace-v3-nlo": { @@ -422,27 +422,27 @@ ) if horace_v5: - genDataDict.update( + dataDict_13TeVGen.update( { "Zmumu_horace-v5-alpha-fsr-off-isr-off": { "filepaths": [ "{BASE_PATH}/Horace_v5/ZToMuMu_TuneCP5_13TeV-horace-alpha-fsr-off-isr-off" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v5-alpha-old-fsr-off-isr-off": { "filepaths": [ 
"{BASE_PATH}/Horace_v5/ZToMuMu_TuneCP5_13TeV-horace-alpha-old-fsr-off-isr-off" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v5-alpha-old-fsr-off-isr-pythia": { "filepaths": [ "{BASE_PATH}/Horace_v5/ZToMuMu_TuneCP5_13TeV-horace-alpha-old-fsr-off-isr-pythia" ], - "xsec": xsec_DYJetsToMuMu, + "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, "Zmumu_horace-v5-nlo": { diff --git a/wremnants/datasets/datasetDict2017G_v9.py b/wremnants/datasets/datasetDict2017G_v9.py index 3159afbe6..2348a21d4 100644 --- a/wremnants/datasets/datasetDict2017G_v9.py +++ b/wremnants/datasets/datasetDict2017G_v9.py @@ -7,11 +7,11 @@ # from GenXSecAnalyzer xsec_DYJetsToLL = 698.3 # +/- 2.133 -xsec_WplusJetsToMuNu = 4477 # +/- 17.27 -xsec_WminusJetsToMuNu = 2940 # +/- 9.153 +xsec_WplusJetsToLNu = 4477 # +/- 17.27 +xsec_WminusJetsToLL = 2940 # +/- 9.153 dataDictV9_2017G = { - "data2017G": { + "SingleMuon_2017G": { "filepaths": [ "{BASE_PATH}/SingleMuon/Run2017G-UL2017_MiniAODv2_NanoAODv9_GT36-v2", ], @@ -19,46 +19,46 @@ "lumicsv": lumicsv, "lumijson": lumijson, }, - "Zmumu2017G": { + "Zmumu_2017G": { "filepaths": [ "{BASE_PATH}/DYJetsToMuMu_H2ErratumFix_PDFExt_TuneCP5_5020GeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL17pp5TeVNanoAODv9-106X_mc2017_realistic_forppRef5TeV_v3-v2", ], "xsec": xsec_DYJetsToLL, "group": "Zmumu", }, - "Ztautau2017G": { + "Ztautau_2017G": { "filepaths": [ "{BASE_PATH}/DYJetsToTauTau_TauToMuorE_H2ErratumFix_PDFExt_TuneCP5_5020GeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL17pp5TeVNanoAODv9-106X_mc2017_realistic_forppRef5TeV_v3-v2", ], "xsec": xsec_DYJetsToLL * common.Z_TAU_TO_LEP_RATIO, "group": "Ztautau", }, - "Wplusmunu2017G": { + "Wplusmunu_2017G": { "filepaths": [ "{BASE_PATH}/WplusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_5020GeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL17pp5TeVNanoAODv9-106X_mc2017_realistic_forppRef5TeV_v3-v2", ], - "xsec": xsec_WplusJetsToMuNu, + "xsec": xsec_WplusJetsToLNu, "group": "Wmunu", }, - "Wminusmunu2017G": { + "Wminusmunu_2017G": { "filepaths": [ "{BASE_PATH}/WminusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_5020GeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL17pp5TeVNanoAODv9-106X_mc2017_realistic_forppRef5TeV_v3-v2", ], - "xsec": xsec_WminusJetsToMuNu, + "xsec": xsec_WminusJetsToLL, "group": "Wmunu", }, # "Wplustaunu2017G": { # "filepaths": [ # "{BASE_PATH}/WplusJetsToTauNu_TauToMuorE_H2ErratumFix_PDFExt_TuneCP5_5020GeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL17pp5TeVNanoAODv9-106X_mc2017_realistic_forppRef5TeV_v3-v2", # ], - # "xsec": common.BR_TAUToMU * xsec_WplusJetsToMuNu, + # "xsec": xsec_WplusJetsToLNu * (common.BR_TAUToMU + common.BR_TAUToE), # "group": "Wtaunu", # }, - "Wminustaunu2017G": { + "Wminustaunu_2017G": { "filepaths": [ "{BASE_PATH}/WminusJetsToTauNu_TauToMuorE_H2ErratumFix_PDFExt_TuneCP5_5020GeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL17pp5TeVNanoAODv9-106X_mc2017_realistic_forppRef5TeV_v3-v2", ], - "xsec": common.BR_TAUToMU * xsec_WminusJetsToMuNu, + "xsec": xsec_WminusJetsToLL * (common.BR_TAUToMU + common.BR_TAUToE), "group": "Wtaunu", }, } diff --git a/wremnants/datasets/datasetDict_lowPU.py b/wremnants/datasets/datasetDict2017H.py similarity index 53% rename from wremnants/datasets/datasetDict_lowPU.py rename to wremnants/datasets/datasetDict2017H.py index 71b21f07f..2b4550cea 100644 --- a/wremnants/datasets/datasetDict_lowPU.py +++ b/wremnants/datasets/datasetDict2017H.py @@ -4,118 +4,115 @@ lumicsv_mu = f"{common.data_dir}/lowPU/bylsoutput_HLT_HIMu17_Full.csv" lumicsv_el = 
f"{common.data_dir}/lowPU/bylsoutput_HLT_HIEle20_Full.csv" -# cross sections from: https://twiki.cern.ch/twiki/bin/viewauth/CMS/StandardModelCrossSectionsat13TeV -BR_W_LEP = 3 * 0.1086 # PDG - -dataDictLowPU = { - "singleelectron": { +dataDict_2017H = { + "HighEGJet_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v2/HighEGJet", + "{BASE_PATH}/LowPU/NanoAOD_v2/HighEGJet", ], "group": "Data", "lumicsv": lumicsv_el, "lumijson": lumijson, }, - "singlemuon": { + "SingleMuon_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v2/SingleMuon", + "{BASE_PATH}/LowPU/NanoAOD_v2/SingleMuon", ], "group": "Data", "lumicsv": lumicsv_mu, "lumijson": lumijson, }, - "Zee": { + "Zee_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/DYJetsToEE_M-50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/DYJetsToEE_M-50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 2025.74, + "xsec": common.xsec_DYJetsToLL, "group": "Zee", }, - "Wplusenu": { + "Wplusenu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/WplusJetsToENu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/WplusJetsToENu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 11572.19, + "xsec": common.xsec_WplusJetsToLNu, "group": "Wenu", }, - "Wminusenu": { + "Wminusenu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/WminusJetsToENu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/WminusJetsToENu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 8677.3, + "xsec": common.xsec_WminusJetsToLNu, "group": "Wenu", }, - "Zmumu": { + "Zmumu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/DYJetsToMuMu_M-50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/DYJetsToMuMu_M-50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 2025.74, + "xsec": common.xsec_DYJetsToLL, "group": "Zmumu", }, - "Wplusmunu": { + "Wplusmunu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/WplusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/WplusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 11572.19, + "xsec": common.xsec_WplusJetsToLNu, "group": "Wmunu", }, - "Wminusmunu": { + "Wminusmunu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/WminusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/WminusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 8677.3, + "xsec": common.xsec_WminusJetsToLNu, "group": "Wmunu", }, - "Ztautau": { + "Ztautau_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/DYJetsToTauTau_M-50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/DYJetsToTauTau_M-50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 2025.74, + "xsec": common.xsec_DYJetsToLL, "group": "Ztautau", }, - "Wplustaunu": { + "Wplustaunu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v3/WplusJetsToTauNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/WplusJetsToTauNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 11572.19, + "xsec": common.xsec_WplusJetsToLNu, "group": "Wtaunu", }, - "Wminustaunu": { + "Wminustaunu_2017H": { "filepaths": [ - 
"{BASE_PATH}/{ERA}/NanoAOD_v3/WminusJetsToTauNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" + "{BASE_PATH}/LowPU/NanoAOD_v3/WminusJetsToTauNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos" ], - "xsec": 8677.3, + "xsec": common.xsec_WminusJetsToLNu, "group": "Wtaunu", }, - "WWTo2L2Nu": { + "WWTo2L2Nu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v2/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8" + "{BASE_PATH}/LowPU/NanoAOD_v2/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8" ], - "xsec": 118.7 * BR_W_LEP * BR_W_LEP, + "xsec": 118.7 * common.BR_W_LEP * common.BR_W_LEP, "group": "Diboson", }, - "WZTo3LNu": { + "WZTo3LNu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v2/WZTo3LNu_TuneCP5_13TeV-powheg-pythia8" + "{BASE_PATH}/LowPU/NanoAOD_v2/WZTo3LNu_TuneCP5_13TeV-powheg-pythia8" ], "xsec": 4.912, "group": "Diboson", }, - "ZZ": { - "filepaths": ["{BASE_PATH}/{ERA}/NanoAOD_v2/ZZ_TuneCP5_13TeV-pythia8"], + "ZZ_2017H": { + "filepaths": ["{BASE_PATH}/LowPU/NanoAOD_v2/ZZ_TuneCP5_13TeV-pythia8"], "xsec": 16.523, "group": "Diboson", }, - "TTTo2L2Nu": { + "TTTo2L2Nu_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v2/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8" + "{BASE_PATH}/LowPU/NanoAOD_v2/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8" ], "xsec": 87.31483776, "group": "Top", }, - "TTToSemiLeptonic": { + "TTToSemiLeptonic_2017H": { "filepaths": [ - "{BASE_PATH}/{ERA}/NanoAOD_v2/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8" + "{BASE_PATH}/LowPU/NanoAOD_v2/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8" ], "xsec": 364.35, "group": "Top", diff --git a/wremnants/datasets/datasetDict2017_v9.py b/wremnants/datasets/datasetDict2017_v9.py index 66cbe7cc4..6b6564f1e 100644 --- a/wremnants/datasets/datasetDict2017_v9.py +++ b/wremnants/datasets/datasetDict2017_v9.py @@ -3,241 +3,238 @@ lumicsv = f"{common.data_dir}/bylsoutput_2017.csv" lumijson = f"{common.data_dir}/Cert_294927-306462_13TeV_UL2017_Collisions17_HLT_IsoMu24_v_CustomJSON.txt" -# NOTES -# BASE_PATH is /scratchnvme/wmass/NANOV9/postVFP (so 2017 BASE path is {BASE_PATH}/../y2017v6/) have to update at some point -# ZtautauPostVFP sample is one available from centrl production, so dataDictV9_2017 = { - "data2017B": { + "SingleMuon_2017B": { "filepaths": [ - "{BASE_PATH}/../y2017v6/SingleMuon/NanoV9Run2017B_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2017B_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "data2017C": { + "SingleMuon_2017C": { "filepaths": [ - "{BASE_PATH}/../y2017v6/SingleMuon/NanoV9Run2017C_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2017C_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "data2017D": { + "SingleMuon_2017D": { "filepaths": [ - "{BASE_PATH}/../y2017v6/SingleMuon/NanoV9Run2017D_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2017D_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "data2017E": { + "SingleMuon_2017E": { "filepaths": [ - "{BASE_PATH}/../y2017v6/SingleMuon/NanoV9Run2017E_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2017E_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "data2017F": { + "SingleMuon_2017F": { "filepaths": [ - "{BASE_PATH}/../y2017v6/SingleMuon/NanoV9Run2017F_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2017F_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - 
"ZmumuPostVFP": { + "Zmumu_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/DYJetsToMuMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/DYJetsToMuMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": common.xsec_DYJetsToMuMu, + "xsec": common.xsec_DYJetsToLL, "group": "Zmumu", "das_name": "private", }, - "DYJetsToMuMuMass10to50PostVFP": { + "DYJetsToMuMuMass10to50_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/DYJetsToMuMu_M-10to50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/DYJetsToMuMu_M-10to50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": common.xsec_DYJetsToMuMuMass10to50, + "xsec": common.xsec_DYJetsToLLMass10to50, "group": "DYlowMass", "das_name": "/DYJetsToMuMu_M-10to50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1*v1/NANOAODSIM", }, - "ZtautauPostVFP": { # this sample needs to be produced using old Mass fix one + "Ztautau_2017": { # this sample needs to be produced using old Mass fix one "filepaths": [ - "{BASE_PATH}/../y2017v6//DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_massWgtFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}//DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_massWgtFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" ], # At least one tau->e or mu decay, so everything that's not all other decays - "xsec": common.xsec_DYJetsToMuMu * common.Z_TAU_TO_LEP_RATIO, + "xsec": common.xsec_DYJetsToLL * common.Z_TAU_TO_LEP_RATIO, "group": "Ztautau", "das_name": "/DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_massWgtFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v2/NANOAODSIM", }, - "WplusmunuPostVFP": { + "Wplusmunu_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WplusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/WplusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": common.xsec_WplusJetsToMuNu, + "xsec": common.xsec_WplusJetsToLNu, "group": "Wmunu", "das_name": "private", }, - "WminusmunuPostVFP": { + "Wminusmunu_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WminusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/WminusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": common.xsec_WminusJetsToMuNu, + "xsec": common.xsec_WminusJetsToLNu, "group": "Wmunu", "das_name": "private", }, - "WplustaunuPostVFP": { + "Wplustaunu_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WplusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}", + "{BASE_PATH}/WplusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}", ], - "xsec": common.BR_TAUToMU * common.xsec_WplusJetsToMuNu, + "xsec": common.xsec_WplusJetsToLNu * common.BR_TAUToMU, "group": "Wtaunu", "das_name": "private", }, - "WminustaunuPostVFP": { + "Wminustaunu_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WminusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" + 
"{BASE_PATH}/WminusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": common.BR_TAUToMU * common.xsec_WminusJetsToMuNu, + "xsec": common.xsec_WminusJetsToLNu * common.BR_TAUToMU, "group": "Wtaunu", "das_name": "private", }, - "TTLeptonicPostVFP": { + "TTLeptonic_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], "xsec": 88.29, "group": "Top", "das_name": "/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "TTSemileptonicPostVFP": { ##could not copy full stat of this sample due to lack of storage + "TTSemileptonic_2017": { ##could not copy full stat of this sample due to lack of storage "filepaths": [ - "{BASE_PATH}/../y2017v6/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], "xsec": 366.34, "group": "Top", "das_name": "/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTschanLepDecaysPostVFP": { + "SingleTschanLepDecays_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-amcatnlo-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-amcatnlo-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], "xsec": 3.609, "group": "Top", "das_name": "/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-amcatnlo-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTtWAntitopPostVFP": { + "SingleTtWAntitop_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/ST_tW_antitop_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_tW_antitop_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], "xsec": 19.55, # 35.85 * (1.0-((1-0.1086*3)*(1-0.1086*3))) = 19.5 pb "group": "Top", "das_name": "/ST_tW_antitop_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTtWTopPostVFP": { + "SingleTtWTop_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/ST_tW_top_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_tW_top_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], "xsec": 19.55, "group": "Top", "das_name": "/ST_tW_top_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTtchanAntitopPostVFP": { + "SingleTtchanAntitop_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/ST_t-channel_antitop_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_t-channel_antitop_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], "xsec": 80.0, "group": "Top", "das_name": "/ST_t-channel_antitop_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTtchanTopPostVFP": { + "SingleTtchanTop_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/ST_t-channel_top_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + 
"{BASE_PATH}/ST_t-channel_top_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], "xsec": 134.2, "group": "Top", "das_name": "/ST_t-channel_top_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, # inclusive samples, keep for reference - # 'WWPostVFP' : { + # 'WW2017' : { # 'filepaths' : # ["{BASE_PATH}/WW_TuneCP5_13TeV-pythia8/*.root/NanoV9MC2017_{NANO_PROD_TAG}"], # 'xsec' : 118.7, # 'group' : "Diboson", # }, - # 'WZPostVFP' : { + # 'WZ2017' : { # 'filepaths' : # ["{BASE_PATH}/WZ_TuneCP5_13TeV-pythia8/*.root/NanoV9MC2017_{NANO_PROD_TAG}"], # 'xsec' : 47.026760, # to check, taken from WZTo1L1Nu2Q dividing by BR: 10.71/(3*0.1086)/(1-3*0.033658-0.2) # 'group' : "Diboson", # }, ## - "WWTo2L2NuPostVFP": { + "WWTo2L2Nu_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": 12.6, # 118.7*0.1086*0.1086*9 + "xsec": common.xsec_WWTo2L2Nu, "group": "Diboson", "das_name": "/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v2/NANOAODSIM", }, - "WWTo1L1Nu2QPostVFP": { + "WWTo1L1Nu2Q_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WWTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/WWTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": 52.146, # 118.7*[(3*0.1086)*(1-3*0.1086)]*2 (2 is because one W or the other can go to Q) + "xsec": common.xsec_WWTo1L1Nu, "group": "Diboson", "das_name": "/WWTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "WZTo3LNuPostVFP": { + "WZTo3LNu_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": 4.91, # 4.42965*1.109, 1.109 is the NLO to NNLO kfactor, for this one would need to make sure about the NLO XS, depends a lot on the dilepton mass cut + "xsec": common.xsec_WZTo3LNu, "group": "Diboson", "das_name": "/WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v2/NANOAODSIM", }, - "WZTo2Q2LPostVFP": { + "WZTo2Q2L_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/WZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": 5.4341, # 4.9*1.109 + "xsec": common.xsec_WZTo2Q2L, "group": "Diboson", "das_name": "/WZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "WZTo1L1Nu2QPostVFP": { + "WZTo1L1Nu2Q2Q_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/WZTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/WZTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": 11.781, # 10.71*1.10 + "xsec": common.xsec_WZTo1L1Nu2Q, "group": "Diboson", "das_name": "/WZTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "ZZTo2L2NuPostVFP": { + "ZZTo2L2Nu_2017": { "filepaths": [ - 
"{BASE_PATH}/../y2017v6/ZZTo2L2Nu_TuneCP5_13TeV_powheg_pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/ZZTo2L2Nu_TuneCP5_13TeV_powheg_pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": 0.60, + "xsec": common.xsec_ZZTo2L2Nu, "group": "Diboson", "das_name": "/ZZTo2L2Nu_TuneCP5_13TeV_powheg_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "ZZTo2Q2LPostVFP": { + "ZZTo2Q2L_2017": { "filepaths": [ - "{BASE_PATH}/../y2017v6/ZZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" + "{BASE_PATH}/ZZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2017_{NANO_PROD_TAG}" ], - "xsec": 5.1, + "xsec": common.xsec_ZZTo2Q2L, "group": "Diboson", "das_name": "/ZZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2017_realistic_v16_L1v1-v1/NANOAODSIM", }, - "QCDmuEnrichPt15PostVFP": { # Not copied + "QCDmuEnrichPt15_2017": { # Not copied "filepaths": [ - "{BASE_PATH}/../y2017v6/QCD_Pt-20_MuEnrichedPt15_TuneCP5_13TeV-pythia8/NanoV9MC2017_{NANO_PROD_TAG}/" + "{BASE_PATH}/QCD_Pt-20_MuEnrichedPt15_TuneCP5_13TeV-pythia8/NanoV9MC2017_{NANO_PROD_TAG}/" ], "xsec": 238800, "group": "QCD", diff --git a/wremnants/datasets/datasetDict2018_v9.py b/wremnants/datasets/datasetDict2018_v9.py index f32e3e06a..d0c746138 100644 --- a/wremnants/datasets/datasetDict2018_v9.py +++ b/wremnants/datasets/datasetDict2018_v9.py @@ -3,237 +3,234 @@ lumicsv = f"{common.data_dir}/bylsoutput_2018.csv" lumijson = f"{common.data_dir}/Cert_314472-325175_13TeV_UL2018_Collisions18_HLT_IsoMu24_v_CustomJSON.txt" -# NOTES -# BASE_PATH is /scratchnvme/wmass/NANOV9/postVFP (so 2018 BASE path is {BASE_PATH}/../y2018v6/) have to update at some point -# ZtautauPostVFP sample is one available from centrl production, so dataDictV9_2018 = { - "data2018A": { + "SingleMuon_2018A": { "filepaths": [ - "{BASE_PATH}/../y2018v6/SingleMuon/NanoV9Run2018A_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2018A_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "data2018B": { + "SingleMuon_2018B": { "filepaths": [ - "{BASE_PATH}/../y2018v6/SingleMuon/NanoV9Run2018B_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2018B_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "data2018C": { + "SingleMuon_2018C": { "filepaths": [ - "{BASE_PATH}/../y2018v6/SingleMuon/NanoV9Run2018C_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2018C_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "data2018D": { + "SingleMuon_2018D": { "filepaths": [ - "{BASE_PATH}/../y2018v6/SingleMuon/NanoV9Run2018D_{NANO_PROD_TAG}", + "{BASE_PATH}/SingleMuon/NanoV9Run2018D_{NANO_PROD_TAG}", ], "group": "Data", "lumicsv": lumicsv, "lumijson": lumijson, "das_name": "private", }, - "ZmumuPostVFP": { + "Zmumu_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/DYJetsToMuMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", - "{BASE_PATH}/../y2018v6/DYJetsToMuMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/DYJetsToMuMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/DYJetsToMuMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", ], - "xsec": common.xsec_DYJetsToMuMu, + "xsec": common.xsec_DYJetsToLL, "group": 
"Zmumu", "das_name": "private", }, - "DYJetsToMuMuMass10to50PostVFP": { + "DYJetsToMuMuMass10to50_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/DYJetsToMuMu_M-10to50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/DYJetsToMuMu_M-10to50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}" ], - "xsec": common.xsec_DYJetsToMuMuMass10to50, + "xsec": common.xsec_DYJetsToLLMass10to50, "group": "DYlowMass", "das_name": "/DYJetsToMuMu_M-10to50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1*v1/NANOAODSIM", }, - "ZtautauPostVFP": { + "Ztautau_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_H2ErratumFix_PDF_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_H2ErratumFix_PDF_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}" ], # At least one tau->e or mu decay, so everything that's not all other decays - "xsec": common.xsec_DYJetsToMuMu * common.Z_TAU_TO_LEP_RATIO, + "xsec": common.xsec_DYJetsToLL * common.Z_TAU_TO_LEP_RATIO, "group": "Ztautau", "das_name": "/DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_H2ErratumFix_PDF_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM", }, - "WplusmunuPostVFP": { + "Wplusmunu_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/WplusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", - "{BASE_PATH}/../y2018v6/WplusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WplusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WplusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", ], - "xsec": common.xsec_WplusJetsToMuNu, + "xsec": common.xsec_WplusJetsToLNu, "group": "Wmunu", "das_name": "private", }, - "WminusmunuPostVFP": { + "Wminusmunu_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/WminusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", - "{BASE_PATH}/../y2018v6/WminusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WminusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WminusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", ], - "xsec": common.xsec_WminusJetsToMuNu, + "xsec": common.xsec_WminusJetsToLNu, "group": "Wmunu", "das_name": "private", }, - "WplustaunuPostVFP": { + "Wplustaunu_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/WplusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", - "{BASE_PATH}/../y2018v6/WplusJetsToTauNu_TauToMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WplusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WplusJetsToTauNu_TauToMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", ], - "xsec": common.BR_TAUToMU * common.xsec_WplusJetsToMuNu, + "xsec": common.BR_TAUToMU * common.xsec_WplusJetsToLNu, 
"group": "Wtaunu", "das_name": "private", }, - "WminustaunuPostVFP": { + "Wminustaunu_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/WminusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", - "{BASE_PATH}/../y2018v6/WminusJetsToTauNu_TauToMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WminusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", + "{BASE_PATH}/WminusJetsToTauNu_TauToMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MC2018_{NANO_PROD_TAG}", ], - "xsec": common.BR_TAUToMU * common.xsec_WminusJetsToMuNu, + "xsec": common.BR_TAUToMU * common.xsec_WminusJetsToLNu, "group": "Wtaunu", "das_name": "private", }, - "TTLeptonicPostVFP": { + "TTLeptonic_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], "xsec": 88.29, "group": "Top", "das_name": "/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "TTSemileptonicPostVFP": { + "TTSemileptonic_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], "xsec": 366.34, "group": "Top", "das_name": "/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTschanLepDecaysPostVFP": { + "SingleTschanLepDecays_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-amcatnlo-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-amcatnlo-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], "xsec": 3.609, "group": "Top", "das_name": "/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-amcatnlo-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTtWAntitopPostVFP": { + "SingleTtWAntitop_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/ST_tW_antitop_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_tW_antitop_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], "xsec": 19.55, # 35.85 * (1.0-((1-0.1086*3)*(1-0.1086*3))) = 19.5 pb "group": "Top", "das_name": "/ST_tW_antitop_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTtWTopPostVFP": { + "SingleTtWTop_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/ST_tW_top_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_tW_top_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], "xsec": 19.55, "group": "Top", "das_name": "/ST_tW_top_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "SingleTtchanAntitopPostVFP": { + "SingleTtchanAntitop_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/ST_t-channel_antitop_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/ST_t-channel_antitop_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], "xsec": 
80.0,
         "group": "Top",
         "das_name": "/ST_t-channel_antitop_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM",
     },
-    "SingleTtchanTopPostVFP": {
+    "SingleTtchanTop_2018": {
         "filepaths": [
-            "{BASE_PATH}/../y2018v6/ST_t-channel_top_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
+            "{BASE_PATH}/ST_t-channel_top_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
         ],
         "xsec": 134.2,
         "group": "Top",
         "das_name": "/ST_t-channel_top_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM",
     },
     # inclusive samples, keep for reference
-    # 'WWPostVFP' : {
+    # 'WW2018' : {
     #     'filepaths' :
     #         ["{BASE_PATH}/WW_TuneCP5_13TeV-pythia8/*.root/NanoV9MC2018_{NANO_PROD_TAG}"],
     #     'xsec' : 118.7,
     #     'group' : "Diboson",
     # },
-    # 'WZPostVFP' : {
+    # 'WZ2018' : {
     #     'filepaths' :
     #         ["{BASE_PATH}/WZ_TuneCP5_13TeV-pythia8/*.root/NanoV9MC2018_{NANO_PROD_TAG}"],
     #     'xsec' : 47.026760, # to check, taken from WZTo1L1Nu2Q dividing by BR: 10.71/(3*0.1086)/(1-3*0.033658-0.2)
     #     'group' : "Diboson",
     # },
     ##
-    "WWTo2L2NuPostVFP": {
+    "WWTo2L2Nu_2018": {
         "filepaths": [
-            "{BASE_PATH}/../y2018v6/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
+            "{BASE_PATH}/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
         ],
-        "xsec": 12.6,  # 118.7*0.1086*0.1086*9
+        "xsec": common.xsec_WWTo2L2Nu,
         "group": "Diboson",
         "das_name": "/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM",
     },
-    "WWTo1L1Nu2QPostVFP": {
+    "WWTo1L1Nu2Q_2018": {
         "filepaths": [
-            "{BASE_PATH}/../y2018v6/WWTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
+            "{BASE_PATH}/WWTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
         ],
-        "xsec": 52.146,  # 118.7*[(3*0.1086)*(1-3*0.1086)]*2 (2 is because one W or the other can go to Q)
+        "xsec": common.xsec_WWTo1L1Nu,
         "group": "Diboson",
         "das_name": "/WWTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM",
     },
-    "WZTo3LNuPostVFP": {
+    "WZTo3LNu_2018": {
         "filepaths": [
-            "{BASE_PATH}/../y2018v6/WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
+            "{BASE_PATH}/WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
         ],
-        "xsec": 4.91,  # 4.42965*1.109, 1.109 is the NLO to NNLO kfactor, for this one would need to make sure about the NLO XS, depends a lot on the dilepton mass cut
+        "xsec": common.xsec_WZTo3LNu,
         "group": "Diboson",
         "das_name": "/WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v2/NANOAODSIM",
     },
-    "WZTo2Q2LPostVFP": {
+    "WZTo2Q2L_2018": {
         "filepaths": [
-            "{BASE_PATH}/../y2018v6/WZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
+            "{BASE_PATH}/WZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}"
         ],
-        "xsec": 5.4341,  # 4.9*1.109
+        "xsec": common.xsec_WZTo2Q2L,
         "group": "Diboson",
         "das_name": "/WZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM",
     },
-    "WZTo1L1Nu2QPostVFP": {
+    "WZTo1L1Nu2Q_2018": {
         "filepaths": [
-
"{BASE_PATH}/WZTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], - "xsec": 11.781, # 10.71*1.10 + "xsec": common.xsec_WZTo1L1Nu2Q, "group": "Diboson", "das_name": "/WZTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "ZZTo2L2NuPostVFP": { + "ZZTo2L2Nu_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/ZZTo2L2Nu_TuneCP5_13TeV_powheg_pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/ZZTo2L2Nu_TuneCP5_13TeV_powheg_pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], - "xsec": 0.60, + "xsec": common.xsec_ZZTo2L2Nu, "group": "Diboson", "das_name": "/ZZTo2L2Nu_TuneCP5_13TeV_powheg_pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "ZZTo2Q2LPostVFP": { + "ZZTo2Q2L_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/ZZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" + "{BASE_PATH}/ZZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MC2018_{NANO_PROD_TAG}" ], - "xsec": 5.1, + "xsec": common.xsec_ZZTo2Q2L, "group": "Diboson", "das_name": "/ZZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIISummer20UL18NanoAODv9-106X_upgrade2018_realistic_v16_L1v1-v1/NANOAODSIM", }, - "QCDmuEnrichPt15PostVFP": { + "QCDmuEnrichPt15_2018": { "filepaths": [ - "{BASE_PATH}/../y2018v6/QCD_Pt-20_MuEnrichedPt15_TuneCP5_13TeV-pythia8/NanoV9MC2018_{NANO_PROD_TAG}/" + "{BASE_PATH}/QCD_Pt-20_MuEnrichedPt15_TuneCP5_13TeV-pythia8/NanoV9MC2018_{NANO_PROD_TAG}/" ], "xsec": 238800, "group": "QCD", diff --git a/wremnants/datasets/datasetDict_v9.py b/wremnants/datasets/datasetDictPostVFP_v9.py similarity index 74% rename from wremnants/datasets/datasetDict_v9.py rename to wremnants/datasets/datasetDictPostVFP_v9.py index c4d45cee7..746f4c34f 100644 --- a/wremnants/datasets/datasetDict_v9.py +++ b/wremnants/datasets/datasetDictPostVFP_v9.py @@ -7,8 +7,8 @@ f"{common.data_dir}/Cert_271036-284044_13TeV_Legacy2016_Collisions16_JSON.txt" ) -dataDictV9 = { - "dataPostVFP": { +dataDictV9_PostVFP = { + "SingleMuon_2016PostVFP": { "filepaths": [ "{BASE_PATH}/SingleMuon/NanoV9Run2016FDataPostVFP_{NANO_PROD_TAG}", "{BASE_PATH}/SingleMuon/NanoV9Run2016GDataPostVFP_{NANO_PROD_TAG}", @@ -18,99 +18,99 @@ "lumicsv": lumicsv, "lumijson": lumijson, }, - "ZmumuPostVFP": { + "Zmumu_2016PostVFP": { "filepaths": [ "{BASE_PATH}/DYJetsToMuMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], - "xsec": common.xsec_DYJetsToMuMu, + "xsec": common.xsec_DYJetsToLL, "group": "Zmumu", }, - "DYJetsToMuMuMass10to50PostVFP": { + "DYJetsToMuMuMass10to50_2016PostVFP": { "filepaths": [ "{BASE_PATH}/DYJetsToMuMu_M-10to50_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], - "xsec": common.xsec_DYJetsToMuMuMass10to50, + "xsec": common.xsec_DYJetsToLLMass10to50, "group": "DYlowMass", }, - "ZtautauPostVFP": { + "Ztautau_2016PostVFP": { "filepaths": [ "{BASE_PATH}/DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_H2ErratumFix_PDF_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], # At least one tau->e or mu decay, so everything that's not all other decays - "xsec": common.xsec_DYJetsToMuMu * common.Z_TAU_TO_LEP_RATIO, + "xsec": common.xsec_DYJetsToLL * common.Z_TAU_TO_LEP_RATIO, "group": "Ztautau", }, - "WplusmunuPostVFP": { + "Wplusmunu_2016PostVFP": { "filepaths": [ 
"{BASE_PATH}/WplusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], - "xsec": common.xsec_WplusJetsToMuNu, + "xsec": common.xsec_WplusJetsToLNu, "group": "Wmunu", }, - "WminusmunuPostVFP": { + "Wminusmunu_2016PostVFP": { "filepaths": [ "{BASE_PATH}/WminusJetsToMuNu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], - "xsec": common.xsec_WminusJetsToMuNu, + "xsec": common.xsec_WminusJetsToLNu, "group": "Wmunu", }, - "WplustaunuPostVFP": { + "Wplustaunu_2016PostVFP": { "filepaths": [ "{BASE_PATH}/WplusJetsToTauNu_TauToMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], - "xsec": common.BR_TAUToMU * common.xsec_WplusJetsToMuNu, + "xsec": common.BR_TAUToMU * common.xsec_WplusJetsToLNu, "group": "Wtaunu", }, - "WminustaunuPostVFP": { + "Wminustaunu_2016PostVFP": { "filepaths": [ "{BASE_PATH}/WminusJetsToTauNu_TauToMu_H2ErratumFix_PDFExt_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], - "xsec": common.BR_TAUToMU * common.xsec_WminusJetsToMuNu, + "xsec": common.BR_TAUToMU * common.xsec_WminusJetsToLNu, "group": "Wtaunu", }, - "TTLeptonicPostVFP": { + "TTLeptonic_2016PostVFP": { "filepaths": [ "{BASE_PATH}/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], "xsec": 88.29, "group": "Top", }, - "TTSemileptonicPostVFP": { + "TTSemileptonic_2016PostVFP": { "filepaths": [ "{BASE_PATH}/TTToSemiLeptonic_TuneCP5_13TeV-powheg-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], "xsec": 366.34, "group": "Top", }, - "SingleTschanLepDecaysPostVFP": { + "SingleTschanLepDecays_2016PostVFP": { "filepaths": [ "{BASE_PATH}/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-amcatnlo-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], "xsec": 3.609, "group": "Top", }, - "SingleTtWAntitopPostVFP": { + "SingleTtWAntitop_2016PostVFP": { "filepaths": [ "{BASE_PATH}/ST_tW_antitop_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], "xsec": 19.55, # 35.85 * (1.0-((1-0.1086*3)*(1-0.1086*3))) = 19.5 pb "group": "Top", }, - "SingleTtWTopPostVFP": { + "SingleTtWTop_2016PostVFP": { "filepaths": [ "{BASE_PATH}/ST_tW_top_5f_NoFullyHadronicDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], "xsec": 19.55, "group": "Top", }, - "SingleTtchanAntitopPostVFP": { + "SingleTtchanAntitop_2016PostVFP": { "filepaths": [ "{BASE_PATH}/ST_t-channel_antitop_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], "xsec": 80.0, "group": "Top", }, - "SingleTtchanTopPostVFP": { + "SingleTtchanTop_2016PostVFP": { "filepaths": [ "{BASE_PATH}/ST_t-channel_top_5f_InclusiveDecays_TuneCP5_13TeV-powheg-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], @@ -131,84 +131,84 @@ # 'group' : "Diboson", # }, ## - "WWTo2L2NuPostVFP": { + "WWTo2L2Nu_2016PostVFP": { "filepaths": [ "{BASE_PATH}/WWTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], - "xsec": 12.6, # 118.7*0.1086*0.1086*9 + "xsec": common.xsec_WWTo2L2Nu, "group": "Diboson", }, - "WWTo1L1Nu2QPostVFP": { + "WWTo1L1Nu2Q_2016PostVFP": { "filepaths": [ "{BASE_PATH}/WWTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], - "xsec": 52.146, # 118.7*[(3*0.1086)*(1-3*0.1086)]*2 (2 is because one W or the other can go to Q) + "xsec": common.xsec_WWTo1L1Nu, "group": "Diboson", }, - "WZTo3LNuPostVFP": { + "WZTo3LNu_2016PostVFP": { "filepaths": [ 
"{BASE_PATH}/WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], - "xsec": 4.91, # 4.42965*1.109, 1.109 is the NLO to NNLO kfactor, for this one would need to make sure about the NLO XS, depends a lot on the dilepton mass cut + "xsec": common.xsec_WZTo3LNu, "group": "Diboson", }, - "WZTo2Q2LPostVFP": { + "WZTo2Q2L_2016PostVFP": { "filepaths": [ "{BASE_PATH}/WZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], - "xsec": 5.4341, # 4.9*1.109 + "xsec": common.xsec_WZTo2Q2L, "group": "Diboson", }, - "WZTo1L1Nu2QPostVFP": { + "WZTo1L1Nu2Q_2016PostVFP": { "filepaths": [ "{BASE_PATH}/WZTo1L1Nu2Q_4f_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], - "xsec": 11.781, # 10.71*1.10 + "xsec": common.xsec_WZTo1L1Nu2Q, "group": "Diboson", }, - "ZZTo2L2NuPostVFP": { + "ZZTo2L2Nu_2016PostVFP": { "filepaths": [ "{BASE_PATH}/ZZTo2L2Nu_TuneCP5_13TeV_powheg_pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], - "xsec": 0.60, + "xsec": common.xsec_ZZTo2L2Nu, "group": "Diboson", }, - "ZZTo2Q2LPostVFP": { + "ZZTo2Q2L_2016PostVFP": { "filepaths": [ "{BASE_PATH}/ZZTo2Q2L_mllmin4p0_TuneCP5_13TeV-amcatnloFXFX-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], - "xsec": 5.1, + "xsec": common.xsec_ZZTo2Q2L, "group": "Diboson", }, - "QCDmuEnrichPt15PostVFP": { + "QCDmuEnrichPt15_2016PostVFP": { "filepaths": [ "{BASE_PATH}/QCD_Pt-20_MuEnrichedPt15_TuneCP5_13TeV-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}" ], "xsec": 238800, "group": "QCD", }, - "GGToMuMuMass5to50PostVFP": { + "GGToMuMuMass5to50_2016PostVFP": { "filepaths": [ "{BASE_PATH}/GGToMuMu_M-5To50_TuneCP5_13TeV-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], "xsec": common.xsec_GGtoMuMu, "group": "PhotonInduced", }, - "GGToLLPostVFP": { + "GGToLL_2016PostVFP": { "filepaths": [ "{BASE_PATH}/GGToLL_TuneCP5_13TeV-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], "xsec": 14.93, "group": "PhotonInduced", }, - "QGToDYQTo2LPostVFP": { + "QGToDYQTo2L_2016PostVFP": { "filepaths": [ "{BASE_PATH}/QGToDYQTo2L_TuneCP5_13TeV-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], "xsec": 1.373, "group": "PhotonInduced", }, - "QGToWQToLNuPostVFP": { + "QGToWQToLNu_2016PostVFP": { "filepaths": [ "{BASE_PATH}/QGToWQToLNu_TuneCP5_13TeV-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], @@ -217,70 +217,77 @@ "xsec_dn": 3.588e01, "group": "PhotonInduced", }, - "WtoNMu_MN-5-V-0p001": { + "WtoNMuMass5_2016PostVFP": { "filepaths": [ - "{BASE_PATH}/WtoNMu_MN-5-V-0p001_TuneCP5_13TeV_madgraph-pythia8/" + "{BASE_PATH}/WtoNMu_MN-5-V-0p001_TuneCP5_13TeV_madgraph-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], "xsec": common.xsec_WtoNMu, - "group": "WtoNMu_5", + "group": "WtoNMu", }, - "WtoNMu_MN-10-V-0p001": { + "WtoNMuMass10_2016PostVFP": { "filepaths": [ - "{BASE_PATH}/WtoNMu_MN-10-V-0p001_TuneCP5_13TeV_madgraph-pythia8/" + "{BASE_PATH}/WtoNMu_MN-10-V-0p001_TuneCP5_13TeV_madgraph-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], "xsec": common.xsec_WtoNMu, - "group": "WtoNMu_10", + "group": "WtoNMu", }, - "WtoNMu_MN-30-V-0p001": { + "WtoNMuMass30_2016PostVFP": { "filepaths": [ - "{BASE_PATH}/WtoNMu_MN-30-V-0p001_TuneCP5_13TeV_madgraph-pythia8/" + "{BASE_PATH}/WtoNMu_MN-30-V-0p001_TuneCP5_13TeV_madgraph-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], "xsec": common.xsec_WtoNMu, - "group": "WtoNMu_30", + "group": "WtoNMu", }, - "WtoNMu_MN-50-V-0p001": { + "WtoNMuMass50_2016PostVFP": { "filepaths": [ - "{BASE_PATH}/WtoNMu_MN-50-V-0p001_TuneCP5_13TeV_madgraph-pythia8/" + 
"{BASE_PATH}/WtoNMu_MN-50-V-0p001_TuneCP5_13TeV_madgraph-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}", ], "xsec": common.xsec_WtoNMu, - "group": "WtoNMu_50", + "group": "WtoNMu", + }, + "WtoMuNuSMEFT_2016PostVFP": { + "filepaths": [ + "{BASE_PATH}/WtoMuNu_nuSMEFT_MNu-0p1_Lambda-246_TuneCP5_13TeV_madgraph-pythia8/NanoV9MCPostVFP_{NANO_PROD_TAG}", + ], + "xsec": common.xsec_WtoNMu, + "group": "WtoNMu", }, } # extended version with additional samples (but missing some pdf sets) -dataDictV9extended = copy.deepcopy(dataDictV9) +dataDictV9_PostVFP_extended = copy.deepcopy(dataDictV9_PostVFP) -dataDictV9extended["ZmumuPostVFP"]["filepaths"].extend( +dataDictV9_PostVFP_extended["Zmumu_2016PostVFP"]["filepaths"].extend( [ "{BASE_PATH}/DYJetsToMuMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ] ) -dataDictV9extended["ZtautauPostVFP"]["filepaths"].extend( +dataDictV9_PostVFP_extended["Ztautau_2016PostVFP"]["filepaths"].extend( [ "{BASE_PATH}/DYJetsToTauTau_M-50_AtLeastOneEorMuDecay_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ] ) -dataDictV9extended["WplusmunuPostVFP"]["filepaths"].extend( +dataDictV9_PostVFP_extended["Wplusmunu_2016PostVFP"]["filepaths"].extend( [ "{BASE_PATH}/WplusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ] ) -dataDictV9extended["WminusmunuPostVFP"]["filepaths"].extend( +dataDictV9_PostVFP_extended["Wminusmunu_2016PostVFP"]["filepaths"].extend( [ "{BASE_PATH}/WminusJetsToMuNu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ] ) -dataDictV9extended["WplustaunuPostVFP"]["filepaths"].extend( +dataDictV9_PostVFP_extended["Wplustaunu_2016PostVFP"]["filepaths"].extend( [ "{BASE_PATH}/WplusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ] ) -dataDictV9extended["WminustaunuPostVFP"]["filepaths"].extend( +dataDictV9_PostVFP_extended["Wminustaunu_2016PostVFP"]["filepaths"].extend( [ "{BASE_PATH}/WminusJetsToTauNu_TauToMu_H2ErratumFix_TuneCP5_13TeV-powhegMiNNLO-pythia8-photos/NanoV9MCPostVFP_{NANO_PROD_TAG}", ] diff --git a/wremnants/datasets/dataset_tools.py b/wremnants/datasets/dataset_tools.py index 8affc438d..698843964 100644 --- a/wremnants/datasets/dataset_tools.py +++ b/wremnants/datasets/dataset_tools.py @@ -5,26 +5,29 @@ import XRootD.client import narf +from wremnants.datasets.datasetDict13TeVGen import dataDict_13TeVGen from wremnants.datasets.datasetDict2017_v9 import dataDictV9_2017 from wremnants.datasets.datasetDict2017G_v9 import dataDictV9_2017G +from wremnants.datasets.datasetDict2017H import dataDict_2017H from wremnants.datasets.datasetDict2018_v9 import dataDictV9_2018 -from wremnants.datasets.datasetDict_gen import genDataDict -from wremnants.datasets.datasetDict_lowPU import dataDictLowPU from wremnants.datasets.datasetDict_lowPU2023 import dataDictLowPU2023 # set the debug level for logging incase of full printout -from wremnants.datasets.datasetDict_v9 import dataDictV9, dataDictV9extended +from wremnants.datasets.datasetDictPostVFP_v9 import ( + dataDictV9_PostVFP, + dataDictV9_PostVFP_extended, +) from wums import logging logger = logging.child_logger(__name__) default_nfiles = { - "WminusmunuPostVFP": 1700, - "WplusmunuPostVFP": 2000, - "WminustaunuPostVFP": 400, - "WplustaunuPostVFP": 500, - "ZmumuPostVFP": 900, - "ZtautauPostVFP": 1200, + "Wminusmunu_2016PostVFP": 1700, + "Wplusmunu_2016PostVFP": 2000, + 
"Wminustaunu_2016PostVFP": 400, + "Wplustaunu_2016PostVFP": 500, + "Zmumu_2016PostVFP": 900, + "Ztautau_2016PostVFP": 1200, } @@ -177,23 +180,15 @@ def makeFilelist( return toreturn -def getDataPath(mode=None): +def getDataPath(): import socket hostname = socket.gethostname() if hostname.endswith(".cern.ch"): - if mode and "lowpu" in mode: - base_path = "root://eoscms.cern.ch//store/cmst3/group/wmass/LowPU" - else: - base_path = ( - "root://eoscms.cern.ch//store/cmst3/group/wmass/w-mass-13TeV/NanoAOD" - ) + base_path = "/scratch/shared/NanoAOD" elif hostname.endswith(".mit.edu"): - if mode and "lowpu" in mode: - base_path = "/scratch/submit/cms/wmass/NanoAOD/LowPU" - else: - base_path = "/scratch/submit/cms/wmass/NanoAOD" + base_path = "/scratch/submit/cms/wmass/NanoAOD" elif hostname == "cmsanalysis.pi.infn.it": # NOTE: If anyone wants to run lowpu analysis at Pisa they'd probably want a different path base_path = "/scratchnvme/wmass/NANOV9/postVFP" @@ -216,7 +211,6 @@ def getDatasets( maxFiles=default_nfiles, filt=None, excl=None, - mode=None, base_path=None, nanoVersion="v9", data_tags=[ @@ -240,42 +234,43 @@ def getDatasets( maxFiles = default_nfiles if not base_path: - base_path = getDataPath(mode) + base_path = getDataPath() logger.info(f"Loading samples from {base_path}.") # TODO avoid use of nested if statements with e.g. a unified dict - if nanoVersion == "v9": - if era == "2016PostVFP": - dataDict = dataDictV9 - if extended: - dataDict = dataDictV9extended - logger.info("Using NanoAOD V9 for 2016PostVFP") - elif era == "2017": - dataDict = dataDictV9_2017 - logger.info("Using NanoAOD V9 for 2017") - elif era == "2017G": - dataDict = dataDictV9_2017G - logger.info("Using NanoAOD V9 for 2017G") - elif era == "2018": - dataDict = dataDictV9_2018 - logger.info("Using NanoAOD V9 for 2018") - else: - raise ValueError(f"Unsupported era {era}") - elif nanoVersion == "v12": # 2022/2023 - pass + if era == "2016PostVFP": + dataDict = dataDictV9_PostVFP + if extended: + dataDict = dataDictV9_PostVFP_extended + logger.info("Using NanoAOD V9 for 2016PostVFP") + elif era == "2017": + dataDict = dataDictV9_2017 + logger.info("Using NanoAOD V9 for 2017") + elif era == "2017G": + dataDict = dataDictV9_2017G + logger.info("Using NanoAOD V9 for 2017G") + elif era == "2017H": + dataDict = dataDict_2017H + logger.info("Using NanoAOD V9 for 2017H") + elif era == "2018": + dataDict = dataDictV9_2018 + logger.info("Using NanoAOD V9 for 2018") + elif era == "13TeVGen": + dataDict = dataDictV9_PostVFP_extended if extended else dataDictV9_PostVFP + dataDict.update( + { + **dataDict_13TeVGen, + **{k: v for k, v in dataDictV9_2017.items() if v["group"] != "Data"}, + **{k: v for k, v in dataDict_2017H.items() if v["group"] != "Data"}, + **{k: v for k, v in dataDictV9_2018.items() if v["group"] != "Data"}, + } + ) + logger.info("Using NanoAOD V9 for all eras") + elif "2023_PUAVE" in era: + dataDict = dataDictLowPU2023 + logger.info("Using NanoAOD V9 for 2018") else: - raise ValueError("Only NanoAODv9/v12 is supported") - - if mode: - if "gen" in mode: - dataDict.update(genDataDict) - elif "lowpu" in mode: - if era == "2017H": - dataDict = dataDictLowPU - elif "2023_PUAVE" in era: - dataDict = dataDictLowPU2023 - else: - raise ValueError(f"Low pileup era {era} not supported") + raise ValueError(f"Unsupported era {era}") narf_datasets = [] for sample, info in dataDict.items(): @@ -284,7 +279,7 @@ def getDatasets( if excl not in [None, []] and (info["group"] in excl or sample in excl): continue - if sample in 
genDataDict: + if sample in dataDict_13TeVGen: base_path_sample = base_path.replace("NanoAOD", "NanoGen") else: base_path_sample = base_path @@ -322,8 +317,6 @@ def getDatasets( ) if is_data: - if mode == "gen": - continue narf_info.update( dict( is_data=True, diff --git a/wremnants/helicity_utils.py b/wremnants/helicity_utils.py index 7384f3bdc..41133c95c 100644 --- a/wremnants/helicity_utils.py +++ b/wremnants/helicity_utils.py @@ -34,6 +34,7 @@ def make_helicity_weight_helper( is_z=False, filename=f"{common.data_dir}/angularCoefficients/w_z_helicity_xsecs_theoryAgnosticBinning_scetlib_dyturboCorr_maxFiles_m1.hdf5", rebin_ptVgen_edges=None, + rebin_absYVgen_edges=None, ): with h5py.File(filename, "r") as ff: @@ -46,6 +47,10 @@ def make_helicity_weight_helper( hist_helicity_xsec_scales = hh.rebinHist( hist_helicity_xsec_scales, "ptVgen", rebin_ptVgen_edges ) + if rebin_absYVgen_edges is not None: + hist_helicity_xsec_scales = hh.rebinHist( + hist_helicity_xsec_scales, "absYVgen", rebin_absYVgen_edges + ) corrh = helicity_xsec_to_angular_coeffs(hist_helicity_xsec_scales) diff --git a/wremnants/histmaker_tools.py b/wremnants/histmaker_tools.py index 432937c37..5c36cf57e 100644 --- a/wremnants/histmaker_tools.py +++ b/wremnants/histmaker_tools.py @@ -237,7 +237,7 @@ def make_quantile_helper( axes, dependent_axes=[], name="nominal", - processes=["ZmumuPostVFP"], + processes=["Zmumu_2016PostVFP"], n_quantiles=[], ): """ diff --git a/wremnants/muon_validation.py b/wremnants/muon_validation.py index e6b567cce..e3061b79f 100644 --- a/wremnants/muon_validation.py +++ b/wremnants/muon_validation.py @@ -327,7 +327,7 @@ def make_hists_for_manual_scale_shifts(df, axes, cols, cols_gen_smeared, results def muon_scale_variation_from_manual_shift( resultdict, - procs=["WplusmunuPostVFP", "WminusmunuPostVFP", "ZmumuPostVFP"], + procs=["Wplusmunu_2016PostVFP", "Wminusmunu_2016PostVFP", "Zmumu_2016PostVFP"], ): for proc in procs: proc_hists = resultdict[proc]["output"] diff --git a/wremnants/syst_tools.py b/wremnants/syst_tools.py index 26a80a05a..df5b7ae19 100644 --- a/wremnants/syst_tools.py +++ b/wremnants/syst_tools.py @@ -71,27 +71,27 @@ def uncHist(unc): } transforms["scetlib_dyturboMSHT20Up"] = { "action": lambda h: pdfUnc(h, "pdfMSHT20", "vars")[0], - "procs": common.vprocs_all, + "procs": common.vprocs, } transforms["scetlib_dyturboMSHT20Down"] = { "action": lambda h: pdfUnc(h, "pdfMSHT20", "vars")[1], - "procs": common.vprocs_all, + "procs": common.vprocs, } transforms["scetlib_dyturboCT18ZUp"] = { "action": lambda h: pdfUnc(h, "pdfCT18Z", "vars")[0], - "procs": common.vprocs_all, + "procs": common.vprocs, } transforms["scetlib_dyturboCT18ZDown"] = { "action": lambda h: pdfUnc(h, "pdfCT18Z", "vars")[1], - "procs": common.vprocs_all, + "procs": common.vprocs, } transforms["scetlib_dyturboMSHT20an3loUp"] = { "action": lambda h: pdfUnc(h, "pdfMSHT20", "vars")[0], - "procs": common.zprocs_all, + "procs": common.zprocs, } transforms["scetlib_dyturboMSHT20an3loDown"] = { "action": lambda h: pdfUnc(h, "pdfMSHT20", "vars")[1], - "procs": common.zprocs_all, + "procs": common.zprocs, } transforms["ewUp"] = { "action": lambda h, **args: ( @@ -1201,7 +1201,7 @@ def add_syst_hist( def define_mass_width_sin2theta_weights(df, proc): # TODO can these be parsed more automatically? 
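The getDatasets() refactor above still carries the "# TODO avoid use of nested if statements with e.g. a unified dict" comment. For illustration only, a table-driven lookup could look like the sketch below, reusing the per-era dicts already imported at the top of dataset_tools.py; the names _ERA_DICTS and pick_data_dict are invented here and are not part of this change.

# Hypothetical sketch of the unified-dict dispatch suggested by the TODO in
# dataset_tools.getDatasets(); not the repository's actual implementation.
_ERA_DICTS = {
    "2016PostVFP": dataDictV9_PostVFP,
    "2017": dataDictV9_2017,
    "2017G": dataDictV9_2017G,
    "2017H": dataDict_2017H,
    "2018": dataDictV9_2018,
}


def pick_data_dict(era, extended=False):
    # the extended sample list only exists for 2016PostVFP, and the low-PU
    # 2023 eras are matched by substring, so both special cases stay explicit
    if era == "2016PostVFP" and extended:
        return dataDictV9_PostVFP_extended
    if "2023_PUAVE" in era:
        return dataDictLowPU2023
    if era in _ERA_DICTS:
        return _ERA_DICTS[era]
    raise ValueError(f"Unsupported era {era}")

Keeping the special cases out of the table leaves only exact-match eras in _ERA_DICTS, which is what makes the dict lookup a clean replacement for the if/elif chain.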
- if proc in common.zprocs_all: + if proc in common.zprocs: m0 = 91.1876 gamma0 = 2.4941343245745466 massvals = [ @@ -1332,7 +1332,7 @@ def define_mass_width_sin2theta_weights(df, proc): "auto res = widthWeight_tensor; res = nominal_weight*res; return res;", ) - if proc in common.zprocs_all: + if proc in common.zprocs: if df.HasColumn("MEParamWeightAltSet4"): df = df.Alias("sin2thetaWeight_col", "MEParamWeightAltSet4") elif df.HasColumn("LHEReweightingWeight"): @@ -1394,7 +1394,7 @@ def massWeightNames(matches=None, proc="", exclude=[]): for i in range(nweights) if int(abs(central - i) * 10) not in exclude ] - if proc and (proc in common.zprocs_all or proc == "Z") and 2.1 not in exclude: + if proc and (proc in common.zprocs or proc == "Z") and 2.1 not in exclude: # This is the PDG uncertainty (turned off for now since it doesn't seem to have been read into the nano) names.extend(["massShiftZ2p1MeVDown", "massShiftZ2p1MeVUp"]) @@ -1899,9 +1899,15 @@ def is_flavor_dependent_np(var_label): 0, 1, name="chargeVgenNP", underflow=False, overflow=False ) + axis_absYVgen = hist.axis.Variable( + common.absYWgen_binning_corr if isW else common.absYZgen_binning_corr, + name="absYVgenNP", + underflow=False, + ) + # since the last column might be an additional weight, the extra columns and axes have to go at the appropriate place nax = len(axes) - axes_FlavDepNP = [*axes, theory_tools.axis_absYVgen, axis_chargegen] + axes_FlavDepNP = [*axes, axis_absYVgen, axis_chargegen] cols_FlavDepNP = cols[:nax] + ["absYVgen", "chargeVgen"] + cols[nax:] name = Datagroups.histName(base_name, syst=tensor_name) add_syst_hist( @@ -2524,7 +2530,7 @@ def add_theory_hists( scale_axes = axes scale_cols = cols - isZ = dataset_name in common.zprocs_all + isZ = dataset_name in common.zprocs df = theory_tools.define_scale_tensor(df) diff --git a/wremnants/theoryAgnostic_tools.py b/wremnants/theoryAgnostic_tools.py index 018d83069..1eb94ec0c 100644 --- a/wremnants/theoryAgnostic_tools.py +++ b/wremnants/theoryAgnostic_tools.py @@ -63,7 +63,9 @@ def add_xnorm_histograms( # TODO: this does not look correct since theoryAgnostic_axes only contain only polarization independent observables (pTV, YV, mV, qV) # and the helicity weights are only nonzero for polarization dependent variables (cos(theta*), phi*, lepton eta, ...) 
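A note on the flavour-dependent non-perturbative axis added above: hist.axis.Variable with underflow=False drops only the underflow bin, while the overflow bin is kept, so |Y| values beyond the last edge are still counted rather than silently lost. A self-contained illustration follows; the edge values are invented, since the real binnings come from common.absYWgen_binning_corr and common.absYZgen_binning_corr.

import hist

# Invented |Y| edges for illustration; the analysis reads the real ones
# from the common module depending on whether the process is a W or a Z.
edges = [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 4.0, 5.0]

axis_absYVgen = hist.axis.Variable(
    edges, name="absYVgenNP", underflow=False  # |Y| >= 0, so no underflow bin needed
)

h = hist.Hist(axis_absYVgen)
h.fill(absYVgenNP=[0.3, 2.7, 4.9, 6.2])  # 6.2 is counted in the overflow bin
print(h.view(flow=True))  # the last entry is the overflow count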
- df_xnorm = define_helicity_weights(df_xnorm, is_z=dataset_name == "ZmumuPostVFP") + df_xnorm = define_helicity_weights( + df_xnorm, is_z=dataset_name == "Zmumu_2016PostVFP" + ) df_xnorm = df_xnorm.DefinePerSample("xnorm", "0.5") axis_xnorm = hist.axis.Regular( 1, 0.0, 1.0, name="count", underflow=False, overflow=False diff --git a/wremnants/theory_corrections.py b/wremnants/theory_corrections.py index 5b849a5bd..0cae59eb5 100644 --- a/wremnants/theory_corrections.py +++ b/wremnants/theory_corrections.py @@ -513,43 +513,34 @@ def make_corr_by_helicity( def make_theory_helpers( - args, + pdfs, + theory_corr=[], procs=["Z", "W"], corrs=["qcdScale", "pdf", "pdf_from_corr", "alphaS", "pdf_central"], ): theory_helpers_procs = {p: {} for p in procs} - if "Z" in procs and "qcdScale" in corrs: - theory_helpers_procs["Z"]["qcdScale"] = make_qcd_uncertainty_helper_by_helicity( - is_z=True, - filename=( - f"{common.data_dir}/angularCoefficients/w_z_helicity_xsecs_maxFiles_m1_alphaSunfoldingBinning_helicity.hdf5" - if args.unfolding - else f"{common.data_dir}/angularCoefficients/w_z_moments.hdf5" - ), - rebin_ptVgen=False, - return_tensor=True, - ) - if "W" in procs and "qcdScale" in corrs: - theory_helpers_procs["W"]["qcdScale"] = make_qcd_uncertainty_helper_by_helicity( - is_z=False, - filename=(f"{common.data_dir}/angularCoefficients/w_z_moments.hdf5"), - rebin_ptVgen=False, - return_tensor=True, - ) - for proc in theory_helpers_procs.keys(): + if "qcdScale" in corrs: + theory_helpers_procs[proc]["qcdScale"] = ( + make_qcd_uncertainty_helper_by_helicity( + is_z=proc == "Z", + rebin_ptVgen=False, + return_tensor=True, + ) + ) + if "pdf" in corrs: theory_helpers_procs[proc]["pdf"] = ( make_pdfs_uncertainties_helper_by_helicity( proc=proc, - pdfs=args.pdfs, + pdfs=pdfs, ) ) if "pdf_from_corr" in corrs: - pdf_from_corrs = [x + "_Corr" for x in args.theoryCorr if "pdfvar" in x] + pdf_from_corrs = [x + "_Corr" for x in theory_corr if "pdfvar" in x] theory_helpers_procs[proc]["pdf_from_corr"] = ( make_pdfs_from_corrs_uncertainties_helper_by_helicity( proc=proc, @@ -557,7 +548,7 @@ def make_theory_helpers( ) ) if "alphaS" in corrs: - as_vars = [x + "_Corr" for x in args.theoryCorr if "pdfas" in x] + as_vars = [x + "_Corr" for x in theory_corr if "pdfas" in x] theory_helpers_procs[proc]["alphaS"] = ( make_alphaS_uncertainties_helper_by_helicity( proc=proc, @@ -568,11 +559,11 @@ def make_theory_helpers( theory_helpers_procs[proc]["pdf_central"] = ( make_uncertainty_helper_by_helicity( proc=proc, - nom=theory_tools.pdfMap[args.pdfs[0]]["name"], + nom=theory_tools.pdfMap[pdfs[0]]["name"], den="pdf_uncorr", central_weights=True, filename=common.data_dir - + f"/TheoryCorrections/ByHelicity/PDFs/w_z_gen_dists_maxFiles_m1_{args.pdfs[0]}_pdfByHelicity_skimmed.hdf5", + + f"/TheoryCorrections/ByHelicity/PDFs/w_z_gen_dists_maxFiles_m1_{pdfs[0]}_pdfByHelicity_skimmed.hdf5", ) ) @@ -581,10 +572,10 @@ def make_theory_helpers( def make_qcd_uncertainty_helper_by_helicity( is_z=False, - filename=f"{common.data_dir}/angularCoefficients/w_z_moments.hdf5", + filename=f"{common.data_dir}/angularCoefficients/w_z_helicity_xsecs.hdf5", rebin_ptVgen=common.ptV_binning, rebin_absYVgen=False, - rebin_massVgen=True, + rebin_massVgen=False, return_tensor=True, ): @@ -807,27 +798,33 @@ def make_uncertainty_helper_by_helicity( if filename_den is None: filename_den = filename - # load helicity cross sections from file + # load helicity cross sections from file #TODO: include DYJetsToMuMuMass10to50 proc_map = { - "Z": ("ZmumuPostVFP",), - 
"W": ("WplusmunuPostVFP", "WminusmunuPostVFP"), + "Z": ("Zmumu",), + "W": ("Wplusmunu", "Wminusmunu"), } def _collect_hist(hist_name, filename): hist_key = f"nominal_gen_{hist_name}" hists = [] - for process in proc_map.get(proc, ()): - if not os.path.exists(filename): - logger.warning( - f"File {filename} does not exist. Not creating histogram of variations by helicities for process {proc} and variation {nom}." - ) - return None - with h5py.File(filename, "r") as h5file: + if not os.path.exists(filename): + logger.warning( + f"File {filename} does not exist. Not creating histogram of variations by helicities." + ) + return None + with h5py.File(filename, "r") as h5file: + for process in proc_map.get(proc, ()): results = input_tools.load_results_h5py(h5file) - outputs = results.get(process, {}).get("output", {}) + process_key = [k for k in results.keys() if k.startswith(process)] + if len(process_key) == 0: + logger.warning( + f"Did not find key for process {process} in {filename}. Not creating histogram of variations by helicities for process {process} and variation {nom}." + ) + return None + outputs = results[process_key[0]].get("output", {}) if hist_key not in outputs: logger.warning( - f"Did not find {hist_key} in {filename}. Not creating histogram of variations by helicities for process {proc} and variation {nom}." + f"Did not find {hist_key} in {filename}. Not creating histogram of variations by helicities for process {process} and variation {nom}." ) return None hists.append(outputs[hist_key].get()) diff --git a/wremnants/theory_tools.py b/wremnants/theory_tools.py index 30f4cf43d..1f6265e22 100644 --- a/wremnants/theory_tools.py +++ b/wremnants/theory_tools.py @@ -20,31 +20,6 @@ [0.25, 0.75, 1.25, 2.75], name="muFfact", underflow=False, overflow=False ) -axis_absYVgen = hist.axis.Variable( - # [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.25, 2.5, 2.75, 3, 3.25, 3.5, 3.75, 4, 4.25, 4.5, 4.75, 5, 10], - [ - 0.0, - 0.25, - 0.5, - 0.75, - 1.0, - 1.25, - 1.5, - 1.75, - 2.0, - 2.25, - 2.5, - 2.75, - 3.0, - 3.25, - 3.5, - 4.0, - 5.0, - ], # this is the same binning as hists from theory corrections - name="absYVgenNP", - underflow=False, -) - scale_tensor_axes = (axis_muRfact, axis_muFfact) pdfMap = { @@ -238,7 +213,7 @@ ] extended_pdf_datasets = [ - x for x in common.vprocs_all if not any(y in x for y in ["NNLOPS", "MiNLO"]) + x for x in common.vprocs if not any(y in x for y in ["NNLOPS", "MiNLO"]) ] @@ -841,7 +816,7 @@ def define_pdf_columns(df, dataset_name, pdfs, noAltUnc): ) if ( len(pdfs) == 0 - or dataset_name not in common.vprocs_all + or dataset_name not in common.vprocs or "horace" in dataset_name or "winhac" in dataset_name or "LHEPdfWeight" not in df.GetColumnNames() diff --git a/wremnants/unfolding_tools.py b/wremnants/unfolding_tools.py index fa805a970..78211679d 100644 --- a/wremnants/unfolding_tools.py +++ b/wremnants/unfolding_tools.py @@ -311,6 +311,16 @@ def reweight_to_fitresult( return corr_helper +def rebin_pt(edges): + # use 2 ptll bin for each ptVGen bin, except first and last + # first gen bin same size as reco bin, then 1 gen bin for 2 reco bins + new_edges = np.array([*edges[:2], *edges[3::2]]) + if len(new_edges) % 2: + # in case it's an odd number of edges, last two bins are overflow + edges = edges[:-1] + return new_edges + + class UnfolderZ: """ To be used in histmakers to define columns and add histograms for unfolding of Z dilepton kinematics @@ -344,15 +354,6 @@ def __init__( self.poi_as_noi = poi_as_noi self.unfolding_levels = unfolding_levels - def 
rebin_pt(edges):
-        # use 2 ptll bin for each ptVGen bin, except first and last
-        # first gen bin same size as reco bin, then 1 gen bin for 2 reco bins
-        new_edges = np.array([*edges[:2], *edges[3::2]])
-        if len(new_edges) % 2:
-            # in case it's an odd number of edges, last two bins are overflow
-            edges = edges[:-1]
-        return new_edges
-
         self.weightsByHelicity_helper_unfolding = None
 
         self.unfolding_axes = {}
@@ -374,12 +375,15 @@
 
         if self.add_helicity_axis:
             if self.weightsByHelicity_helper_unfolding is None:
-                edges = [ax for ax in a if ax.name == "ptVGen"][0].edges
+                # need to rebin to the edges used for the unfolding, and remove out-of-acceptance bins (|Y|>2.5 and pT>44)
+                pt_edges = [ax for ax in a if ax.name == "ptVGen"][0].edges
+                absY_edges = [ax for ax in a if ax.name == "absYVGen"][0].edges
                 # helper to derive helicity xsec shape from event by event reweighting
                 self.weightsByHelicity_helper_unfolding = helicity_utils.make_helicity_weight_helper(
                     is_z=True,
-                    filename=f"{common.data_dir}/angularCoefficients/w_z_helicity_xsecs_maxFiles_m1_alphaSunfoldingBinning_helicity.hdf5",
-                    rebin_ptVgen_edges=edges,
+                    rebin_ptVgen_edges=pt_edges,
+                    rebin_absYVgen_edges=absY_edges,
+                    filename=f"{common.data_dir}/angularCoefficients/w_z_helicity_xsecs.hdf5",
                )
 
         for ax in a:
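To make the edge construction in rebin_pt() concrete: the first reco ptll bin is kept as its own gen bin, subsequent reco bins are merged in pairs, and when this leaves an odd number of edges the last edge is dropped so that the trailing reco bins fall into the gen overflow. A standalone check, with edge values invented for illustration:

import numpy as np


def rebin_pt(edges):
    # same logic as the module-level helper in unfolding_tools.py
    new_edges = np.array([*edges[:2], *edges[3::2]])
    if len(new_edges) % 2:
        # odd number of edges: drop the last one so the trailing reco bins go to overflow
        new_edges = new_edges[:-1]
    return new_edges


print(rebin_pt([0, 2, 4, 6, 8, 10, 12]))      # [ 0  2  6 10]; reco bin 10-12 is left to overflow
print(rebin_pt([0, 2, 4, 6, 8, 10, 12, 14]))  # [ 0  2  6 10]; odd edge count, so 10-14 goes to overflow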