10 changes: 7 additions & 3 deletions FF_calculation/FF_QCD.py
@@ -12,10 +12,12 @@
import ROOT

import helper.ff_functions as ff_func
import helper.logging_helper as logging_helper
import helper.plotting as plotting
from helper.functions import RuntimeVariables


@logging_helper.grouped_logs(lambda args: f"{args[6]}")
def calculation_QCD_FFs(
args: Tuple[Any, ...],
) -> Dict[str, Union[str, Dict[str, str]]]:
@@ -46,7 +48,7 @@ def calculation_QCD_FFs(
*_, # SRlike_hists, ARlike_hists only used in ttbar calculation
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement
SRlike_hists = dict()
@@ -188,6 +190,7 @@ def calculation_QCD_FFs(
return ff_func.fill_corrlib_expression(corrlib_exp, splitting.variables, splitting.split)


@logging_helper.grouped_logs(lambda args: f"{args[7]}")
def non_closure_correction(
args: Tuple[Any, ...],
) -> Dict[str, np.ndarray]:
@@ -224,7 +227,7 @@ def non_closure_correction(
for_DRtoSR,
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement
SRlike_hists = dict()
@@ -410,6 +413,7 @@ def non_closure_correction(
return correction_dict


@logging_helper.grouped_logs(lambda args: f"{args[6]}")
def DR_SR_correction(
args: Tuple[Any, ...],
) -> Dict[str, np.ndarray]:
@@ -442,7 +446,7 @@ def DR_SR_correction(
corr_evaluators,
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement
SRlike_hists = dict()
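Note: the new helper/logging_helper.py module that these files now import is not itself part of this diff, so its API has to be inferred from the call sites. A minimal sketch of what setup_logging and the module-level LOG_FILENAME / LOG_LEVEL settings (overwritten by the driver scripts further down in this diff) could look like — the names are taken from the diff, the implementation below is an assumption:

```python
# helper/logging_helper.py -- hypothetical sketch, inferred from the call sites in this diff
import logging
from typing import Optional

# Module-level defaults; ff_calculation.py / ff_corrections.py overwrite these
# before the worker functions run.
LOG_FILENAME = "ff.log"
LOG_LEVEL = logging.INFO


def setup_logging(logger: logging.Logger, level: Optional[int] = None) -> logging.Logger:
    """Attach file and console handlers to `logger` (once) and return it."""
    logger.setLevel(level if level is not None else LOG_LEVEL)
    if not logger.handlers:  # avoid stacking duplicate handlers on repeated calls
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        for handler in (logging.FileHandler(LOG_FILENAME), logging.StreamHandler()):
            handler.setFormatter(formatter)
            logger.addHandler(handler)
    return logger
```

With a helper along those lines, log = logging_helper.setup_logging(logger=logging.getLogger(logger)) both fetches the named logger and ensures it carries handlers in whichever worker process it runs, which the previous bare logging.getLogger(logger) call did not guarantee.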
10 changes: 7 additions & 3 deletions FF_calculation/FF_Wjets.py
@@ -12,10 +12,12 @@
import ROOT

import helper.ff_functions as ff_func
import helper.logging_helper as logging_helper
import helper.plotting as plotting
from helper.functions import RuntimeVariables


@logging_helper.grouped_logs(lambda args: f"{args[6]}")
def calculation_Wjets_FFs(
args: Tuple[Any, ...],
) -> Dict[str, Union[Dict[str, str], Dict[str, Dict[str, str]]]]:
@@ -46,7 +48,7 @@ def calculation_Wjets_FFs(
*_, # SRlike_hists, ARlike_hists only used in ttbar calculation
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement
SRlike_hists = dict()
@@ -245,6 +247,7 @@ def calculation_Wjets_FFs(
return ff_func.fill_corrlib_expression(corrlib_exp, splitting.variables, splitting.split)


@logging_helper.grouped_logs(lambda args: f"{args[7]}")
def non_closure_correction(
args: Tuple[Any, ...],
) -> Dict[str, np.ndarray]:
@@ -281,7 +284,7 @@ def non_closure_correction(
for_DRtoSR,
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement
SRlike_hists = dict()
@@ -520,6 +523,7 @@ def non_closure_correction(
return correction_dict


@logging_helper.grouped_logs(lambda args: f"{args[6]}")
def DR_SR_correction(
args: Tuple[Any, ...],
) -> Dict[str, np.ndarray]:
@@ -553,7 +557,7 @@ def DR_SR_correction(
corr_evaluators,
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement
SRlike_hists = dict()
10 changes: 7 additions & 3 deletions FF_calculation/FF_ttbar.py
@@ -11,10 +11,12 @@
import ROOT

import helper.ff_functions as ff_func
import helper.logging_helper as logging_helper
import helper.plotting as plotting
from helper.functions import RuntimeVariables


@logging_helper.grouped_logs(lambda args: f"{args[6]}")
def calculation_ttbar_FFs(
args: Tuple[Any, ...],
) -> Dict[str, Union[str, Dict[str, str]]]:
@@ -49,7 +51,7 @@ def calculation_ttbar_FFs(
ARlike_hists, # ARlike_hists: Dict[str, ROOT.TH1D],
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement from MC
SR_hists = dict()
@@ -176,6 +178,7 @@ def calculation_ttbar_FFs(
return ff_func.fill_corrlib_expression(corrlib_exp, splitting.variables, splitting.split)


@logging_helper.grouped_logs(lambda *args, **kwargs: args[4])
def calculation_FF_data_scaling_factor(
config: Dict[str, Union[str, Dict, List]],
process_conf: Dict[str, Union[str, Dict, List]],
@@ -198,7 +201,7 @@ def calculation_FF_data_scaling_factor(
Tuple of dictionaries containing the histograms for the signal-like and application-like regions

"""
log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF data correction
SRlike_hists = dict()
@@ -311,6 +314,7 @@ def calculation_FF_data_scaling_factor(
return SRlike_hists, ARlike_hists


@logging_helper.grouped_logs(lambda args: f"{args[7]}")
def non_closure_correction(
args: Tuple[Any, ...],
) -> Dict[str, np.ndarray]:
@@ -351,7 +355,7 @@ def non_closure_correction(
*_, # for_DRtoSR not needed for ttbar
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

# init histogram dict for FF measurement
SR_hists = dict()
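The grouped_logs key functions come in two shapes in this diff: workers that receive a single packed args tuple use keys like lambda args: f"{args[6]}", indexing what is presumably the position of the logger name inside that tuple, while functions called with individual positional arguments use lambda *args, **kwargs: args[4]; further down, in ff_calculation.py and ff_corrections.py, the decorator is also applied bare as @logging_helper.grouped_logs. A possible shape of such a decorator, purely illustrative — the real implementation may buffer output to keep each call's log lines together, which this sketch does not do:

```python
# Hypothetical sketch of a grouped_logs decorator compatible with the usages in this diff.
import functools
import logging


def grouped_logs(key_or_func=None):
    """Bracket a function call with start/end log lines under a group label.

    Usable bare (@grouped_logs) or with a key function that maps the wrapped
    function's call arguments to a label, e.g. @grouped_logs(lambda args: f"{args[6]}").
    """
    def make_wrapper(func, key_fn):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            label = key_fn(*args, **kwargs) if key_fn is not None else func.__name__
            log = logging.getLogger(str(label))
            log.info(f"----- {func.__name__} [{label}] started -----")
            try:
                return func(*args, **kwargs)
            finally:
                log.info(f"----- {func.__name__} [{label}] finished -----")
        return wrapper

    # Bare usage: the single argument is already the function to decorate.
    # Heuristic (sufficient for this diff, where every key is a lambda):
    if callable(key_or_func) and key_or_func.__name__ != "<lambda>":
        return make_wrapper(key_or_func, None)
    return lambda func: make_wrapper(func, key_or_func)
```

Since each worker here takes a single packed tuple, key_fn(*args, **kwargs) receives exactly that tuple, which is why lambda args: f"{args[6]}" can index into it directly.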
4 changes: 3 additions & 1 deletion FF_calculation/fractions.py
@@ -10,10 +10,12 @@
import ROOT

import helper.ff_functions as ff_func
import helper.logging_helper as logging_helper
import helper.plotting as plotting
from helper.functions import RuntimeVariables


@logging_helper.grouped_logs(lambda args: f"{args[6]}")
def fraction_calculation(
args: Tuple[Any, ...],
) -> Dict[str, Dict[str, Dict[str, List[float]]]]:
@@ -44,7 +46,7 @@ def fraction_calculation(
*_, # SRlike_hists, ARlike_hists only needed for ttbar
) = args

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))

AR_hists = dict()
SR_hists = dict()
21 changes: 14 additions & 7 deletions ff_calculation.py
@@ -13,6 +13,7 @@
import helper.correctionlib_json as corrlib
import helper.ff_functions as ff_func
import helper.functions as func
import helper.logging_helper as logging_helper
from FF_calculation.fractions import fraction_calculation
from helper.hooks_and_patches import Histo1DPatchedRDataFrame, PassThroughWrapper

@@ -37,6 +38,11 @@
Flag to use intermediary filtered ROOT RDataFrames even if cached versions are available.
""",
)
parser.add_argument(
"--log-level",
default="INFO",
help="Logging level to use. (default: INFO)",
)

FF_CALCULATION_FUNCTIONS = {
"QCD": FF_QCD.calculation_QCD_FFs,
@@ -62,6 +68,7 @@
}


@logging_helper.grouped_logs
def FF_calculation(
config: Dict[str, Union[str, Dict, List]],
sample_paths: List[str],
@@ -134,6 +141,7 @@ def FF_calculation(
return ff_func.fill_corrlib_expression(results, split_collections.split_variables)


@logging_helper.grouped_logs(lambda args: f"ff_calculation.{args[0]}")
def run_ff_calculation(
args: Tuple[str, Dict[str, Union[Dict, List, str]], List[str], str]
) -> Tuple[Tuple, Dict]:
@@ -147,7 +155,7 @@ def run_ff_calculation(
Depending on the "process" either a dictionary with fake factor function expressions or a dictionary with process fraction values
"""
process, config, sample_paths, output_path = args
log = logging.getLogger(f"ff_calculation.{process}")
log = logging_helper.setup_logging(logger=logging.getLogger(f"ff_calculation.{process}"))

log.info(f"Calculating fake factors for the {process} process.")
log.info("-" * 50)
@@ -192,12 +200,9 @@ def run_ff_calculation(
if "process_fractions_subleading" in config:
subcategories = subcategories + ["process_fractions_subleading"]

func.setup_logger(
log_file=save_path_plots + "/ff_calculation.log",
log_name="ff_calculation",
log_level=logging.INFO,
subcategories=subcategories,
)
logging_helper.LOG_FILENAME = save_path_plots + "/ff_calculation.log"
logging_helper.LOG_LEVEL = getattr(logging, args.log_level.upper(), logging.INFO)
log = logging_helper.setup_logging(logger=logging.getLogger("ff_calculation"), level=logging_helper.LOG_LEVEL)

# getting all the ntuple input files
sample_paths = func.get_samples(config=config)
@@ -257,3 +262,5 @@

with open(os.path.join(save_path_plots, "done"), "w") as done_file:
done_file.write("")

log.info("Fake factor calculation finished successfully.")
30 changes: 20 additions & 10 deletions ff_corrections.py
@@ -22,6 +22,7 @@
from ff_calculation import FF_calculation
from helper.ff_evaluators import FakeFactorCorrectionEvaluator, FakeFactorEvaluator, DRSRCorrectionEvaluator
from helper.hooks_and_patches import Histo1DPatchedRDataFrame, PassThroughWrapper
import helper.logging_helper as logging_helper

parser = argparse.ArgumentParser()

@@ -46,6 +47,11 @@
correction calculations.
""",
)
parser.add_argument(
"--log-level",
default="INFO",
help="Logging level to use. (default: INFO)",
)

NON_CLOSURE_CORRECTION_FUNCTIONS = {
"QCD": FF_QCD.non_closure_correction,
@@ -62,6 +68,7 @@
}


@logging_helper.grouped_logs
def non_closure_correction(
config: Dict[str, Union[str, Dict, List]],
corr_config: Dict[str, Union[str, Dict]],
@@ -128,6 +135,7 @@ def non_closure_correction(
return ff_func.fill_corrlib_expression(results, split_collections.split_variables)


@logging_helper.grouped_logs(lambda *args, **kwargs: args[8])
def run_non_closure_correction(
config: Dict[str, Union[str, Dict, List]],
corr_config: Dict[str, Union[str, Dict]],
@@ -160,7 +168,7 @@ def run_non_closure_correction(
Dictionary with the process name as key and a dictionary with the corrections
"""

log = logging.getLogger(logger)
log = logging_helper.setup_logging(logger=logging.getLogger(logger))
corrections = {process: {}}
_chained_DR_SR_process_config = None
if for_DRtoSR:
@@ -295,6 +303,7 @@ def run_non_closure_correction(
return corrections


@logging_helper.grouped_logs(lambda args: f"ff_corrections.{args[0]}")
def run_ff_calculation_for_DRtoSR(
args: Tuple[
str,
@@ -314,7 +323,7 @@ def run_ff_calculation_for_DRtoSR(
If a DR to SR correction is defined for the "process" a dictionary with fake factor function expressions is returned, otherwise None is returned
"""
process, config, corr_config, sample_paths, output_path = args
log = logging.getLogger(f"ff_corrections.{process}")
log = logging_helper.setup_logging(logger=logging.getLogger(f"ff_corrections.{process}"))

if "DR_SR" in corr_config["target_processes"][process]:
ff_config = copy.deepcopy(config)
@@ -340,6 +349,7 @@ def run_ff_calculation_for_DRtoSR(
return args, result


@logging_helper.grouped_logs(lambda args: f"ff_corrections.{args[0]}")
def run_non_closure_correction_for_DRtoSR(
args: Tuple[
str,
@@ -366,7 +376,7 @@ def run_non_closure_correction_for_DRtoSR(
"""

process, config, corr_config, sample_paths, output_path = args
log = logging.getLogger(f"ff_corrections.{process}")
log = logging_helper.setup_logging(logger=logging.getLogger(f"ff_corrections.{process}"))
corrections = {process: dict()}

process_config = deepcopy(corr_config["target_processes"][process])
@@ -401,6 +411,7 @@ def run_non_closure_correction_for_DRtoSR(
return args, corrections


@logging_helper.grouped_logs(lambda args: f"ff_corrections.{args[0]}")
def run_correction(
args,
) -> Dict[str, Dict[str, Any]]:
Expand All @@ -426,7 +437,7 @@ def run_correction(
save_path,
) = args

log = logging.getLogger(f"ff_corrections.{process}")
log = logging_helper.setup_logging(logger=logging.getLogger(f"ff_corrections.{process}"))
corrections = {process: dict()}

var_dependences = [config["target_processes"][process]["var_dependence"]] + list(config["target_processes"][process]["split_categories"].keys())
@@ -590,12 +601,9 @@ def run_correction(
func.configured_yaml.dump(corr_config, config_file)

# start output logging
func.setup_logger(
log_file=save_path + "/ff_corrections.log",
log_name="ff_corrections",
log_level=logging.INFO,
subcategories=corr_config["target_processes"].keys(),
)
logging_helper.LOG_FILENAME = save_path + "/ff_corrections.log"
logging_helper.LOG_LEVEL = getattr(logging, args.log_level.upper(), logging.INFO)
log = logging_helper.setup_logging(logger=logging.getLogger("ff_corrections"), level=logging_helper.LOG_LEVEL)

# getting all the input files
sample_paths = func.get_samples(config=config)
@@ -738,3 +746,5 @@ def run_correction(

with open(os.path.join(save_path, "done"), "w") as done_file:
done_file.write("")

log.info("Fake factor correction calculation finished successfully.")