import argparse
import copy  # explicit import: copy.deepcopy is used below (was previously only available via `from verifiers import *`, if at all)
import glob
import os
import pickle
import sys
import time
from itertools import repeat
from multiprocessing import Pool, cpu_count, set_start_method
from os.path import join
from pathlib import Path

from colorama import Fore, Back, Style

from verifiers import *


def minimize_model(failed_model_file, output_dir):
    """Minimize the constraints of a failed/crashed model stored in a pickle file.

    The pickle holds a dict ("error data") with the following keys:
        solver: the used solver
        verifier: the verifier that got used
        mutations_per_model: the amount of mutations that were used
        seed: the used seed
        error: a dict containing:
            type: the type of error that occurred
            model: the newly mutated model that failed/crashed
            originalmodel: the name of the model file that was used
            mutators: a list with executed mutations
            constraints: the constraints that made the model fail/crash

    For every top-level constraint, the error is re-run with that one constraint
    left out; only constraints whose removal still reproduces the error are kept.
    The minimized error data is written to ``output_dir`` both as a pickle and as
    a human-readable ``.txt`` report.

    Args:
        failed_model_file: path to a single pickle file with the error data.
        output_dir: existing directory where the minimized artifacts are written.

    Returns:
        A one-line summary string describing the size reduction.
    """
    with open(failed_model_file, 'rb') as fpcl:
        error_data = pickle.loads(fpcl.read())
        original_error = error_data["error"]
        original_cons = error_data["error"]["constraints"]
        amount_of_original_cons = len(error_data["error"]["constraints"])

        new_cons = []

        for con in toplevel_list(original_cons):
            # Build the trial error dict WITHOUT mutating the shared original.
            # (The previous code did `original_error["constraints"].remove(con)`
            # and never restored it, so each iteration was tested against the
            # original constraints minus ALL constraints visited so far, not
            # just minus the current one.)
            new_error_dict = copy.deepcopy(original_error)
            # Identity-based filter mirrors list.remove()'s identity fast-path
            # and avoids relying on constraint __eq__ (which expression objects
            # typically overload to build new expressions).
            new_error_dict["constraints"] = [c for c in original_cons if c is not con]

            vrf_cls = lookup_verifier(error_data['verifier'])
            verifier = vrf_cls(solver=error_data['solver'],
                               mutations_per_model=error_data["mutations_per_model"],
                               exclude_dict={},
                               # NOTE(review): time.time()*3600 passes (epoch seconds * 3600)
                               # as the limit, which is effectively "no limit"; a plain
                               # duration (e.g. 3600) looks intended — confirm against the
                               # verifier API before changing.
                               time_limit=time.time() * 3600,
                               seed=error_data["seed"]
                               )
            new_error = verifier.rerun(new_error_dict)

            # NOTE(review): polarity assumed from the original comment — rerun()
            # presumably returns a non-None error when the failure still occurs,
            # meaning this constraint participates in the failure and is kept.
            if new_error is not None:
                # if we still get the error than the constraint is responsible so we keep it
                new_cons.append(con)

        # Copy the minimized constraint set back into the error data (and into
        # the stored model, when one is present).
        error_data["error"]["constraints"] = new_cons
        if "model" in error_data["error"]:
            new_model = error_data["error"]["model"]
            new_model.constraints = new_cons
            error_data["error"]["model"] = new_model

        # Persist both a pickle of the minimized error data and a readable report.
        with open(join(output_dir, "minimized_" + os.path.basename(failed_model_file)), "wb") as ff:
            pickle.dump(error_data, file=ff)
        with open(join(output_dir, "minimized_" + Path(os.path.basename(failed_model_file)).stem + ".txt"), "w") as ff:
            ff.write(create_error_output_text(error_data))

        return f"minimized {failed_model_file} with {amount_of_original_cons} constraints to model with {len(new_cons)} constraints"


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description = "A Python script for minimizing the constraint for a failed model. It will only keep the constraints that caused the error")

    def check_positive(value):
        """
        Small helper function used in the argparser for checking if the input values are positive or not
        """
        ivalue = int(value)
        if ivalue <= 0:
            raise argparse.ArgumentTypeError("%s is an invalid positive int value" % value)
        return ivalue

    parser.add_argument("-m", "--failed_model_file", help = "The path to a single pickle file or the path to a directory containing multiple pickle files", required=True, type=str)
    parser.add_argument("-o", "--output-dir", help = "The directory to store the output (will be created if it does not exist).", required=False, type=str, default="minimizer_output")
    parser.add_argument("-p", "--amount-of-processes", help = "The amount of processes that will be used to run the tests", required=False, default=cpu_count()-1, type=check_positive)  # the -1 is for the main process
    args = parser.parse_args()

    # Resolve both paths against the current working directory so relative
    # arguments behave predictably regardless of how the script is invoked.
    current_working_directory = os.getcwd()
    output_dir = os.path.join(current_working_directory, args.output_dir)
    os.makedirs(output_dir, exist_ok=True)
    failed_model_file = os.path.join(current_working_directory, args.failed_model_file)

    if os.path.isfile(failed_model_file):
        # Single pickle file: minimize it in-process.
        result = minimize_model(failed_model_file, output_dir)
        print(Fore.LIGHTBLUE_EX +"\n"+result)
        print(Style.RESET_ALL+f"stored minimized model in {output_dir}")

    elif os.path.isdir(failed_model_file):
        # Directory of pickles: fan the work out over a process pool.
        # "spawn" keeps worker start-up consistent across platforms.
        set_start_method("spawn")
        print("detected directory")
        files = glob.glob(failed_model_file+"/*.pickle")
        with Pool(args.amount_of_processes) as pool:
            try:
                print("rerunning failed models in directory",flush=True)
                results = pool.starmap(minimize_model, zip(files, repeat(output_dir)))
                print("\n")
                for result in results:
                    print(Fore.LIGHTBLUE_EX + result)
                print(Style.RESET_ALL+"\nsucessfully minimized all the models",flush=True )
            except KeyboardInterrupt:
                # Best-effort: allow the user to abort the batch without a traceback.
                pass
            finally:
                print("quiting the application",flush=True )
                print(Style.RESET_ALL+f"stored minimized models in {output_dir}")
    else:
        print(Fore.YELLOW +"failed model file not found")
    print(Style.RESET_ALL)