diff --git a/.gitignore b/.gitignore index 49c520a..4aa6a26 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,7 @@ build/* __pycache__/* .idea/* -dist/* \ No newline at end of file +dist/* +venv/ +.DS_Store +__pycache__ diff --git a/docs/_guides/propythia_descriptors_2021.pdf b/docs/_guides/propythia_descriptors_2021.pdf index 78f3481..a3aa512 100644 Binary files a/docs/_guides/propythia_descriptors_2021.pdf and b/docs/_guides/propythia_descriptors_2021.pdf differ diff --git a/docs/_guides/propythia_user_guide_2021.pdf b/docs/_guides/propythia_user_guide_2021.pdf index accc72e..8bbb5f6 100644 Binary files a/docs/_guides/propythia_user_guide_2021.pdf and b/docs/_guides/propythia_user_guide_2021.pdf differ diff --git a/requirements b/requirements_dna similarity index 100% rename from requirements rename to requirements_dna diff --git a/src/propythia/DNA/.gitignore b/src/propythia/DNA/.gitignore new file mode 100644 index 0000000..74f2a02 --- /dev/null +++ b/src/propythia/DNA/.gitignore @@ -0,0 +1,7 @@ +__pycache__/ +.ipynb_checkpoints/ +.mypy_cache/ +.vscode/ +datasets/ +src_old +backup/ \ No newline at end of file diff --git a/src/propythia/DNA/README.md b/src/propythia/DNA/README.md new file mode 100644 index 0000000..b121583 --- /dev/null +++ b/src/propythia/DNA/README.md @@ -0,0 +1,21 @@ +# Note + +## Machine Learning Part + +* `data` is where the physicochemical indices are stored, which are used to calculate some descriptors. +* `descriptors.py` is the file that contains the calculation of all descriptors for a given sequence. +* `calculate_features.py` is a script that calculates all descriptors for an entire dataset (with the help of `descriptors.py`) and creates a dataframe with all the descriptors. +* `notebooks/quick-start-ML.ipynb` is a notebook that explains how to perform every step of the developed modules. 
It includes data reading and validation, calculation of descriptors from sequences, descriptors processing and using processed descriptors to train ML models (already implemented in ProPythia). + +## Deep Learning Part + +* `deep_ml.py` runs a combination of set hyperparameters or performs hyperparameter tuning for the given model, feature mode, and data directory. +* `outputs` is a directory where the output of the hyperparameter tuning is stored. Only the filtered results with the score of each model is stored in the directory. +* `src` is a directory where the source code of the entire DL pipeline is stored. +* `essential_genes` is a directory where all the information about the essential genes is stored since it was needed a lot of data preprocessing to build the dataset. +* `config.json` is a file that contains the configuration of the entire DL pipeline. + +## Both Parts + +* `utils.py` is a file that contains some useful functions. +* `read_sequence.py` is the file that contains functions to read and validate DNA sequences. They can be read from a *CSV* file, a *FASTA* file, or from a single string. diff --git a/src/propythia/DNA/calculate_features.py b/src/propythia/DNA/calculate_features.py new file mode 100644 index 0000000..b8a5c01 --- /dev/null +++ b/src/propythia/DNA/calculate_features.py @@ -0,0 +1,133 @@ +import pandas as pd +from typing import List +from descriptors import DNADescriptor + +def _calculate_descriptors(data: pd.DataFrame, descriptor_list: List) -> pd.DataFrame: + """ + From a dataset of sequences and labels, this function calculates the descriptors and returns a dataframe with them. + The user can also specify which descriptors to calculate. 
+ """ + list_feature = [] + count = 0 + for seq in data['sequence']: + res = {'sequence': seq} + dna = DNADescriptor(seq) + features = dna.get_descriptors(descriptor_list) + res.update(features) + list_feature.append(res) + + # print progress every 100 sequences + if count % 100 == 0: + print(count, '/', len(data)) + + count += 1 + print("Done!") + df = pd.DataFrame(list_feature) + return df + + +def _process_lists(fps_x, field): + """ + A helper function to normalize lists. + """ + l = fps_x[field].to_list() + new_df = pd.DataFrame(l) + new_df.columns = [str(field) + "_" + str(i) for i in new_df.columns] + fps_x.drop(field, axis=1, inplace=True) + return new_df + + +def _process_lists_of_lists(fps_x, field): + """ + A helper function to normalize lists of lists. + """ + l = fps_x[field].to_list() + new_df = pd.DataFrame(l) + new_df.columns = [str(field) + "_" + str(i) for i in new_df.columns] + empty_val = {} if field == "enhanced_nucleic_acid_composition" else [] + small_processed = [] + for f in new_df.columns: + col = [empty_val if i is None else i for i in new_df[f].to_list()] + sub = pd.DataFrame(col) + sub.columns = [str(f) + "_" + str(i) for i in sub.columns] + small_processed.append(sub) + fps_x.drop(field, axis=1, inplace=True) + return small_processed + + + +def normalization(fps_x, descriptor_list): + """ + Because the model cannot process data in dictionaries and lists, the descriptors that produce these forms must still be normalized. + + To normalize the data, dicts and lists need to "explode" into more columns. + + E.g. dicts: + + | descriptor_hello | + | ---------------- | + | {'a': 1, 'b': 2} | + + will be transformed into: + + | descriptor_hello_a | descriptor_hello_b | + | ------------------ | ------------------ | + | 1 | 2 | + + E.g. 
lists: + + | descriptor_hello | + | ---------------- | + | [1, 2, 3] | + + will be transformed into: + + | descriptor_hello_0 | descriptor_hello_1 | descriptor_hello_2 | + | ------------------ | ------------------ | ------------------ | + | 1 | 2 | 3 | + """ + lists = ["nucleic_acid_composition", "dinucleotide_composition", "trinucleotide_composition", + "k_spaced_nucleic_acid_pairs", "kmer", "PseDNC", "PseKNC", "DAC", "DCC", "DACC", "TAC", "TCC", "TACC"] + lists_of_lists = [ + "accumulated_nucleotide_frequency" + ] + + # update to be normalized lists with only columns the user wants + if(descriptor_list != []): + lists = [l for l in lists if l in descriptor_list] + lists_of_lists = [l for l in lists_of_lists if l in descriptor_list] + + small_processed = [] + for i in lists: + new_df = _process_lists(fps_x, i) + small_processed.append(new_df) + + for i in lists_of_lists: + smaller_processed = _process_lists_of_lists(fps_x, i) + small_processed += smaller_processed + + new_fps_x = pd.concat([fps_x, *small_processed], axis=1) + return new_fps_x + + +def calculate_and_normalize(data: pd.DataFrame, descriptor_list: list = []) -> pd.DataFrame: + """ + This function calculates the descriptors and normalizes the data all at once from a dataframe of sequences and labels. The user can also specify which descriptors to calculate. 
+ """ + features = _calculate_descriptors(data, descriptor_list) + if 'label' in data: + fps_y = data['label'] + else: + fps_y = None + fps_x = features.loc[:, features.columns != 'label'] + fps_x = fps_x.loc[:, fps_x.columns != 'sequence'] + fps_x = normalization(fps_x, descriptor_list) + return fps_x, fps_y + +if __name__ == "__main__": + from read_sequence import ReadDNA + reader = ReadDNA() + filename = 'datasets/primer/dataset.csv' + data = reader.read_csv(filename=filename, with_labels=True) + fps_x, fps_y = calculate_and_normalize(data) + print(fps_x) \ No newline at end of file diff --git a/src/propythia/DNA/config.json b/src/propythia/DNA/config.json new file mode 100644 index 0000000..7055b0c --- /dev/null +++ b/src/propythia/DNA/config.json @@ -0,0 +1,34 @@ +{ + "combination":{ + "model_label": "bi_lstm", + "mode": "chemical", + "data_dir": "essential_genes_100k_cut", + "class_weights": [1.0, 1.0] + }, + "do_tuning": true, + "fixed_vals":{ + "epochs": 500, + "optimizer_label": "adam", + "loss_function": "cross_entropy", + "patience": 2, + "output_size": 2, + "cpus_per_trial": 2, + "gpus_per_trial": 2, + "num_samples": 5, + "kmer_one_hot": 2 + }, + "hyperparameters": { + "hidden_size": 32, + "lr": 1e-3, + "batch_size": 32, + "dropout": 0.35, + "num_layers": 1 + }, + "hyperparameter_search_space": { + "hidden_size": [32, 64, 128], + "lr": [1e-4, 1e-3, 1e-2], + "batch_size": [16, 32, 64], + "dropout": [0.2, 0.3, 0.4, 0.5], + "num_layers": [1, 2, 3] + } +} diff --git a/src/propythia/DNA/data/mmc3.data b/src/propythia/DNA/data/mmc3.data new file mode 100644 index 0000000..4b6be3a Binary files /dev/null and b/src/propythia/DNA/data/mmc3.data differ diff --git a/src/propythia/DNA/data/mmc4.data b/src/propythia/DNA/data/mmc4.data new file mode 100644 index 0000000..73dbce1 Binary files /dev/null and b/src/propythia/DNA/data/mmc4.data differ diff --git a/src/propythia/DNA/deep_ml.py b/src/propythia/DNA/deep_ml.py new file mode 100644 index 0000000..3fe8658 --- 
/dev/null +++ b/src/propythia/DNA/deep_ml.py @@ -0,0 +1,47 @@ +""" +######################################################################## +Runs a combination of hyperparameters or performs hyperparameter tuning +for the given model, feature mode, and data directory. +######################################################################## +""" + +import torch +import os +from src.prepare_data import prepare_data +from src.test import test +from src.hyperparameter_tuning import hyperparameter_tuning +from src.train import traindata +from utils import print_metrics, read_config + +os.environ["CUDA_VISIBLE_DEVICES"] = '1,2,3,4,5' +device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') + +def perform(config): + if config['do_tuning']: + hyperparameter_tuning(device, config) + else: + model_label = config['combination']['model_label'] + mode = config['combination']['mode'] + data_dir = config['combination']['data_dir'] + class_weights = config['combination']['class_weights'] + batch_size = config['hyperparameters']['batch_size'] + kmer_one_hot = config['fixed_vals']['kmer_one_hot'] + hyperparameters = config['hyperparameters'] + + trainloader, testloader, validloader, input_size, sequence_length = prepare_data( + data_dir=data_dir, + mode=mode, + batch_size=batch_size, + k=kmer_one_hot, + ) + + # train the model + model = traindata(hyperparameters, device, config, trainloader, validloader, input_size, sequence_length) + + # test the model + metrics = test(device, model, testloader) + print_metrics(model_label, mode, data_dir, kmer_one_hot, class_weights, metrics) + +if __name__ == '__main__': + config = read_config(device) + perform(config) \ No newline at end of file diff --git a/src/propythia/DNA/descriptors.py b/src/propythia/DNA/descriptors.py new file mode 100644 index 0000000..d06c2c3 --- /dev/null +++ b/src/propythia/DNA/descriptors.py @@ -0,0 +1,596 @@ +""" 
+########################################################################################## + +A class used for computing different types of DNA descriptors for a single DNA sequence. +It contains descriptors from packages iLearn, iDNA4mC, rDNAse, ... + +Authors: João Nuno Abreu +Date: 02/2022 +Email: + +########################################################################################## +""" + +from utils import * +from functools import reduce +from typing import Dict, List, Tuple + +class DNADescriptor: + """ + The Descriptor class collects all descriptor calculation functions into a simple class. + It returns the features in a dictionary object + """ + + def __init__(self, dna_sequence): + # it is assumed that the sequence is a string with valid alphabet + self.dna_sequence = dna_sequence + + + def get_length(self) -> int: + """ + Returns the length of the sequence. From: https://pubmed.ncbi.nlm.nih.gov/31067315/ + Parameters + ---------- + + Returns + ------- + int + The length of the sequence. + """ + return len(self.dna_sequence) + + def get_gc_content(self) -> float: + """ + Returns the GC content of the sequence. + Parameters + ---------- + + Returns + ------- + float + The GC content of the sequence. + """ + gc_content = 0 + for letter in self.dna_sequence: + if letter == 'G' or letter == 'C': + gc_content += 1 + return round(gc_content / self.get_length(), 3) + + def get_at_content(self) -> float: + """ + Returns the AT content of the sequence. + Parameters + ---------- + + Returns + ------- + float + The AT content of the sequence. + """ + at_content = 0 + for letter in self.dna_sequence: + if letter == 'A' or letter == 'T': + at_content += 1 + return round(at_content / self.get_length(), 3) + + # ----------------------- NUCLEIC ACID COMPOSITION ----------------------- # + + def get_nucleic_acid_composition(self, normalize: bool = True) -> Dict[str, float]: + """ + Calculates the Nucleic Acid Composition of the sequence. 
From: https://pubmed.ncbi.nlm.nih.gov/31067315/ + Parameters + ---------- + normalize : bool (default=True) + Default value is False. If True, this method returns the frequencies of each nucleic acid. + Returns + ------- + Dict of str:float + Dictionary with values of nucleic acid composition + """ + res = make_kmer_dict(1) + for letter in self.dna_sequence: + res[letter] += 1 + + if normalize: + res = normalize_dict(res) + return res + + def get_dinucleotide_composition(self, normalize: bool = True) -> Dict[str, float]: + """ + Calculates the Dinucleotide Composition of the sequence. From: https://pubmed.ncbi.nlm.nih.gov/31067315/ + Parameters + ---------- + normalize : bool (default=True) + Default value is False. If True, this method returns the frequencies of each dinucleotide. + Returns + ------- + Dict of str:float + Dictionary with values of dinucleotide composition + """ + res = make_kmer_dict(2) + for i in range(len(self.dna_sequence) - 1): + dinucleotide = self.dna_sequence[i:i+2] + res[dinucleotide] += 1 + if normalize: + res = normalize_dict(res) + return res + + def get_trinucleotide_composition(self, normalize: bool = True) -> Dict[str, float]: + """ + Calculates the Trinucleotide Composition of the sequence. From: https://pubmed.ncbi.nlm.nih.gov/31067315/ + Parameters + ---------- + normalize : bool (default=True) + Default value is False. If True, this method returns the frequencies of each trinucleotide. + Returns + ------- + Dict of str:float + Dictionary with values of trinucleotide composition + """ + res = make_kmer_dict(3) + for i in range(len(self.dna_sequence) - 2): + trinucleotide = self.dna_sequence[i:i+3] + res[trinucleotide] += 1 + + if normalize: + res = normalize_dict(res) + return res + + def get_k_spaced_nucleic_acid_pairs(self, k: int = 0, normalize: bool = True) -> Dict[str, float]: + """ + Calculates the K-Spaced Nucleic Acid Pairs of the sequence. 
From: https://pubmed.ncbi.nlm.nih.gov/31067315/ + Parameters + ---------- + k : int (default=0) + The number of nucleic acids to pair together. + normalize: bool (default=True) + Default value is False. If True, this method returns the frequencies of each k-spaced nucleic acid pair. + Returns + ------- + Dict of str:float + Dictionary with values of k-spaced nucleic acid pairs + """ + res = make_kmer_dict(2) + for i in range(len(self.dna_sequence) - k - 1): + k_spaced_nucleic_acid_pair = self.dna_sequence[i] + \ + self.dna_sequence[i+k+1] + res[k_spaced_nucleic_acid_pair] += 1 + + if normalize: + res = normalize_dict(res) + return res + + def get_kmer(self, k: int = 2, normalize: bool = True, reverse: bool = False) -> Dict[str, float]: + """ + Calculates the K-Mer of the sequence. From: https://pubmed.ncbi.nlm.nih.gov/31067315/, https://rdrr.io/cran/rDNAse/ + Parameters + ---------- + k : int (default=2) + The number of nucleic acids to pair together. + normalize: bool (default=True) + Default value is False. If True, this method returns the frequencies of all kmers. + reverse : bool (default=False) + Whether to calculate the reverse complement kmer or not. 
+ Returns + ------- + Dict of str:float + Dictionary with values of kmer + """ + + res = make_kmer_dict(k) + + for i in range(len(self.dna_sequence) - k + 1): + res[self.dna_sequence[i:i+k]] += 1 + + if reverse: + for kmer, _ in sorted(res.items(), key=lambda x: x[0]): + reverse_val = "".join([pairs[i] for i in kmer[::-1]]) + + # calculate alphabet order between kmer and reverse compliment + if(kmer < reverse_val): + smaller = kmer + bigger = reverse_val + else: + smaller = reverse_val + bigger = kmer + + # create in dict if they dont exist + if(smaller not in res): + res[smaller] = 0 + if(bigger not in res): + res[bigger] = 0 + + if(smaller != bigger): + # add to dict + res[smaller] += res[bigger] + # delete from dict + del res[bigger] + + if normalize: + res = normalize_dict(res) + + return res + + def get_accumulated_nucleotide_frequency(self, normalize: bool = True) -> List[Dict[str, float]]: + """ + Calculates the Accumulated Nucleotide Frequency of the sequence at 25%, 50% and 75%. From: https://pubmed.ncbi.nlm.nih.gov/31067315/, https://www.nature.com/articles/srep13859?proof=t%252Btarget%253D + Parameters + ---------- + normalize: bool (default=True) + Default value is False. If True, this method returns the frequencies of all accumulated nucleotide frequencies. + Returns + ------- + list of dicts of str:float + The Accumulated Nucleotide Frequency of the sequence at 25%, 50% and 75%. 
+ """ + res = [] + d1 = make_kmer_dict(1) + d2 = make_kmer_dict(1) + d3 = make_kmer_dict(1) + + for letter in self.dna_sequence[:normal_round(len(self.dna_sequence) * 0.25)]: + d1[letter] += 1 + + for letter in self.dna_sequence[:normal_round(len(self.dna_sequence) * 0.50)]: + d2[letter] += 1 + + for letter in self.dna_sequence[:normal_round(len(self.dna_sequence) * 0.75)]: + d3[letter] += 1 + res = [d1,d2,d3] + + if normalize: + res = [normalize_dict(d1),normalize_dict(d2),normalize_dict(d3)] + return res + + # -------------------------- Autocorrelation -------------------------- # + + def get_DAC(self, phyche_index: List[str] = ["Twist", "Tilt"], nlag: int = 2, all_property: bool = False, extra_phyche_index: Dict[str, List[Tuple[str, float]]] = None) -> List[float]: + """ + Calculates the Dinucleotide Based Auto Covariance of the sequence. CODE FROM repDNA (https://github.com/liufule12/repDNA) + Parameters + ---------- + phyche_index : list of str (default=["Twist", "Tilt"]) + The physicochemical properties list. + nlag : int (default=2) + An integer larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest DNA + sequence in the dataset). It represents the distance between two dinucleotides. + all_property : bool (default=False) + If True, returns all properties. + extra_phyche_index : dict of str and list of float (default=None) + The extra phyche index to use for the calculation. It means user-defined phyche_index. The key is + dinucleotide, and its corresponding value is a list with a pair of physicochemical indices and its new + value. + Returns + ------- + list of float + The Dinucleotide Based Auto Covariance of the sequence. 
+ """ + k = 2 + phyche_value = ready_acc(k, phyche_index, all_property, extra_phyche_index) + return make_ac_vector([self.dna_sequence], nlag, phyche_value, k)[0] + + def get_DCC(self, phyche_index: List[str] = ["Twist", "Tilt"], nlag: int = 2, all_property: bool = False, extra_phyche_index: Dict[str, List[Tuple[str, float]]] = None) -> List[float]: + """ + Calculates the Dinucleotide Based Cross Covariance (DCC) of the sequence. CODE FROM repDNA (https://github.com/liufule12/repDNA) + Parameters + ---------- + phyche_index : list of str (default=["Twist", "Tilt"]) + The physicochemical properties list. + nlag : int (default=2) + An integer larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest DNA + sequence in the dataset). It represents the distance between two dinucleotides. + all_property : bool (default=False) + If True, returns all properties. + extra_phyche_index : dict of str and list of float (default=None) + The extra phyche index to use for the calculation. It means user-defined phyche_index. The key is + dinucleotide, and its corresponding value is a list with a pair of physicochemical indices and its new + value. + Returns + ------- + list of float + The Dinucleotide Based Cross Covariance of the sequence. + """ + k = 2 + phyche_value = ready_acc(k, phyche_index, all_property, extra_phyche_index) + return make_cc_vector([self.dna_sequence], nlag, phyche_value, k)[0] + + def get_DACC(self, phyche_index: List[str] = ["Twist", "Tilt"], nlag: int = 2, all_property: bool = False, extra_phyche_index: Dict[str, List[Tuple[str, float]]] = None) -> List[float]: + """ + Calculates the Dinucleotide Based Auto Cross Covariance of the sequence. CODE FROM repDNA (https://github.com/liufule12/repDNA) + Parameters + ---------- + phyche_index : list of str, optional (default=["Twist", "Tilt"]) + The physicochemical properties list. 
+ nlag : int, optional (default=2) + An integer larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest DNA + sequence in the dataset). It represents the distance between two dinucleotides. + all_property : bool, optional (default=False) + If True, returns all properties. + extra_phyche_index : dict of str and list of float, optional (default=None) + The extra phyche index to use for the calculation. It means user-defined phyche_index. The key is + dinucleotide, and its corresponding value is a list with a pair of physicochemical indices and it's new + value. + Returns + ------- + list of float + The Dinucleotide Based Auto Cross Covariance of the sequence. + """ + k = 2 + phyche_value = ready_acc(k, phyche_index, all_property, extra_phyche_index) + zipped = list(zip(make_ac_vector([self.dna_sequence], nlag, phyche_value, k), + make_cc_vector([self.dna_sequence], nlag, phyche_value, k))) + vector = [reduce(lambda x, y: x + y, e) for e in zipped] + + return vector[0] + + def get_TAC(self, phyche_index: List[str] = ["Dnase I", "Nucleosome"], nlag: int = 2, all_property: bool = False, extra_phyche_index: Dict[str, List[Tuple[str, float]]] = None) -> List[float]: + """ + Calculates the Trinucleotide Based Auto Covariance of the sequence. CODE FROM repDNA (https://github.com/liufule12/repDNA) + Parameters + ---------- + phyche_index : list of str, optional (default=["Dnase I", "Nucleosome"]) + The physicochemical properties list. + nlag : int, optional (default=3) + An integer larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest DNA + sequence in the dataset). It represents the distance between two dinucleotides. + all_property : bool, optional (default=False) + If True, returns all properties. + extra_phyche_index : dict of str and list of float, optional (default=None) + The extra phyche index to use for the calculation. It means user-defined phyche_index. 
The key is + trinucleotide, and its corresponding value is a list with a pair of physicochemical indices and it's new + value. + Returns + ------- + list of float + The Trinucleotide Based Auto Covariance of the sequence. + """ + k = 3 + phyche_value = ready_acc(k, phyche_index, all_property, extra_phyche_index) + return make_ac_vector([self.dna_sequence], nlag, phyche_value, k)[0] + + def get_TCC(self, phyche_index: List[str] = ["Dnase I", "Nucleosome"], nlag: int = 2, all_property: bool = False, extra_phyche_index: Dict[str, List[Tuple[str, float]]] = None) -> List[float]: + """ + Calculates the Trinucleotide Based Auto Covariance of the sequence. CODE FROM repDNA (https://github.com/liufule12/repDNA) + Parameters + ---------- + phyche_index : list of str, optional (default=None) + The physicochemical properties list. + nlag : int, optional (default=3) + An integer larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest DNA + sequence in the dataset). It represents the distance between two dinucleotides. + all_property : bool, optional (default=False) + If True, returns all properties. + extra_phyche_index : dict of str and list of float, optional (default=None) + The extra phyche index to use for the calculation. It means user-defined phyche_index. The key is + trinucleotide, and its corresponding value is a list with a pair of physicochemical indices and it's new + value. + Returns + ------- + list of float + The Trinucleotide Based Cross Covariance of the sequence. + """ + k = 3 + phyche_value = ready_acc(k, phyche_index, all_property, extra_phyche_index) + + return make_cc_vector([self.dna_sequence], nlag, phyche_value, k)[0] + + def get_TACC(self, phyche_index: List[str] = ["Dnase I", "Nucleosome"], nlag: int = 2, all_property: bool = False, extra_phyche_index: Dict[str, List[Tuple[str, float]]] = None) -> List[float]: + """ + Calculates the Dinucleotide Based Auto Cross Covariance of the sequence. 
CODE FROM repDNA (https://github.com/liufule12/repDNA) + Parameters + ---------- + phyche_index : list of str, optional (default=None) + The physicochemical properties list. + nlag : int, optional (default=2) + An integer larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest DNA + sequence in the dataset). It represents the distance between two dinucleotides. + all_property : bool, optional (default=False) + If True, returns all properties. + extra_phyche_index : dict of str and list of float, optional (default=None) + The extra phyche index to use for the calculation. It means user-defined phyche_index. The key is + trinucleotide, and its corresponding value is a list with a pair of physicochemical indices and it's new + value. + Returns + ------- + list of float + The Dinucleotide Based Auto Cross Covariance of the sequence. + """ + k = 3 + phyche_value = ready_acc(k, phyche_index, all_property, extra_phyche_index) + + zipped = list(zip(make_ac_vector([self.dna_sequence], nlag, phyche_value, k), + make_cc_vector([self.dna_sequence], nlag, phyche_value, k))) + vector = [reduce(lambda x, y: x + y, e) for e in zipped] + + return vector[0] + + # -------------------- PSEUDO NUCLEOTIDE COMPOSITION -------------------- # + + def get_PseDNC(self, lamda: int = 3, w: float = 0.05) -> Dict[str, float]: + """ + Calculates the Pseudo Dinucleotide Composition of the sequence. From: https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8138820/ + Parameters + ---------- + lamda : int, optional (default=3) + Value larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest sequence + in the dataset). It represents the highest counted rank (or tier) of the correlation along a DNA sequence. + w : float, optional (default=0.05) + The weight factor ranged from 0 to 1. + Returns + ------- + Dict of str:float + The Pseudo Dinucleotide Composition of the sequence. 
+ """ + d = { + 'AA': [0.06, 0.5, 0.27, 1.59, 0.11, -0.11], + 'AC': [1.50, 0.50, 0.80, 0.13, 1.29, 1.04], + 'AG': [0.78, 0.36, 0.09, 0.68, -0.24, -0.62], + 'AT': [1.07, 0.22, 0.62, -1.02, 2.51, 1.17], + 'CA': [-1.38, -1.36, -0.27, -0.86, -0.62, -1.25], + 'CC': [0.06, 1.08, 0.09, 0.56, -0.82, 0.24], + 'CG': [-1.66, -1.22, -0.44, -0.82, -0.29, -1.39], + 'CT': [0.78, 0.36, 0.09, 0.68, -0.24, -0.62], + 'GA': [-0.08, 0.5, 0.27, 0.13, -0.39, 0.71], + 'GC': [-0.08, 0.22, 1.33, -0.35, 0.65, 1.59], + 'GG': [0.06, 1.08, 0.09, 0.56, -0.82, 0.24], + 'GT': [1.50, 0.50, 0.80, 0.13, 1.29, 1.04], + 'TA': [-1.23, -2.37, -0.44, -2.24, -1.51, -1.39], + 'TC': [-0.08, 0.5, 0.27, 0.13, -0.39, 0.71], + 'TG': [-1.38, -1.36, -0.27, -0.86, -0.62, -1.25], + 'TT': [0.06, 0.5, 0.27, 1.59, 0.11, -0.11] + } + + counts = make_kmer_dict(2) + for i in range(len(self.dna_sequence) - 1): + dinucleotide = self.dna_sequence[i:i + 2] + counts[dinucleotide] += 1 + + fk = {k: v / sum(counts.values()) for k, v in counts.items()} + all_possibilites = make_kmer_list(2) + + thetas = [] + L = len(self.dna_sequence) + for i in range(lamda): + big_somatorio = 0.0 + for j in range(L-i-2): + somatorio = 0.0 + first_dinucleotide = self.dna_sequence[j:j+2] + second_dinucleotide = self.dna_sequence[j+i+1:j+i+1+2] + for k in range(6): + val = (d[first_dinucleotide][k] - + d[second_dinucleotide][k])**2 + somatorio += val + + big_somatorio += somatorio/6 + + # Theta calculation + if(L-i-2 == 0): + theta = 0.0 + else: + theta = big_somatorio / (L-i-2) + thetas.append(theta) + + # -------------------------------------------- + + res = {} + for dinucleotide in all_possibilites: + res[dinucleotide] = round(fk[dinucleotide] / (1 + w * sum(thetas)), 3) + + for i in range(lamda): + res["lambda."+str(i+1)] = round(w * thetas[i] / + (1 + w * sum(thetas)), 3) + + return res + + def get_PseKNC(self, k: int = 3, lamda: int = 1, w: float = 0.5) -> Dict[str, float]: + """ + Calculates the Pseudo K Composition of the sequence. 
From: https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8138820/ + Parameters + ---------- + k : int, optional (default=3) + Value larger than 0 represents the k-tuple. + lamda : int, optional (default=1) + Value larger than or equal to 0 and less than or equal to L-2 (L means the length of the shortest sequence + in the dataset). It represents the highest counted rank (or tier) of the correlation along a DNA sequence. + w : float, optional (default=0.5) + The weight factor ranged from 0 to 1. + Returns + ------- + Dict of str:float + The Pseudo K Composition of the sequence. + """ + d = { + 'AA': [0.06, 0.5, 0.27, 1.59, 0.11, -0.11], + 'AC': [1.50, 0.50, 0.80, 0.13, 1.29, 1.04], + 'AG': [0.78, 0.36, 0.09, 0.68, -0.24, -0.62], + 'AT': [1.07, 0.22, 0.62, -1.02, 2.51, 1.17], + 'CA': [-1.38, -1.36, -0.27, -0.86, -0.62, -1.25], + 'CC': [0.06, 1.08, 0.09, 0.56, -0.82, 0.24], + 'CG': [-1.66, -1.22, -0.44, -0.82, -0.29, -1.39], + 'CT': [0.78, 0.36, 0.09, 0.68, -0.24, -0.62], + 'GA': [-0.08, 0.5, 0.27, 0.13, -0.39, 0.71], + 'GC': [-0.08, 0.22, 1.33, -0.35, 0.65, 1.59], + 'GG': [0.06, 1.08, 0.09, 0.56, -0.82, 0.24], + 'GT': [1.50, 0.50, 0.80, 0.13, 1.29, 1.04], + 'TA': [-1.23, -2.37, -0.44, -2.24, -1.51, -1.39], + 'TC': [-0.08, 0.5, 0.27, 0.13, -0.39, 0.71], + 'TG': [-1.38, -1.36, -0.27, -0.86, -0.62, -1.25], + 'TT': [0.06, 0.5, 0.27, 1.59, 0.11, -0.11] + } + counts = make_kmer_dict(k) + for i in range(len(self.dna_sequence) - k + 1): + k_mer = self.dna_sequence[i:i + k] + counts[k_mer] += 1 + + fk = {k: v / sum(counts.values()) for k, v in counts.items()} + all_possibilites: List[str] = make_kmer_list(k) + + thetas = [] + L = len(self.dna_sequence) + for i in range(lamda): + big_somatorio = 0.0 + for j in range(L-i-2): + somatorio = 0.0 + first_dinucleotide = self.dna_sequence[j:j+2] + second_dinucleotide = self.dna_sequence[j+i+1:j+i+1+2] + for k in range(6): + val = (d[first_dinucleotide][k] - d[second_dinucleotide][k])**2 + somatorio += val + big_somatorio += somatorio/6 + + 
# Theta calculation + if(L-i-2 == 0): + theta = 0.0 + else: + theta = big_somatorio / (L-i-2) + thetas.append(theta) + + # -------------------------------------------- + + res = {} + for k_tuple in all_possibilites: + res[k_tuple] = round(fk[k_tuple] / (1 + w * sum(thetas)), 3) + + for i in range(lamda): + res["lambda."+str(i+1)] = round(w * thetas[i] / (1 + w * sum(thetas)), 3) + return res + + # ---------------------- CALCULATE DESCRIPTORS ---------------------- # + + def get_descriptors(self, descriptor_list = []): + """ + Calculates all descriptors + Parameters + ---------- + descriptor_list : List of str + List of descriptors to be calculated the user wants to calculate. The list must be a subset of the + descriptors in the list of descriptors. If the list is empty, all descriptors will be calculated. + Returns + ------- + Dict + Dictionary with values of all descriptors + """ + res = {} + if(descriptor_list == []): + res['length'] = self.get_length() + res['gc_content'] = self.get_gc_content() + res['at_content'] = self.get_at_content() + res['nucleic_acid_composition'] = self.get_nucleic_acid_composition() + res['dinucleotide_composition'] = self.get_dinucleotide_composition() + res['trinucleotide_composition'] = self.get_trinucleotide_composition() + res['k_spaced_nucleic_acid_pairs'] = self.get_k_spaced_nucleic_acid_pairs() + res['kmer'] = self.get_kmer() + res['accumulated_nucleotide_frequency'] = self.get_accumulated_nucleotide_frequency() + res['DAC'] = self.get_DAC() + res['DCC'] = self.get_DCC() + res['DACC'] = self.get_DACC() + res['TAC'] = self.get_TAC() + res['TCC'] = self.get_TCC() + res['TACC'] = self.get_TACC() + res['PseDNC'] = self.get_PseDNC() + res['PseKNC'] = self.get_PseKNC() + else: + for descriptor in descriptor_list: + function = getattr(self, 'get_' + descriptor) + res[descriptor] = function() + return res diff --git a/src/propythia/DNA/essential_genes/DeepHE.ipynb b/src/propythia/DNA/essential_genes/DeepHE.ipynb new file mode 100644 
index 0000000..4f6c212 --- /dev/null +++ b/src/propythia/DNA/essential_genes/DeepHE.ipynb @@ -0,0 +1,2978 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Accurately predicting human essential genes based on deep learning" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This section will present a comparative analysis to demonstrate the application and performance of PyTorch models for addressing sequence-based prediction problems.\n", + "\n", + "We'll try to replicate the [DeepHE: Accurately predicting human essential genes based on deep learning](https://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1008229) deep learning model and evaluate its performance. Other models will be compared to the DeepHE model.\n", + "\n", + "DeepHE's model is based on the multilayer perceptron structure. It includes one input layer, three hidden layers, and one output layer. All the hidden layers utilize the ReLU activation function. The output layer uses sigmoid activation function to perform discrete classification. The loss function in DeepHE is binary cross-entropy. A dropout layer is used after each hidden layer." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], + "source": [ + "%load_ext autoreload\n", + "import pandas as pd\n", + "from os.path import exists\n", + "import pickle" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "At this point, there is already 2 cleaned datasets which will be used in this notebook.\n", + "- `deg.csv`: 16 datasets grouped that contain essential genes of the human genome. Each sequence has an EMBL id associated, the original dataset it came from, among other information. 
[Link](http://origin.tubic.org/deg/public/index.php)\n", + "- `negative.csv`: contains the genome DNA sequences of humans for all annotated genes from Ensembl. Each sequence has an EMBL id associated. [Link](http://www.ensembl.org/Homo_sapiens/Info/Index)\n", + "\n", + "The process of cleaning each dataset is described below:\n", + "- `deg.csv`:\n", + " - removed rows with unavailable sequences.\n", + "- `negative.csv`:\n", + " - removed all rows whose sequences belonged to the `deg.csv` dataset.\n", + " - removed all rows whose EMBL id was in the `deg.csv` dataset.\n", + " - grouped all sequences with the same EMBL id, and kept only the first two.\n", + "\n", + "One of the tasks in this notebook is to also build the positive dataset (`essential_genes_positive.csv`), which will contain only the sequences that are in at least 5 different datasets." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(26791, 15)\n", + "(27684, 2)\n" + ] + } + ], + "source": [ + "deg_dataset = pd.read_csv(\"datasets/essential_genes/deg.csv\", sep=';')\n", + "print(deg_dataset.shape)\n", + "\n", + "eg_negative = pd.read_csv(\"datasets/essential_genes/negative.csv\", sep=',')\n", + "print(eg_negative.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Creating positive essential genes dataset. Each sequence needs to be in at least 5 datasets."
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " id sequence\n", + "0 GI:- ATGGTGCTGTCCCAGAGACAACGAGATGAACTAAATCGAGCTATAG...\n", + "1 GI:- ATGGCTGCAGCTTCATATGATCAGTTGTTAAAGCAAGTTGAGGCAC...\n", + "2 GI:- ATGAGCCGCCTGCTCTGGAGGAAGGTGGCCGGCGCCACCGTCGGGC...\n", + "3 GI:- ATGCAGAGCTGGAGTCGTGTGTACTGCTCCTTGGCCAAGAGAGGCC...\n", + "4 GI:- ATGGTTGGCTATGACCCCAAACCAGATGGCAGGAATAACACCAAGT...\n", + "(2010, 2)\n" + ] + } + ], + "source": [ + "# for each sequence, get all the datasets that contain it\n", + "d = {}\n", + "for _, row in deg_dataset.iterrows():\n", + " if(row[\"sequence\"] in d):\n", + " d[row[\"sequence\"]].append((row[\"id1\"], row[\"id4\"]))\n", + " else:\n", + " d[row[\"sequence\"]] = [(row[\"id1\"], row[\"id4\"])]\n", + "\n", + "\n", + "# get a list of sequences that are in more than 5 datasets\n", + "essential_sequences = []\n", + "for key, val in d.items():\n", + " if(len(val) >= 5):\n", + " essential_sequences.append((val[0][1], key))\n", + " \n", + "# create dataframe with essential sequences\n", + "eg_positive = pd.DataFrame(essential_sequences, columns=[\"id\", \"sequence\"])\n", + "print(eg_positive.head())\n", + "print(eg_positive.shape)\n", + "\n", + "# write to csv\n", + "eg_positive.to_csv(\"datasets/essential_genes/positive.csv\", index=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "unique positive sequences: 2010\n", + "unique negative sequences: 23443\n" + ] + } + ], + "source": [ + "print(\"unique positive sequences:\", len(set(eg_positive[\"sequence\"])))\n", + "print(\"unique negative sequences:\", len(set(eg_negative[\"sequence\"])))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Even though the negative dataset has 27684 sequences, not all of them are unique. 
So, we need to remove the duplicates." + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(23443, 2)\n" + ] + } + ], + "source": [ + "eg_negative = eg_negative.drop_duplicates(subset=\"sequence\")\n", + "print(eg_negative.shape)\n", + "eg_negative.to_csv(\"datasets/essential_genes/negative_unique.csv\", index=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "At this point, we have both positive and negative datasets:\n", + "* eg_positive (2010, 2) -> positive dataset with essential genes \n", + "* eg_negative (23443, 2) -> negative dataset with non essential genes " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Joining the positive and negative datasets" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(25453, 3)\n", + " id sequence label\n", + "0 GI:- ATGGTGCTGTCCCAGAGACAACGAGATGAACTAAATCGAGCTATAG... 1\n", + "1 GI:- ATGGCTGCAGCTTCATATGATCAGTTGTTAAAGCAAGTTGAGGCAC... 1\n", + "2 GI:- ATGAGCCGCCTGCTCTGGAGGAAGGTGGCCGGCGCCACCGTCGGGC... 1\n", + "3 GI:- ATGCAGAGCTGGAGTCGTGTGTACTGCTCCTTGGCCAAGAGAGGCC... 1\n", + "4 GI:- ATGGTTGGCTATGACCCCAAACCAGATGGCAGGAATAACACCAAGT... 
1\n" + ] + } + ], + "source": [ + "# adding labels to the dataset\n", + "eg_positive[\"label\"] = 1\n", + "eg_negative[\"label\"] = 0\n", + "\n", + "# joining the two datasets\n", + "dataset = pd.concat([eg_positive, eg_negative])\n", + "\n", + "print(dataset.shape)\n", + "print(dataset.head())\n", + "\n", + "dataset.to_csv(\"datasets/essential_genes/dataset.csv\", index=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(25453, 3)\n" + ] + } + ], + "source": [ + "print(dataset.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAEDCAYAAAA1CHOzAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAN7klEQVR4nO3dX4yddV7H8ffHlkUUIfwZCE5hS6T+AaJom4puYiAkUt2LsgkkJUYaQ1KDEN3EC8Gb9aYGLlYSjJDUQCi4wjbohmZXWElRN6sEGDYIFLYyWVgY29CuEJa9gN2yXy/mO9nDMJ2/Zc7AvF/JyTnzfZ7n8DtJ4d3zPOcMqSokSfqpYS9AkrQyGARJEmAQJEnNIEiSAIMgSWoGQZIEwNphL2CxzjzzzFq/fv2wlyFJHyvPPPPM96pqZKZtH9sgrF+/nrGxsWEvQ5I+VpJ891jbPGUkSQIMgiSpGQRJEmAQJEnNIEiSAIMgSWoGQZIEGARJUvvYfjHt42L9zV8b9hI+UV699bPDXoL0ieU7BEkSYBAkSc0gSJIAgyBJagZBkgQYBElSMwiSJMAgSJKaQZAkAQZBktQMgiQJMAiSpGYQJEmAQZAkNYMgSQIMgiSpGQRJEmAQJEnNIEiSAIMgSWoGQZIEGARJUjMIkiTAIEiSmkGQJAEGQZLU5gxCknOT/FuSl5LsT/JnPT89yWNJXu770waOuSXJeJIDSa4cmG9M8nxvuyNJen5iki/3/Mkk6z+C1ypJmsV83iEcBf68qn4FuBS4McmFwM3AvqraAOzrn+lt24CLgC3AnUnW9HPdBewANvRtS8+vB96qqguA24HbjsNrkyQtwJxBqKpDVfWtfvwO8BIwCmwFdvduu4Gr+vFW4MGqeq+qXgHGgc1JzgFOqaonqqqA+6YdM/VcDwFXTL17kCQtjwVdQ+hTOb8OPAmcXVWHYDIawFm92yjw+sBhEz0b7cfT5x84pqqOAm8DZyxkbZKkpZl3EJKcDPwT8Pmq+v5su84wq1nmsx0zfQ07kowlGTty5MhcS5YkLcC8gpDkBCZj8KWq+ucev9Gngej7wz2fAM4dOHwdcLDn62aYf+CYJGuBU4E3p6+jqnZV1aaq2jQyMjKfpUuS5mk+nzIK
cDfwUlX9zcCmvcD2frwdeHhgvq0/OXQ+kxePn+rTSu8kubSf87ppx0w919XA432dQZK0TNbOY5/PAH8IPJ/k2Z79JXArsCfJ9cBrwDUAVbU/yR7gRSY/oXRjVb3fx90A3AucBDzSN5gMzv1Jxpl8Z7BtaS9LkrRQcwahqr7JzOf4Aa44xjE7gZ0zzMeAi2eYv0sHRZI0HH5TWZIEGARJUjMIkiTAIEiSmkGQJAEGQZLUDIIkCTAIkqRmECRJgEGQJDWDIEkCDIIkqRkESRJgECRJzSBIkgCDIElqBkGSBBgESVIzCJIkwCBIkppBkCQBBkGS1AyCJAkwCJKkZhAkSYBBkCQ1gyBJAgyCJKkZBEkSYBAkSc0gSJIAgyBJagZBkgQYBElSMwiSJGAeQUhyT5LDSV4YmP1Vkv9N8mzffn9g2y1JxpMcSHLlwHxjkud72x1J0vMTk3y5508mWX+cX6MkaR7m8w7hXmDLDPPbq+qSvv0LQJILgW3ARX3MnUnW9P53ATuADX2bes7rgbeq6gLgduC2Rb4WSdISzBmEqvoG8OY8n28r8GBVvVdVrwDjwOYk5wCnVNUTVVXAfcBVA8fs7scPAVdMvXuQJC2fpVxDuCnJc31K6bSejQKvD+wz0bPRfjx9/oFjquoo8DZwxhLWJUlahMUG4S7gF4BLgEPAF3s+09/sa5b5bMd8SJIdScaSjB05cmRBC5YkzW5RQaiqN6rq/ar6MfD3wObeNAGcO7DrOuBgz9fNMP/AMUnWAqdyjFNUVbWrqjZV1aaRkZHFLF2SdAyLCkJfE5jyOWDqE0h7gW39yaHzmbx4/FRVHQLeSXJpXx+4Dnh44Jjt/fhq4PG+ziBJWkZr59ohyQPAZcCZSSaALwCXJbmEyVM7rwJ/DFBV+5PsAV4EjgI3VtX7/VQ3MPmJpZOAR/oGcDdwf5JxJt8ZbDsOr0uStEBzBqGqrp1hfPcs++8Eds4wHwMunmH+LnDNXOuQJH20/KayJAkwCJKkZhAkSYBBkCQ1gyBJAgyCJKkZBEkSYBAkSc0gSJIAgyBJagZBkgQYBElSMwiSJMAgSJKaQZAkAQZBktQMgiQJMAiSpGYQJEmAQZAkNYMgSQIMgiSpGQRJEmAQJEnNIEiSAIMgSWoGQZIEGARJUjMIkiTAIEiSmkGQJAEGQZLUDIIkCTAIkqRmECRJwDyCkOSeJIeTvDAwOz3JY0le7vvTBrbdkmQ8yYEkVw7MNyZ5vrfdkSQ9PzHJl3v+ZJL1x/k1SpLmYT7vEO4Ftkyb3Qzsq6oNwL7+mSQXAtuAi/qYO5Os6WPuAnYAG/o29ZzXA29V1QXA7cBti30xkqTFmzMIVfUN4M1p463A7n68G7hqYP5gVb1XVa8A48DmJOcAp1TVE1VVwH3Tjpl6roeAK6bePUiSls9iryGcXVWHAPr+rJ6PAq8P7DfRs9F+PH3+gWOq6ijwNnDGItclSVqk431Reaa/2dcs89mO+fCTJzuSjCUZO3LkyCKXKEmayWKD8EafBqLvD/d8Ajh3YL91wMGer5th/oFjkqwFTuXDp6gAqKpdVbWpqjaNjIwscumSpJksNgh7ge39eDvw8MB8W39y6HwmLx4/1aeV3klyaV8fuG7aMVPPdTXweF9nkCQto7Vz7ZDkAeAy4MwkE8AXgFuBPUmuB14DrgGoqv1J9gAvAkeBG6vq/X6qG5j8xNJJwCN9A7gbuD/JOJPvDLYdl1cmSVqQOYNQVdceY9MVx9h/J7BzhvkYcPEM83fpoEiShsdvKkuSAIMgSWoGQZIEGARJUjMIkiTAIEiSmkGQJAEGQZLUDIIkCTAIkqRmECRJgEGQJDWDIEkCDIIkqRkESRJgECRJzSBIkgCDIElqBkGSBBgESVIzCJIkwCBIkppBkCQBBkGS1AyCJAkwCJKkZhAkSYBBkCQ1gyBJAgyCJKkZBEkSYBAkSc0gSJIAgyBJagZBkgQsMQhJXk3yfJJnk4z17PQkjyV5
ue9PG9j/liTjSQ4kuXJgvrGfZzzJHUmylHVJkhbueLxDuLyqLqmqTf3zzcC+qtoA7OufSXIhsA24CNgC3JlkTR9zF7AD2NC3LcdhXZKkBfgoThltBXb3493AVQPzB6vqvap6BRgHNic5Bzilqp6oqgLuGzhGkrRMlhqEAv41yTNJdvTs7Ko6BND3Z/V8FHh94NiJno324+lzSdIyWrvE4z9TVQeTnAU8luTbs+w703WBmmX+4SeYjM4OgPPOO2+ha5UkzWJJ7xCq6mDfHwa+AmwG3ujTQPT94d59Ajh34PB1wMGer5thPtM/b1dVbaqqTSMjI0tZuiRpmkUHIcnPJvm5qcfA7wIvAHuB7b3bduDhfrwX2JbkxCTnM3nx+Kk+rfROkkv700XXDRwjSVomSzlldDbwlf6E6FrgH6vq0SRPA3uSXA+8BlwDUFX7k+wBXgSOAjdW1fv9XDcA9wInAY/0TZK0jBYdhKr6DvBrM8z/D7jiGMfsBHbOMB8DLl7sWiRJS+c3lSVJgEGQJDWDIEkCDIIkqRkESRJgECRJzSBIkgCDIElqBkGSBBgESVIzCJIkwCBIkppBkCQBBkGS1AyCJAkwCJKkZhAkSYBBkCQ1gyBJAgyCJKkZBEkSYBAkSW3tsBcgaTjW3/y1YS/hE+XVWz877CUsme8QJEmAQZAkNYMgSQIMgiSpGQRJEmAQJEnNIEiSAIMgSWoGQZIEGARJUjMIkiTAIEiSmkGQJAErKAhJtiQ5kGQ8yc3DXo8krTYrIghJ1gB/B/wecCFwbZILh7sqSVpdVkQQgM3AeFV9p6p+CDwIbB3ymiRpVVkp/4OcUeD1gZ8ngN+cvlOSHcCO/vEHSQ4sw9pWizOB7w17EXPJbcNegYbAP5vH16ePtWGlBCEzzOpDg6pdwK6PfjmrT5Kxqto07HVI0/lnc/mslFNGE8C5Az+vAw4OaS2StCqtlCA8DWxIcn6STwHbgL1DXpMkrSor4pRRVR1NchPwdWANcE9V7R/yslYbT8VppfLP5jJJ1YdO1UuSVqGVcspIkjRkBkGSBBgESVJbEReVtbyS/DKT3wQfZfL7HgeBvVX10lAXJmmofIewyiT5CyZ/NUiAp5j8yG+AB/ylglrJkvzRsNfwSeenjFaZJP8DXFRVP5o2/xSwv6o2DGdl0uySvFZV5w17HZ9knjJafX4M/Dzw3Wnzc3qbNDRJnjvWJuDs5VzLamQQVp/PA/uSvMxPfqHgecAFwE3DWpTUzgauBN6aNg/wX8u/nNXFIKwyVfVokl9k8leOjzL5L9oE8HRVvT/UxUnwVeDkqnp2+oYk/77sq1llvIYgSQL8lJEkqRkESRJgEKR5SfKDObavT/LCAp/z3iRXL21l0vFjECRJgEGQFiTJyUn2JflWkueTbB3YvDbJ7iTPJXkoyc/0MRuT/EeSZ5J8Pck5Q1q+NCuDIC3Mu8Dnquo3gMuBLyaZ+n+C/xKwq6p+Ffg+8CdJTgD+Fri6qjYC9wA7h7BuaU5+D0FamAB/neR3mPxm9yg/+Qbt61X1n/34H4A/BR4FLgYe626sAQ4t64qleTII0sL8ATACbKyqHyV5Ffjp3jb9Sz3FZED2V9VvLd8SpcXxlJG0MKcChzsGlwOfHth2XpKp//BfC3wTOACMTM2TnJDkomVdsTRPBkFamC8Bm5KMMflu4dsD214CtvcvaDsduKuqfghcDdyW5L+BZ4HfXt4lS/Pjr66QJAG+Q5AkNYMgSQIMgiSpGQRJEmAQJEnNIEiSAIMgSWoGQZIEwP8DfzVO3WW4JXsAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# plot the distribution of each class\n", + "dataset.groupby('label').size().plot(kind='bar')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Classification using DNA descriptors" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "import sys\n", + "import torch\n", + "from torch import nn\n", + "import os\n", + "\n", + "sys.path.append('../')\n", + "from descriptors.descriptors import DNADescriptor" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "def calculate_feature(data):\n", + " list_feature = []\n", + " count = 0\n", + " for seq in data['sequence']:\n", + " res = {'sequence': seq}\n", + " dna = DNADescriptor(seq)\n", + " feature = dna.get_descriptors()\n", + " res.update(feature)\n", + " list_feature.append(res)\n", + " # print progress every 100 sequences\n", + " if count % 100 == 0:\n", + " print(count, '/', len(data))\n", + "\n", + " count += 1\n", + " print(\"Done!\")\n", + " df = pd.DataFrame(list_feature)\n", + " return df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Skip the calculation of descriptors if `features.pkl` exists which already has them calculated. Skip all of this if `fps_x_descriptor.pkl` exists because it already has the features calculated and **normalized**. The need of data normalization is explained in the next section." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Features already calculated and normalized\n" + ] + } + ], + "source": [ + "if exists(\"datasets/essential_genes/fps_x_descriptor.pkl\") == False:\n", + " if exists(\"datasets/essential_genes/features.pkl\"):\n", + " with open(\"datasets/essential_genes/features.pkl\", \"rb\") as f:\n", + " features = pickle.load(f)\n", + " print(\"Features loaded from pickle file\")\n", + " else:\n", + " features = calculate_feature(dataset)\n", + " with open(\"datasets/essential_genes/features.pkl\", \"wb\") as f:\n", + " pickle.dump(features, f)\n", + "else:\n", + " print(\"Features already calculated and normalized\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Functions to help normalize the data.\n", + "\n", + "Without being normalized, we have a dataset with 17 columns. Each column is a result of a DNA descriptor, and some of these columns are numbers, dicts and even lists.\n", + "\n", + "We still need to normalize those who have dictionaries and lists because the model can't handle data in these types.\n", + "\n", + "To normalize the data, dicts and lists need to \"explode\" into more columns. \n", + "\n", + "E.g. dicts:\n", + "\n", + "| descriptor_hello |\n", + "| ---------------- |\n", + "| {'a': 1, 'b': 2} |\n", + "\n", + "will be transformed into:\n", + "\n", + "| descriptor_hello_a | descriptor_hello_b |\n", + "| ------------------ | ------------------ |\n", + "| 1 | 2 |\n", + "\n", + "E.g. 
lists:\n", + "\n", + "| descriptor_hello |\n", + "| ---------------- |\n", + "| [1, 2, 3] |\n", + "\n", + "will be transformed into:\n", + "\n", + "| descriptor_hello_0 | descriptor_hello_1 | descriptor_hello_2 |\n", + "| ------------------ | ------------------ | ------------------ |\n", + "| 1 | 2 | 3 |" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "def process_lists(fps_x, field):\n", + " l = fps_x[field].to_list()\n", + " new_df = pd.DataFrame(l)\n", + " new_df.columns = [str(field) + \"_\" + str(i) for i in new_df.columns]\n", + " fps_x.drop(field, axis=1, inplace=True)\n", + " return new_df\n", + "\n", + "def process_lists_of_lists(fps_x, field):\n", + " l = fps_x[field].to_list()\n", + " new_df = pd.DataFrame(l)\n", + " new_df.columns = [str(field) + \"_\" + str(i) for i in new_df.columns]\n", + " empty_val = {} if field == \"enhanced_nucleic_acid_composition\" else []\n", + " small_processed = []\n", + " for f in new_df.columns:\n", + " col = [empty_val if i is None else i for i in new_df[f].to_list()]\n", + " sub = pd.DataFrame(col)\n", + " sub.columns = [str(f) + \"_\" + str(i) for i in sub.columns]\n", + " small_processed.append(sub)\n", + " fps_x.drop(field, axis=1, inplace=True)\n", + " return small_processed" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "def normalize_features():\n", + " fps_y = dataset['label']\n", + " fps_x = features.loc[:, features.columns != 'label']\n", + " fps_x = fps_x.loc[:, fps_x.columns != 'sequence']\n", + " print(fps_x.shape)\n", + " \n", + " lists = [\"nucleic_acid_composition\",\"dinucleotide_composition\",\"trinucleotide_composition\",\"k_spaced_nucleic_acid_pairs\",\"kmer\",\"PseDNC\", \"PseKNC\", \"DAC\", \"DCC\", \"DACC\", \"TAC\",\"TCC\",\"TACC\"]\n", + " lists_of_lists = [\n", + " \"accumulated_nucleotide_frequency\"\n", + " ]\n", + "\n", + " small_processed = []\n", + " for i in 
lists:\n", + " new_df = process_lists(fps_x, i)\n", + " small_processed.append(new_df)\n", + " \n", + " for i in lists_of_lists:\n", + " smaller_processed = process_lists_of_lists(fps_x, i)\n", + " small_processed += smaller_processed\n", + "\n", + " # concat final with original\n", + " fps_x = pd.concat([fps_x, *small_processed], axis=1)\n", + "\n", + " with open(\"datasets/essential_genes/fps_x_descriptor.pkl\", \"wb\") as f:\n", + " pickle.dump(fps_x, f)\n", + " \n", + " with open(\"datasets/essential_genes/fps_y_descriptor.pkl\", \"wb\") as f:\n", + " pickle.dump(fps_y, f)\n", + " \n", + " return fps_x, fps_y" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Skip the data normalization if it was already performed (`fps_x_descriptor.pkl` exists)." + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Features loaded from pickle file\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
lengthgc_contentat_contentnucleic_acid_composition_Anucleic_acid_composition_Cnucleic_acid_composition_Gnucleic_acid_composition_Tdinucleotide_composition_AAdinucleotide_composition_ACdinucleotide_composition_AG...accumulated_nucleotide_frequency_0_Gaccumulated_nucleotide_frequency_0_Taccumulated_nucleotide_frequency_1_Aaccumulated_nucleotide_frequency_1_Caccumulated_nucleotide_frequency_1_Gaccumulated_nucleotide_frequency_1_Taccumulated_nucleotide_frequency_2_Aaccumulated_nucleotide_frequency_2_Caccumulated_nucleotide_frequency_2_Gaccumulated_nucleotide_frequency_2_T
012330.4400.5600.3030.1930.2470.2560.0960.0520.075...0.2240.2470.3240.1820.2270.2670.3210.1880.2400.251
185320.4120.5880.3440.2080.2050.2440.1260.0540.089...0.2400.2500.3360.1930.2200.2510.3450.1980.2110.246
237200.6040.3960.2150.2900.3140.1810.0410.0500.089...0.3400.1530.2050.3110.3220.1620.2080.3030.3190.171
315300.4140.5860.3030.1690.2460.2820.0990.0440.080...0.2480.2900.2980.1630.2470.2920.3010.1640.2440.291
49630.5590.4410.2120.2860.2730.2290.0480.0450.076...0.2820.2240.1990.2860.2780.2370.2130.2890.2670.230
..................................................................
254445760.4390.5610.2880.2100.2290.2730.0890.0680.068...0.2290.3120.2920.1810.2150.3120.2960.2060.2080.289
254455760.4340.5660.2860.2120.2220.2800.0890.0680.066...0.2290.3120.2920.1810.2150.3120.2960.2060.2080.289
2544633630.3940.6060.3380.1920.2020.2680.1220.0550.076...0.1930.2620.3530.1900.1960.2620.3510.1870.1980.264
2544711010.6330.3670.2270.2920.3410.1400.0370.0560.105...0.2950.1750.2270.2920.3230.1580.2250.2930.3400.142
25448960.6350.3650.1460.3020.3330.2190.0110.0420.042...0.3330.3330.1250.2710.3750.2290.1250.2920.3610.222
\n", + "

25449 rows × 247 columns

\n", + "
" + ], + "text/plain": [ + " length gc_content at_content nucleic_acid_composition_A \\\n", + "0 1233 0.440 0.560 0.303 \n", + "1 8532 0.412 0.588 0.344 \n", + "2 3720 0.604 0.396 0.215 \n", + "3 1530 0.414 0.586 0.303 \n", + "4 963 0.559 0.441 0.212 \n", + "... ... ... ... ... \n", + "25444 576 0.439 0.561 0.288 \n", + "25445 576 0.434 0.566 0.286 \n", + "25446 3363 0.394 0.606 0.338 \n", + "25447 1101 0.633 0.367 0.227 \n", + "25448 96 0.635 0.365 0.146 \n", + "\n", + " nucleic_acid_composition_C nucleic_acid_composition_G \\\n", + "0 0.193 0.247 \n", + "1 0.208 0.205 \n", + "2 0.290 0.314 \n", + "3 0.169 0.246 \n", + "4 0.286 0.273 \n", + "... ... ... \n", + "25444 0.210 0.229 \n", + "25445 0.212 0.222 \n", + "25446 0.192 0.202 \n", + "25447 0.292 0.341 \n", + "25448 0.302 0.333 \n", + "\n", + " nucleic_acid_composition_T dinucleotide_composition_AA \\\n", + "0 0.256 0.096 \n", + "1 0.244 0.126 \n", + "2 0.181 0.041 \n", + "3 0.282 0.099 \n", + "4 0.229 0.048 \n", + "... ... ... \n", + "25444 0.273 0.089 \n", + "25445 0.280 0.089 \n", + "25446 0.268 0.122 \n", + "25447 0.140 0.037 \n", + "25448 0.219 0.011 \n", + "\n", + " dinucleotide_composition_AC dinucleotide_composition_AG ... \\\n", + "0 0.052 0.075 ... \n", + "1 0.054 0.089 ... \n", + "2 0.050 0.089 ... \n", + "3 0.044 0.080 ... \n", + "4 0.045 0.076 ... \n", + "... ... ... ... \n", + "25444 0.068 0.068 ... \n", + "25445 0.068 0.066 ... \n", + "25446 0.055 0.076 ... \n", + "25447 0.056 0.105 ... \n", + "25448 0.042 0.042 ... \n", + "\n", + " accumulated_nucleotide_frequency_0_G \\\n", + "0 0.224 \n", + "1 0.240 \n", + "2 0.340 \n", + "3 0.248 \n", + "4 0.282 \n", + "... ... \n", + "25444 0.229 \n", + "25445 0.229 \n", + "25446 0.193 \n", + "25447 0.295 \n", + "25448 0.333 \n", + "\n", + " accumulated_nucleotide_frequency_0_T \\\n", + "0 0.247 \n", + "1 0.250 \n", + "2 0.153 \n", + "3 0.290 \n", + "4 0.224 \n", + "... ... 
\n", + "25444 0.312 \n", + "25445 0.312 \n", + "25446 0.262 \n", + "25447 0.175 \n", + "25448 0.333 \n", + "\n", + " accumulated_nucleotide_frequency_1_A \\\n", + "0 0.324 \n", + "1 0.336 \n", + "2 0.205 \n", + "3 0.298 \n", + "4 0.199 \n", + "... ... \n", + "25444 0.292 \n", + "25445 0.292 \n", + "25446 0.353 \n", + "25447 0.227 \n", + "25448 0.125 \n", + "\n", + " accumulated_nucleotide_frequency_1_C \\\n", + "0 0.182 \n", + "1 0.193 \n", + "2 0.311 \n", + "3 0.163 \n", + "4 0.286 \n", + "... ... \n", + "25444 0.181 \n", + "25445 0.181 \n", + "25446 0.190 \n", + "25447 0.292 \n", + "25448 0.271 \n", + "\n", + " accumulated_nucleotide_frequency_1_G \\\n", + "0 0.227 \n", + "1 0.220 \n", + "2 0.322 \n", + "3 0.247 \n", + "4 0.278 \n", + "... ... \n", + "25444 0.215 \n", + "25445 0.215 \n", + "25446 0.196 \n", + "25447 0.323 \n", + "25448 0.375 \n", + "\n", + " accumulated_nucleotide_frequency_1_T \\\n", + "0 0.267 \n", + "1 0.251 \n", + "2 0.162 \n", + "3 0.292 \n", + "4 0.237 \n", + "... ... \n", + "25444 0.312 \n", + "25445 0.312 \n", + "25446 0.262 \n", + "25447 0.158 \n", + "25448 0.229 \n", + "\n", + " accumulated_nucleotide_frequency_2_A \\\n", + "0 0.321 \n", + "1 0.345 \n", + "2 0.208 \n", + "3 0.301 \n", + "4 0.213 \n", + "... ... \n", + "25444 0.296 \n", + "25445 0.296 \n", + "25446 0.351 \n", + "25447 0.225 \n", + "25448 0.125 \n", + "\n", + " accumulated_nucleotide_frequency_2_C \\\n", + "0 0.188 \n", + "1 0.198 \n", + "2 0.303 \n", + "3 0.164 \n", + "4 0.289 \n", + "... ... \n", + "25444 0.206 \n", + "25445 0.206 \n", + "25446 0.187 \n", + "25447 0.293 \n", + "25448 0.292 \n", + "\n", + " accumulated_nucleotide_frequency_2_G \\\n", + "0 0.240 \n", + "1 0.211 \n", + "2 0.319 \n", + "3 0.244 \n", + "4 0.267 \n", + "... ... 
\n", + "25444 0.208 \n", + "25445 0.208 \n", + "25446 0.198 \n", + "25447 0.340 \n", + "25448 0.361 \n", + "\n", + " accumulated_nucleotide_frequency_2_T \n", + "0 0.251 \n", + "1 0.246 \n", + "2 0.171 \n", + "3 0.291 \n", + "4 0.230 \n", + "... ... \n", + "25444 0.289 \n", + "25445 0.289 \n", + "25446 0.264 \n", + "25447 0.142 \n", + "25448 0.222 \n", + "\n", + "[25449 rows x 247 columns]" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "if exists(\"datasets/essential_genes/fps_x_descriptor.pkl\"):\n", + " with open(\"datasets/essential_genes/fps_x_descriptor.pkl\", \"rb\") as f:\n", + " fps_x = pickle.load(f)\n", + " with open(\"datasets/essential_genes/fps_y_descriptor.pkl\", \"rb\") as f:\n", + " fps_y = pickle.load(f)\n", + " print(\"Features loaded from pickle file\")\n", + "else:\n", + " fps_x, fps_y = normalize_features()\n", + "fps_x" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "from src.hyperparameter_tuning import hyperparameter_tuning\n", + "from ray import tune" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Training the model and evaluating the performance" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [], + "source": [ + "torch.manual_seed(2022)\n", + "os.environ[\"CUDA_VISIBLE_DEVICES\"] = '4,5'\n", + "device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n", + "\n", + "class_weights = torch.tensor([1.0, 4.0]).to(device)\n", + "\n", + "fixed_vals = {\n", + " 'epochs': 50,\n", + " 'optimizer_label': 'adam',\n", + " 'loss_function': nn.CrossEntropyLoss(weight=class_weights),\n", + " 'patience': 2, \n", + " 'output_size': 2,\n", + " 'model_label': 'mlp',\n", + " 'data_dir': 'essential_genes',\n", + " 'mode': 'descriptor'\n", + "}\n", + "\n", + "# these are hyperparameters to be tuned\n", + "config = {\n", + " \"hidden_size\": 
tune.choice([32, 64, 128, 256]),\n", + " \"lr\": tune.loguniform(1e-4, 1e-1),\n", + " \"batch_size\": tune.choice([8, 16, 32]),\n", + " \"dropout\": tune.uniform(0.3, 0.5)\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/jabreu/miniconda3/envs/dna-conda/lib/python3.8/site-packages/ray/_private/services.py:238: UserWarning: Not all Ray Dashboard dependencies were found. To use the dashboard please install Ray using `pip install ray[default]`. To disable this message, set RAY_DISABLE_IMPORT_WARNING env var to '1'.\n", + " warnings.warn(warning_message)\n", + "2022-06-22 17:16:25,583\tWARNING experiment.py:295 -- No name detected on trainable. Using DEFAULT.\n", + "2022-06-22 17:16:25,584\tINFO registry.py:66 -- Detected unknown callable for trainable. Converting to class.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "== Status ==\n", + "Memory usage on this node: 131.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: None | Iter 8.000: None | Iter 4.000: None | Iter 2.000: None | Iter 1.000: None\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+-------+--------------+-----------+---------------+-------------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr |\n", + "|---------------------+----------+-------+--------------+-----------+---------------+-------------|\n", + "| DEFAULT_aa136_00000 | RUNNING | | 32 | 0.490806 | 128 | 0.000464116 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 
0.00370153 |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 |\n", + "+---------------------+----------+-------+--------------+-----------+---------------+-------------+\n", + "\n", + "\n", + "== Status ==\n", + "Memory usage on this node: 132.7/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: None | Iter 8.000: None | Iter 4.000: None | Iter 2.000: None | Iter 1.000: None\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+-------+--------------+-----------+---------------+-------------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr |\n", + "|---------------------+----------+-------+--------------+-----------+---------------+-------------|\n", + "| DEFAULT_aa136_00000 | RUNNING | | 32 | 0.490806 | 128 | 0.000464116 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 
0.00205751 |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 |\n", + "+---------------------+----------+-------+--------------+-----------+---------------+-------------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [1/50, 0/478] loss: 0.68987405\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [1/50, 100/478] loss: 0.51656061\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [1/50, 200/478] loss: 0.50412208\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [1/50, 300/478] loss: 0.58845931\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [1/50, 400/478] loss: 0.34457323\n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.9216110019646365\n", + " date: 2022-06-22_17-16-33\n", + " done: false\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5258612325415015\n", + " mcc: 0.08382986364380658\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 1.5346076488494873\n", + " time_this_iter_s: 1.5346076488494873\n", + " time_total_s: 1.5346076488494873\n", + " timestamp: 1655914593\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00000\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5258612325415015\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [2/50, 0/478] loss: 0.63812971\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [2/50, 100/478] loss: 0.62217182\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [2/50, 200/478] loss: 0.62553257\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [2/50, 300/478] loss: 0.52347457\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [2/50, 400/478] loss: 
0.5324508\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5198581408709287\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [3/50, 0/478] loss: 0.44727612\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [3/50, 100/478] loss: 0.51104313\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [3/50, 200/478] loss: 0.51600176\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [3/50, 300/478] loss: 0.6028887\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [3/50, 400/478] loss: 0.47487661\n", + "== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: None | Iter 8.000: None | Iter 4.000: None | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+---------+------------+----------------------+---------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+----------+------------------------+--------------+-----------+---------------+-------------+---------+------------+----------------------+---------|\n", + "| DEFAULT_aa136_00000 | RUNNING | 192.168.85.234:2208011 | 32 | 0.490806 | 128 | 0.000464116 | 0.51716 | 0.894892 | 3 | 0.21335 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 | | | | |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 | | | | |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| 
DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+---------+------------+----------------------+---------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5171600800007582\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [4/50, 0/478] loss: 0.60054803\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [4/50, 100/478] loss: 0.41381383\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [4/50, 200/478] loss: 0.55092418\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [4/50, 300/478] loss: 0.47885665\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [4/50, 400/478] loss: 0.50202018\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5169517792761326\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [5/50, 0/478] loss: 0.43847811\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [5/50, 100/478] loss: 0.46009675\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [5/50, 200/478] loss: 0.46610659\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [5/50, 300/478] loss: 0.39092091\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [5/50, 400/478] loss: 0.32210511\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5196726605296135\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [6/50, 0/478] loss: 0.41917861\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [6/50, 100/478] loss: 0.54500473\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [6/50, 200/478] loss: 0.64454585\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [6/50, 300/478] loss: 0.60130596\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [6/50, 400/478] loss: 0.42010918\n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.9027504911591355\n", + " date: 2022-06-22_17-16-39\n", + " done: false\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " hostname: turing\n", + " iterations_since_restore: 6\n", + " loss: 0.5169986667111516\n", + " mcc: 0.24592532250493737\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 7.283294916152954\n", + " time_this_iter_s: 1.1411995887756348\n", + " time_total_s: 7.283294916152954\n", + " timestamp: 1655914599\n", + " timesteps_since_restore: 0\n", + " training_iteration: 6\n", + " trial_id: aa136_00000\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5169986667111516\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [7/50, 0/478] loss: 0.39071706\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [7/50, 100/478] loss: 0.51081043\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [7/50, 200/478] loss: 0.49922016\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [7/50, 300/478] loss: 0.69548148\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [7/50, 400/478] loss: 0.37604201\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5147938344627618\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [8/50, 0/478] loss: 0.38195479\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [8/50, 100/478] loss: 0.69236201\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [8/50, 
200/478] loss: 0.46583146\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [8/50, 300/478] loss: 0.70418721\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [8/50, 400/478] loss: 0.59396523\n", + "== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: None | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+---------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+---------|\n", + "| DEFAULT_aa136_00000 | RUNNING | 192.168.85.234:2208011 | 32 | 0.490806 | 128 | 0.000464116 | 0.515549 | 0.886051 | 8 | 0.24446 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 | | | | |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 | | | | |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | 
PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+---------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5155486999079585\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [9/50, 0/478] loss: 0.47961521\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [9/50, 100/478] loss: 0.37789133\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [9/50, 200/478] loss: 0.44564179\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [9/50, 300/478] loss: 0.75229746\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [9/50, 400/478] loss: 0.4402346\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.513775559514761\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [10/50, 0/478] loss: 0.49826008\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [10/50, 100/478] loss: 0.35014766\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [10/50, 200/478] loss: 0.56508654\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [10/50, 300/478] loss: 0.44240198\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [10/50, 400/478] loss: 0.61660951\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.511439822986722\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [11/50, 0/478] loss: 0.44910598\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [11/50, 100/478] loss: 0.49631646\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [11/50, 200/478] loss: 0.65642458\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [11/50, 300/478] loss: 0.47474179\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [11/50, 
400/478] loss: 0.43055797\n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.9029469548133595\n", + " date: 2022-06-22_17-16-45\n", + " done: false\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " hostname: turing\n", + " iterations_since_restore: 11\n", + " loss: 0.5163614902645349\n", + " mcc: 0.24100832899349578\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 12.867016553878784\n", + " time_this_iter_s: 1.0865564346313477\n", + " time_total_s: 12.867016553878784\n", + " timestamp: 1655914605\n", + " timesteps_since_restore: 0\n", + " training_iteration: 11\n", + " trial_id: aa136_00000\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5163614902645349\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [12/50, 0/478] loss: 0.50844884\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [12/50, 100/478] loss: 0.49372154\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [12/50, 200/478] loss: 0.43920717\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [12/50, 300/478] loss: 0.41347471\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [12/50, 400/478] loss: 0.58682495\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5128772670403123\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [13/50, 0/478] loss: 0.47044057\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [13/50, 100/478] loss: 0.3601281\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [13/50, 200/478] loss: 0.50525379\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [13/50, 300/478] loss: 0.43464309\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [13/50, 400/478] loss: 0.72738469\n", + "== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: None | Iter 
8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| DEFAULT_aa136_00000 | RUNNING | 192.168.85.234:2208011 | 32 | 0.490806 | 128 | 0.000464116 | 0.511865 | 0.877407 | 13 | 0.256422 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 | | | | |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 | | | | |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + 
"\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5118651442229748\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [14/50, 0/478] loss: 0.53213024\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [14/50, 100/478] loss: 0.52125245\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [14/50, 200/478] loss: 0.36366993\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [14/50, 300/478] loss: 0.33481258\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [14/50, 400/478] loss: 0.41769841\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.514135119318962\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [15/50, 0/478] loss: 0.49710703\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [15/50, 100/478] loss: 0.47272381\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [15/50, 200/478] loss: 0.54634142\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [15/50, 300/478] loss: 0.45439163\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [15/50, 400/478] loss: 0.57580876\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5113861914724112\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [16/50, 0/478] loss: 0.48759604\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [16/50, 100/478] loss: 0.34973693\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [16/50, 200/478] loss: 0.58057332\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [16/50, 300/478] loss: 0.42384604\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [16/50, 400/478] loss: 0.3452118\n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.9023575638506877\n", + " date: 2022-06-22_17-16-50\n", + " done: false\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " hostname: turing\n", + " iterations_since_restore: 16\n", + " loss: 0.5176659166812897\n", + " mcc: 
0.22968195307073827\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 18.4563627243042\n", + " time_this_iter_s: 1.1623249053955078\n", + " time_total_s: 18.4563627243042\n", + " timestamp: 1655914610\n", + " timesteps_since_restore: 0\n", + " training_iteration: 16\n", + " trial_id: aa136_00000\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5176659166812897\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [17/50, 0/478] loss: 0.51016217\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [17/50, 100/478] loss: 0.43984023\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [17/50, 200/478] loss: 0.48504508\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [17/50, 300/478] loss: 0.3823801\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [17/50, 400/478] loss: 0.45298922\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5137624090537429\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [18/50, 0/478] loss: 0.54375386\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [18/50, 100/478] loss: 0.54107577\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [18/50, 200/478] loss: 0.72816181\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [18/50, 300/478] loss: 0.4642697\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [18/50, 400/478] loss: 0.43205038\n", + "== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: 
/home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| DEFAULT_aa136_00000 | RUNNING | 192.168.85.234:2208011 | 32 | 0.490806 | 128 | 0.000464116 | 0.511828 | 0.879175 | 18 | 0.264919 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 | | | | |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 | | | | |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5118283901363612\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [19/50, 0/478] loss: 0.36861387\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [19/50, 100/478] 
loss: 0.56865203\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [19/50, 200/478] loss: 0.41424742\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [19/50, 300/478] loss: 0.34322029\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [19/50, 400/478] loss: 0.74527025\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5108585806563497\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [20/50, 0/478] loss: 0.47781006\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [20/50, 100/478] loss: 0.61703753\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [20/50, 200/478] loss: 0.44346598\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [20/50, 300/478] loss: 0.60879904\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [20/50, 400/478] loss: 0.40923232\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.51494116242975\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [21/50, 0/478] loss: 0.40704912\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [21/50, 100/478] loss: 0.39100194\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [21/50, 200/478] loss: 0.49078277\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [21/50, 300/478] loss: 0.51815081\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [21/50, 400/478] loss: 0.5183053\n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.887426326129666\n", + " date: 2022-06-22_17-16-56\n", + " done: false\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " hostname: turing\n", + " iterations_since_restore: 21\n", + " loss: 0.5116073828190565\n", + " mcc: 0.24772310028449873\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 24.02616286277771\n", + " time_this_iter_s: 1.0995526313781738\n", + " time_total_s: 24.02616286277771\n", + " timestamp: 1655914616\n", + " timesteps_since_restore: 0\n", + " 
training_iteration: 21\n", + " trial_id: aa136_00000\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5116073828190565\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [22/50, 0/478] loss: 0.62977988\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [22/50, 100/478] loss: 0.57811022\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [22/50, 200/478] loss: 0.4189716\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [22/50, 300/478] loss: 0.54172117\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [22/50, 400/478] loss: 0.67148203\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5128803562372923\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [23/50, 0/478] loss: 0.32692057\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [23/50, 100/478] loss: 0.3421748\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [23/50, 200/478] loss: 0.49851057\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [23/50, 300/478] loss: 0.60021728\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [23/50, 400/478] loss: 0.50858355\n", + "== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | 
dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| DEFAULT_aa136_00000 | RUNNING | 192.168.85.234:2208011 | 32 | 0.490806 | 128 | 0.000464116 | 0.510611 | 0.88664 | 23 | 0.259163 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 | | | | |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 | | | | |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5106113558635116\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [24/50, 0/478] loss: 0.56840211\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [24/50, 100/478] loss: 0.44073173\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [24/50, 200/478] loss: 0.36675978\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [24/50, 300/478] loss: 0.54674274\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [24/50, 400/478] loss: 0.33992305\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 
0.5203540079295635\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [25/50, 0/478] loss: 0.56723654\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [25/50, 100/478] loss: 0.38775757\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [25/50, 200/478] loss: 0.61808926\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [25/50, 300/478] loss: 0.5385834\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [25/50, 400/478] loss: 0.53443193\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5179462978616357\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [26/50, 0/478] loss: 0.34895968\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [26/50, 100/478] loss: 0.34593299\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [26/50, 200/478] loss: 0.62731481\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [26/50, 300/478] loss: 0.33892822\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [26/50, 400/478] loss: 0.41992518\n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.8927308447937131\n", + " date: 2022-06-22_17-17-01\n", + " done: false\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " hostname: turing\n", + " iterations_since_restore: 26\n", + " loss: 0.5175836551934481\n", + " mcc: 0.22061196025285917\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 29.516406774520874\n", + " time_this_iter_s: 1.0819473266601562\n", + " time_total_s: 29.516406774520874\n", + " timestamp: 1655914621\n", + " timesteps_since_restore: 0\n", + " training_iteration: 26\n", + " trial_id: aa136_00000\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5175836551934481\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [27/50, 0/478] loss: 0.37942797\n", + 
"\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [27/50, 100/478] loss: 0.61191261\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [27/50, 200/478] loss: 0.5639776\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [27/50, 300/478] loss: 0.58435035\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [27/50, 400/478] loss: 0.46136248\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5146163472905755\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [28/50, 0/478] loss: 0.53365684\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [28/50, 100/478] loss: 0.57379156\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [28/50, 200/478] loss: 0.40661034\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [28/50, 300/478] loss: 0.50133222\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [28/50, 400/478] loss: 0.41485777\n", + "== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (9 PENDING, 1 RUNNING)\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| DEFAULT_aa136_00000 | RUNNING | 
192.168.85.234:2208011 | 32 | 0.490806 | 128 | 0.000464116 | 0.513101 | 0.884086 | 28 | 0.253225 |\n", + "| DEFAULT_aa136_00001 | PENDING | | 32 | 0.363243 | 64 | 0.000837212 | | | | |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 | | | | |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "+---------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5131005002185702\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [29/50, 0/478] loss: 0.613949\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [29/50, 100/478] loss: 0.52192605\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [29/50, 200/478] loss: 0.34978276\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [29/50, 300/478] loss: 0.45305017\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [29/50, 400/478] loss: 0.57091963\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5118356017395854\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [30/50, 0/478] loss: 0.53044957\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [30/50, 100/478] loss: 0.41264832\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m 
[30/50, 200/478] loss: 0.32242486\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [30/50, 300/478] loss: 0.58155006\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [30/50, 400/478] loss: 0.56180137\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5159793404862285\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [31/50, 0/478] loss: 0.38457695\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [31/50, 100/478] loss: 0.42583048\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [31/50, 200/478] loss: 0.3703151\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [31/50, 300/478] loss: 0.48900938\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m [31/50, 400/478] loss: 0.5544582\n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.8734774066797643\n", + " date: 2022-06-22_17-17-07\n", + " done: false\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " hostname: turing\n", + " iterations_since_restore: 31\n", + " loss: 0.5199840003624558\n", + " mcc: 0.2594221902901971\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 34.96112275123596\n", + " time_this_iter_s: 1.0789408683776855\n", + " time_total_s: 34.96112275123596\n", + " timestamp: 1655914627\n", + " timesteps_since_restore: 0\n", + " training_iteration: 31\n", + " trial_id: aa136_00000\n", + " \n", + "Result for DEFAULT_aa136_00000:\n", + " accuracy: 0.8734774066797643\n", + " date: 2022-06-22_17-17-07\n", + " done: true\n", + " experiment_id: bf3da74770884a86a7dda00d055f8808\n", + " experiment_tag: 0_batch_size=32,dropout=0.49081,hidden_size=128,lr=0.00046412\n", + " hostname: turing\n", + " iterations_since_restore: 31\n", + " loss: 0.5199840003624558\n", + " mcc: 0.2594221902901971\n", + " node_ip: 192.168.85.234\n", + " pid: 2208011\n", + " should_checkpoint: true\n", + " time_since_restore: 34.96112275123596\n", + " time_this_iter_s: 
1.0789408683776855\n", + " time_total_s: 34.96112275123596\n", + " timestamp: 1655914627\n", + " timesteps_since_restore: 0\n", + " training_iteration: 31\n", + " trial_id: aa136_00000\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m The Current Loss: 0.5199840003624558\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Trigger Times: 2\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Early stopping!\n", + "\u001b[2m\u001b[36m(pid=2208011)\u001b[0m Start to test process.\n", + "\u001b[2m\u001b[36m(pid=2208004)\u001b[0m [1/50, 0/478] loss: 0.69428337\n", + "\u001b[2m\u001b[36m(pid=2208004)\u001b[0m [1/50, 100/478] loss: 0.63212794\n", + "\u001b[2m\u001b[36m(pid=2208004)\u001b[0m [1/50, 200/478] loss: 0.48083377\n", + "\u001b[2m\u001b[36m(pid=2208004)\u001b[0m [1/50, 300/478] loss: 0.61866051\n", + "\u001b[2m\u001b[36m(pid=2208004)\u001b[0m [1/50, 400/478] loss: 0.53016013\n", + "Result for DEFAULT_aa136_00001:\n", + " accuracy: 0.918664047151277\n", + " date: 2022-06-22_17-17-15\n", + " done: true\n", + " experiment_id: 1721fcb894184e038a2b58e283ed8073\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5306287368759512\n", + " mcc: 0.03622668226502021\n", + " node_ip: 192.168.85.234\n", + " pid: 2208004\n", + " should_checkpoint: true\n", + " time_since_restore: 1.5304410457611084\n", + " time_this_iter_s: 1.5304410457611084\n", + " time_total_s: 1.5304410457611084\n", + " timestamp: 1655914635\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00001\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.8/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=1\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5282449847087264\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 
accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (8 PENDING, 1 RUNNING, 1 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00001 | RUNNING | 192.168.85.234:2208004 | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | PENDING | | 32 | 0.322638 | 128 | 0.00370153 | | | | |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208004)\u001b[0m The Current Loss: 0.5306287368759512\n", + "\u001b[2m\u001b[36m(pid=2208004)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208024)\u001b[0m 
[1/50, 0/478] loss: 0.70231432\n", + "\u001b[2m\u001b[36m(pid=2208024)\u001b[0m [1/50, 100/478] loss: 0.42754757\n", + "\u001b[2m\u001b[36m(pid=2208024)\u001b[0m [1/50, 200/478] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(pid=2208024)\u001b[0m [1/50, 300/478] loss: 0.60594457\n", + "\u001b[2m\u001b[36m(pid=2208024)\u001b[0m [1/50, 400/478] loss: 0.52378786\n", + "Result for DEFAULT_aa136_00002:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-17-24\n", + " done: true\n", + " experiment_id: 0198bf72574e40daa95886ae0c8648a1\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5534285621717572\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208024\n", + " should_checkpoint: true\n", + " time_since_restore: 1.5712156295776367\n", + " time_this_iter_s: 1.5712156295776367\n", + " time_total_s: 1.5712156295776367\n", + " timestamp: 1655914644\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00002\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.8/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5306287368759512\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (7 PENDING, 1 RUNNING, 2 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + 
"|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00002 | RUNNING | 192.168.85.234:2208024 | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | PENDING | | 8 | 0.490939 | 128 | 0.0387667 | | | | |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208024)\u001b[0m The Current Loss: 0.5534285621717572\n", + "\u001b[2m\u001b[36m(pid=2208024)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 0/1909] loss: 0.67008007\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 100/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 200/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 400/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 500/1909] loss: 0.31326166\n", + 
"\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 600/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 900/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1000/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1100/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1200/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1400/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1500/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1600/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1700/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [1/50, 1900/1909] loss: 0.31326166\n", + "Result for DEFAULT_aa136_00003:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-17-35\n", + " done: false\n", + " experiment_id: 08ca7d292cd647298aacff2d1749de0d\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5150372937111128\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208019\n", + " should_checkpoint: true\n", + " time_since_restore: 4.103395938873291\n", + " time_this_iter_s: 4.103395938873291\n", + " time_total_s: 4.103395938873291\n", + " timestamp: 1655914655\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00003\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.7/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 
8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5198581408709287 | Iter 1.000: -0.5282449847087264\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (6 PENDING, 1 RUNNING, 3 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00003 | RUNNING | 192.168.85.234:2208019 | 8 | 0.490939 | 128 | 0.0387667 | 0.515037 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + 
"+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m The Current Loss: 0.5150372937111128\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 0/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 100/1909] loss: 0.88469028\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 200/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 400/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 500/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 600/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 700/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 800/1909] loss: 1.0191439\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 900/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1000/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1100/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1200/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1300/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1400/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1500/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1600/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1800/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [2/50, 1900/1909] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m The Current Loss: 0.5160677171015478\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 0/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 100/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 200/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 300/1909] loss: 0.88469028\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 400/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 500/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 600/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 700/1909] loss: 0.88469023\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 900/1909] loss: 1.0191439\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1000/1909] loss: 1.0191439\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1100/1909] loss: 0.88469034\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1200/1909] loss: 0.88469028\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1300/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1400/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1500/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1600/1909] loss: 0.88469028\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1700/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m [3/50, 1900/1909] loss: 0.31326166\n", + "Result for DEFAULT_aa136_00003:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-17-42\n", + " done: false\n", + " experiment_id: 
08ca7d292cd647298aacff2d1749de0d\n", + " hostname: turing\n", + " iterations_since_restore: 3\n", + " loss: 0.5190524836443657\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208019\n", + " should_checkpoint: true\n", + " time_since_restore: 11.546069145202637\n", + " time_this_iter_s: 3.718369483947754\n", + " time_total_s: 11.546069145202637\n", + " timestamp: 1655914662\n", + " timesteps_since_restore: 0\n", + " training_iteration: 3\n", + " trial_id: aa136_00003\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.7/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5179629289862382 | Iter 1.000: -0.5282449847087264\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (6 PENDING, 1 RUNNING, 3 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00003 | RUNNING | 192.168.85.234:2208019 | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | PENDING | | 8 | 0.359548 | 64 | 0.000422694 | | | | |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING 
| | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m The Current Loss: 0.5190524836443657\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m Trigger Times: 2\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m Early stopping!\n", + "\u001b[2m\u001b[36m(pid=2208019)\u001b[0m Start to test process.\n", + "Result for DEFAULT_aa136_00003:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-17-42\n", + " done: true\n", + " experiment_id: 08ca7d292cd647298aacff2d1749de0d\n", + " experiment_tag: 3_batch_size=8,dropout=0.49094,hidden_size=128,lr=0.038767\n", + " hostname: turing\n", + " iterations_since_restore: 3\n", + " loss: 0.5190524836443657\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208019\n", + " should_checkpoint: true\n", + " time_since_restore: 11.546069145202637\n", + " time_this_iter_s: 3.718369483947754\n", + " time_total_s: 11.546069145202637\n", + " timestamp: 1655914662\n", + " timesteps_since_restore: 0\n", + " training_iteration: 3\n", + " trial_id: aa136_00003\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 0/1909] loss: 0.68398356\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 100/1909] loss: 0.65455431\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 200/1909] loss: 0.67198902\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 300/1909] loss: 0.31944603\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 400/1909] loss: 0.69015938\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 500/1909] loss: 0.3253141\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 600/1909] loss: 0.33951581\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 700/1909] loss: 0.32468924\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 800/1909] loss: 0.33192191\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 900/1909] loss: 0.31599736\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1000/1909] loss: 0.68522739\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1100/1909] loss: 0.34424219\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1200/1909] loss: 0.31619164\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1300/1909] loss: 0.32234719\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1400/1909] loss: 0.31920925\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1500/1909] loss: 0.31443065\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1600/1909] loss: 0.37445819\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1700/1909] loss: 0.33593565\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1800/1909] loss: 0.32161525\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [1/50, 1900/1909] loss: 0.38747534\n", + "Result for DEFAULT_aa136_00004:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-17-53\n", + " done: false\n", + " experiment_id: 9db2ba91477c4732aaa9513da9922353\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5035767241774399\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208003\n", + " should_checkpoint: true\n", + " time_since_restore: 4.060023307800293\n", + " time_this_iter_s: 4.060023307800293\n", + " time_total_s: 
4.060023307800293\n", + " timestamp: 1655914673\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00004\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.7/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5179629289862382 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (5 PENDING, 1 RUNNING, 4 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00004 | RUNNING | 192.168.85.234:2208003 | 8 | 0.359548 | 64 | 0.000422694 | 0.503577 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 
| 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.5035767241774399\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 0/1909] loss: 0.70875561\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 100/1909] loss: 0.41695529\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 200/1909] loss: 0.69533414\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 300/1909] loss: 0.83182639\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 400/1909] loss: 0.6633538\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 500/1909] loss: 0.71081293\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 600/1909] loss: 0.54337001\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 700/1909] loss: 0.36294001\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 800/1909] loss: 0.74736476\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 900/1909] loss: 0.47151434\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1000/1909] loss: 0.35154155\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1100/1909] loss: 0.36750656\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1200/1909] loss: 0.45904592\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1300/1909] loss: 0.87586296\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1400/1909] loss: 0.31544429\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1500/1909] loss: 0.31472296\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1600/1909] 
loss: 0.44936061\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1700/1909] loss: 0.54483217\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1800/1909] loss: 0.34695929\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [2/50, 1900/1909] loss: 0.65770191\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.5053222407725767\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 0/1909] loss: 0.3147752\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 100/1909] loss: 0.71379668\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 200/1909] loss: 0.31918186\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 300/1909] loss: 0.56474459\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 400/1909] loss: 0.88373595\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 500/1909] loss: 0.37825263\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 600/1909] loss: 0.90131199\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 700/1909] loss: 0.71929431\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 800/1909] loss: 0.32879654\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 900/1909] loss: 0.53006828\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1000/1909] loss: 0.62563807\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1100/1909] loss: 0.35970885\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1200/1909] loss: 0.89042521\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1300/1909] loss: 0.31331393\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1400/1909] loss: 0.68706965\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1500/1909] loss: 0.3206926\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1600/1909] loss: 0.69969708\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1700/1909] loss: 0.7096954\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1800/1909] loss: 
0.81260461\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [3/50, 1900/1909] loss: 0.34662077\n", + "Result for DEFAULT_aa136_00004:\n", + " accuracy: 0.9168958742632612\n", + " date: 2022-06-22_17-18-00\n", + " done: false\n", + " experiment_id: 9db2ba91477c4732aaa9513da9922353\n", + " hostname: turing\n", + " iterations_since_restore: 3\n", + " loss: 0.5033538938972227\n", + " mcc: 0.13739013314664003\n", + " node_ip: 192.168.85.234\n", + " pid: 2208003\n", + " should_checkpoint: true\n", + " time_since_restore: 11.28352403640747\n", + " time_this_iter_s: 3.585029125213623\n", + " time_total_s: 11.28352403640747\n", + " timestamp: 1655914680\n", + " timesteps_since_restore: 0\n", + " training_iteration: 3\n", + " trial_id: aa136_00004\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.7/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (5 PENDING, 1 RUNNING, 4 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00004 | RUNNING | 192.168.85.234:2208003 | 8 | 0.359548 | 64 | 0.000422694 | 0.503354 | 0.916896 | 3 | 0.13739 |\n", + "| 
DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.5033538938972227\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 0/1909] loss: 0.31436396\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 100/1909] loss: 0.54196936\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 200/1909] loss: 0.36487147\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 300/1909] loss: 0.34199634\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 400/1909] loss: 0.77238423\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 500/1909] loss: 0.74420249\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 600/1909] loss: 0.31894442\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 700/1909] loss: 0.68594027\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 800/1909] loss: 0.50440079\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 900/1909] loss: 0.31961507\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1000/1909] loss: 0.7343356\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1100/1909] loss: 0.34699765\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1200/1909] loss: 0.32937354\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1300/1909] loss: 0.67715365\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1400/1909] loss: 0.39003402\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1500/1909] loss: 0.60228312\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1600/1909] loss: 0.37960491\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1700/1909] loss: 0.35150999\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1800/1909] loss: 0.34164754\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [4/50, 1900/1909] loss: 0.93063939\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.49655752569390244\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 0/1909] loss: 0.41142386\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 100/1909] loss: 0.69862109\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 200/1909] loss: 0.67390913\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 300/1909] loss: 0.79181093\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 400/1909] loss: 0.67876393\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 500/1909] loss: 0.72166073\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 600/1909] loss: 0.36819163\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 700/1909] loss: 0.67915255\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 800/1909] loss: 0.61058468\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 900/1909] loss: 0.33417651\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1000/1909] loss: 0.69475877\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1100/1909] loss: 0.31970543\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1200/1909] loss: 0.32186732\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1300/1909] loss: 0.31392026\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1400/1909] loss: 0.65978199\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1500/1909] loss: 0.40511304\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1600/1909] loss: 0.31360215\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1700/1909] loss: 0.45462072\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1800/1909] loss: 0.31690359\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [5/50, 1900/1909] loss: 0.68842429\n", + "Result for DEFAULT_aa136_00004:\n", + " accuracy: 0.8988212180746562\n", + " date: 2022-06-22_17-18-08\n", + " done: false\n", + " experiment_id: 9db2ba91477c4732aaa9513da9922353\n", + " hostname: turing\n", + " iterations_since_restore: 5\n", + " loss: 0.49496317868891465\n", + " mcc: 0.23384247617839465\n", + " node_ip: 192.168.85.234\n", + " pid: 2208003\n", + " should_checkpoint: true\n", + " time_since_restore: 18.605748414993286\n", + " time_this_iter_s: 3.6580545902252197\n", + " time_total_s: 18.605748414993286\n", + " timestamp: 1655914688\n", + " timesteps_since_restore: 0\n", + " training_iteration: 5\n", + " trial_id: aa136_00004\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.7/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5067546524850175 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (5 PENDING, 1 RUNNING, 4 
TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00004 | RUNNING | 192.168.85.234:2208003 | 8 | 0.359548 | 64 | 0.000422694 | 0.494963 | 0.898821 | 5 | 0.233842 |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.49496317868891465\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 0/1909] loss: 0.40820503\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 100/1909] loss: 0.68461478\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 200/1909] loss: 0.44859987\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 300/1909] loss: 0.87701851\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 400/1909] loss: 0.66608816\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 500/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 600/1909] loss: 0.92419398\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 700/1909] loss: 0.47409067\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 800/1909] loss: 0.66427922\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 900/1909] loss: 0.86008888\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1000/1909] loss: 0.33817148\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1100/1909] loss: 0.31880021\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1200/1909] loss: 0.89934963\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1300/1909] loss: 0.31590417\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1400/1909] loss: 0.34327754\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1500/1909] loss: 0.79430825\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1600/1909] loss: 0.32323244\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1700/1909] loss: 0.71911103\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1800/1909] loss: 0.97345763\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [6/50, 1900/1909] loss: 0.71252966\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.5021767178360297\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 0/1909] loss: 0.31491506\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 100/1909] loss: 0.67763573\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 200/1909] loss: 0.4954313\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 300/1909] loss: 0.31449455\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 400/1909] loss: 0.65830988\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 500/1909] loss: 0.35156921\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 600/1909] loss: 0.31333914\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 700/1909] loss: 0.43154514\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 800/1909] loss: 0.91074616\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 900/1909] loss: 0.56885535\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1000/1909] loss: 0.44441646\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1100/1909] loss: 0.31895372\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1200/1909] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1300/1909] loss: 0.31392407\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1400/1909] loss: 0.71750623\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1500/1909] loss: 0.66210467\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1600/1909] loss: 0.31415617\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1700/1909] loss: 0.69189507\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1800/1909] loss: 0.31364149\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [7/50, 1900/1909] loss: 0.3370606\n", + "Result for DEFAULT_aa136_00004:\n", + " accuracy: 0.9115913555992141\n", + " date: 2022-06-22_17-18-15\n", + " done: false\n", + " experiment_id: 9db2ba91477c4732aaa9513da9922353\n", + " hostname: turing\n", + " iterations_since_restore: 7\n", + " loss: 0.5000285096875914\n", + " mcc: 0.1905445117249078\n", + " node_ip: 192.168.85.234\n", + " pid: 2208003\n", + " should_checkpoint: true\n", + " time_since_restore: 26.118915796279907\n", + " time_this_iter_s: 3.6840596199035645\n", + " time_total_s: 26.118915796279907\n", + " timestamp: 1655914695\n", + " 
timesteps_since_restore: 0\n", + " training_iteration: 7\n", + " trial_id: aa136_00004\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5155486999079585 | Iter 4.000: -0.5067546524850175 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (5 PENDING, 1 RUNNING, 4 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00004 | RUNNING | 192.168.85.234:2208003 | 8 | 0.359548 | 64 | 0.000422694 | 0.500029 | 0.911591 | 7 | 0.190545 |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| 
DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.5000285096875914\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 0/1909] loss: 0.31341514\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 100/1909] loss: 0.3560918\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 200/1909] loss: 0.36302492\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 300/1909] loss: 0.32748044\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 400/1909] loss: 0.38115904\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 500/1909] loss: 0.39367732\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 600/1909] loss: 0.31813312\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 700/1909] loss: 0.37062904\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 800/1909] loss: 0.37323382\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 900/1909] loss: 0.67983204\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1000/1909] loss: 0.32330704\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1100/1909] loss: 0.36307621\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1200/1909] loss: 0.31388161\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1300/1909] loss: 0.33479542\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1400/1909] loss: 0.5424884\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1500/1909] loss: 0.85408533\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1600/1909] loss: 0.71939647\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1700/1909] loss: 0.76040608\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1800/1909] loss: 0.40375835\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [8/50, 1900/1909] loss: 0.32372844\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.4957240507872176\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 0/1909] loss: 0.34138921\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 100/1909] loss: 0.40748397\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 200/1909] loss: 0.32292104\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 300/1909] loss: 0.31711641\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 400/1909] loss: 0.88320732\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 500/1909] loss: 0.31602859\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 600/1909] loss: 0.67755437\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 700/1909] loss: 0.65664506\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 800/1909] loss: 0.32129991\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 900/1909] loss: 0.33259022\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1000/1909] loss: 0.33336315\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1100/1909] loss: 0.69583893\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1200/1909] loss: 0.53014237\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1300/1909] loss: 0.34682003\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1400/1909] loss: 0.31338561\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1500/1909] loss: 0.31464216\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1600/1909] loss: 0.39271018\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1700/1909] loss: 0.40700004\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1800/1909] loss: 0.31615716\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [9/50, 1900/1909] loss: 0.32247835\n", + "Result for DEFAULT_aa136_00004:\n", + " accuracy: 0.906876227897839\n", + " date: 2022-06-22_17-18-23\n", + " done: false\n", + " experiment_id: 9db2ba91477c4732aaa9513da9922353\n", + " hostname: turing\n", + " iterations_since_restore: 9\n", + " loss: 0.4928408688989876\n", + " mcc: 0.2006913646512015\n", + " node_ip: 192.168.85.234\n", + " pid: 2208003\n", + " should_checkpoint: true\n", + " time_since_restore: 33.445435523986816\n", + " time_this_iter_s: 3.6581828594207764\n", + " time_total_s: 33.445435523986816\n", + " timestamp: 1655914703\n", + " timesteps_since_restore: 0\n", + " training_iteration: 9\n", + " trial_id: aa136_00004\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.4928408688989876== Status ==\n", + "Memory usage on this node: 133.9/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5067546524850175 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (5 PENDING, 1 RUNNING, 4 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00004 | RUNNING | 192.168.85.234:2208003 | 8 | 0.359548 | 64 | 0.000422694 | 
0.492841 | 0.906876 | 9 | 0.200691 |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 0/1909] loss: 0.61973035\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 100/1909] loss: 0.32476935\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 200/1909] loss: 0.31807616\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 300/1909] loss: 0.69324201\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 400/1909] loss: 0.34648669\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 500/1909] loss: 0.31327733\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 600/1909] loss: 0.35047558\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 700/1909] loss: 0.88474888\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 800/1909] loss: 0.6679461\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m 
[10/50, 900/1909] loss: 0.76008499\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1000/1909] loss: 1.0170842\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1100/1909] loss: 0.36001807\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1200/1909] loss: 0.66237724\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1300/1909] loss: 0.44292209\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1400/1909] loss: 0.35735583\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1500/1909] loss: 0.67690301\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1600/1909] loss: 0.31447071\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1700/1909] loss: 0.31341305\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1800/1909] loss: 0.75480783\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [10/50, 1900/1909] loss: 0.68009144\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.4910005126119221\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 0/1909] loss: 0.40159076\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 100/1909] loss: 0.31495127\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 200/1909] loss: 0.31326377\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 300/1909] loss: 0.31479871\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 400/1909] loss: 0.31333426\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 500/1909] loss: 0.67959648\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 600/1909] loss: 0.31496313\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 700/1909] loss: 0.76853603\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 800/1909] loss: 0.71012187\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 900/1909] loss: 0.37415132\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1000/1909] loss: 1.0841097\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1100/1909] loss: 0.35331032\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1200/1909] loss: 0.55304283\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1300/1909] loss: 0.56194717\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1400/1909] loss: 0.88546044\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1500/1909] loss: 0.36644119\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1600/1909] loss: 0.32892126\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1700/1909] loss: 0.61663544\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1800/1909] loss: 0.40787116\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [11/50, 1900/1909] loss: 0.70931977\n", + "Result for DEFAULT_aa136_00004:\n", + " accuracy: 0.8856581532416503\n", + " date: 2022-06-22_17-18-30\n", + " done: false\n", + " experiment_id: 9db2ba91477c4732aaa9513da9922353\n", + " hostname: turing\n", + " iterations_since_restore: 11\n", + " loss: 0.4918521585138851\n", + " mcc: 0.2418515383816664\n", + " node_ip: 192.168.85.234\n", + " pid: 2208003\n", + " should_checkpoint: true\n", + " time_since_restore: 40.79284071922302\n", + " time_this_iter_s: 3.702777147293091\n", + " time_total_s: 40.79284071922302\n", + " timestamp: 1655914710\n", + " timesteps_since_restore: 0\n", + " training_iteration: 11\n", + " trial_id: aa136_00004\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.8/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5067546524850175 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (5 PENDING, 1 RUNNING, 4 
TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00004 | RUNNING | 192.168.85.234:2208003 | 8 | 0.359548 | 64 | 0.000422694 | 0.491852 | 0.885658 | 11 | 0.241852 |\n", + "| DEFAULT_aa136_00005 | PENDING | | 32 | 0.408204 | 32 | 0.00339182 | | | | |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.4918521585138851\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 0/1909] loss: 0.31588259\n", + 
"\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 100/1909] loss: 0.3134743\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 200/1909] loss: 0.3132754\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 300/1909] loss: 0.88063926\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 400/1909] loss: 0.43065497\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 500/1909] loss: 0.39083162\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 600/1909] loss: 0.5676105\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 700/1909] loss: 0.49730933\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 800/1909] loss: 0.31330308\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 900/1909] loss: 0.42935678\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1000/1909] loss: 0.31326255\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1100/1909] loss: 0.46957368\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1200/1909] loss: 0.31343764\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1300/1909] loss: 0.40425536\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1400/1909] loss: 0.67906767\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1500/1909] loss: 0.31676215\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1600/1909] loss: 0.6915589\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1700/1909] loss: 0.35664043\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1800/1909] loss: 0.32073495\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m [12/50, 1900/1909] loss: 0.48562756\n", + "Result for DEFAULT_aa136_00004:\n", + " accuracy: 0.9123772102161101\n", + " date: 2022-06-22_17-18-34\n", + " done: true\n", + " experiment_id: 9db2ba91477c4732aaa9513da9922353\n", + " experiment_tag: 4_batch_size=8,dropout=0.35955,hidden_size=64,lr=0.00042269\n", + " hostname: turing\n", + " iterations_since_restore: 12\n", + " loss: 0.4992354839712709\n", + " mcc: 0.17972734100149626\n", + 
" node_ip: 192.168.85.234\n", + " pid: 2208003\n", + " should_checkpoint: true\n", + " time_since_restore: 44.501025915145874\n", + " time_this_iter_s: 3.7081851959228516\n", + " time_total_s: 44.501025915145874\n", + " timestamp: 1655914714\n", + " timesteps_since_restore: 0\n", + " training_iteration: 12\n", + " trial_id: aa136_00004\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m The Current Loss: 0.4992354839712709\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m Trigger Times: 2\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m Early stopping!\n", + "\u001b[2m\u001b[36m(pid=2208003)\u001b[0m Start to test process.\n", + "\u001b[2m\u001b[36m(pid=2207995)\u001b[0m [1/50, 0/478] loss: 0.69591373\n", + "\u001b[2m\u001b[36m(pid=2207995)\u001b[0m [1/50, 100/478] loss: 0.4547897\n", + "\u001b[2m\u001b[36m(pid=2207995)\u001b[0m [1/50, 200/478] loss: 0.51869601\n", + "\u001b[2m\u001b[36m(pid=2207995)\u001b[0m [1/50, 300/478] loss: 0.35632953\n", + "\u001b[2m\u001b[36m(pid=2207995)\u001b[0m [1/50, 400/478] loss: 0.51474988\n", + "Result for DEFAULT_aa136_00005:\n", + " accuracy: 0.906679764243615\n", + " date: 2022-06-22_17-18-42\n", + " done: true\n", + " experiment_id: 014204bb0c8a4a82a0d2e7522782f0bc\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5351069306954741\n", + " mcc: 0.08902631341732872\n", + " node_ip: 192.168.85.234\n", + " pid: 2207995\n", + " should_checkpoint: true\n", + " time_since_restore: 1.587294340133667\n", + " time_this_iter_s: 1.587294340133667\n", + " time_total_s: 1.587294340133667\n", + " timestamp: 1655914722\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00005\n", + " \n", + "\u001b[2m\u001b[36m(pid=2207995)\u001b[0m The Current Loss: 0.5351069306954741\n", + "\u001b[2m\u001b[36m(pid=2207995)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Memory usage on this node: 133.7/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=3\n", + "Bracket: Iter 32.000: 
None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5067546524850175 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5282449847087264\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (4 PENDING, 1 RUNNING, 5 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00005 | RUNNING | 192.168.85.234:2207995 | 32 | 0.408204 | 32 | 0.00339182 | 0.535107 | 0.90668 | 1 | 0.0890263 |\n", + "| DEFAULT_aa136_00006 | PENDING | | 8 | 0.472529 | 256 | 0.00205751 | | | | |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | TERMINATED | | 8 | 0.359548 | 64 | 0.000422694 | 0.499235 | 0.912377 | 12 | 0.179727 |\n", 
+ "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 0/1909] loss: 0.69178051\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 100/1909] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 200/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 300/1909] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 400/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 500/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 600/1909] loss: 0.67689812\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 700/1909] loss: 0.31326255\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 800/1909] loss: 0.88467133\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 900/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1000/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1100/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1200/1909] loss: 0.88469023\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1300/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1400/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1500/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1600/1909] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [1/50, 1900/1909] loss: 0.31326166\n", + "Result for DEFAULT_aa136_00006:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-18-53\n", + " done: false\n", + 
" experiment_id: b2f33f44f32f4767aad26b857ea25fb1\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5195475489228636\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208016\n", + " should_checkpoint: true\n", + " time_since_restore: 4.2315428256988525\n", + " time_this_iter_s: 4.2315428256988525\n", + " time_total_s: 4.2315428256988525\n", + " timestamp: 1655914733\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00006\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.6/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=3\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5067546524850175 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (3 PENDING, 1 RUNNING, 6 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00006 | RUNNING | 192.168.85.234:2208016 | 8 | 0.472529 | 256 | 0.00205751 | 0.519548 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| 
DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | TERMINATED | | 8 | 0.359548 | 64 | 0.000422694 | 0.499235 | 0.912377 | 12 | 0.179727 |\n", + "| DEFAULT_aa136_00005 | TERMINATED | | 32 | 0.408204 | 32 | 0.00339182 | 0.535107 | 0.90668 | 1 | 0.0890263 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m The Current Loss: 0.5195475489228636\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 0/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 100/1909] loss: 0.67700142\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 200/1909] loss: 0.67689806\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 400/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 500/1909] loss: 0.88469023\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 600/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 700/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 900/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1000/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m 
[2/50, 1100/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1200/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1400/1909] loss: 0.88469023\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1500/1909] loss: 0.88469023\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1600/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [2/50, 1900/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m The Current Loss: 0.5141910875049454\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 0/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 100/1909] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 200/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 400/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 500/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 600/1909] loss: 0.88469028\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 900/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1000/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1100/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1200/1909] loss: 0.88469028\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 
1300/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1400/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1500/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1600/1909] loss: 0.88469034\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [3/50, 1900/1909] loss: 0.31326166\n", + "Result for DEFAULT_aa136_00006:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-19-01\n", + " done: false\n", + " experiment_id: b2f33f44f32f4767aad26b857ea25fb1\n", + " hostname: turing\n", + " iterations_since_restore: 3\n", + " loss: 0.5176651569981029\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208016\n", + " should_checkpoint: true\n", + " time_since_restore: 11.829198837280273\n", + " time_this_iter_s: 3.7828471660614014\n", + " time_total_s: 11.829198837280273\n", + " timestamp: 1655914741\n", + " timesteps_since_restore: 0\n", + " training_iteration: 3\n", + " trial_id: aa136_00006\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.6/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=3\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5067546524850175 | Iter 2.000: -0.5151294023032466 | Iter 1.000: -0.5258612325415015\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (3 PENDING, 1 RUNNING, 6 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size 
| dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00006 | RUNNING | 192.168.85.234:2208016 | 8 | 0.472529 | 256 | 0.00205751 | 0.517665 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00007 | PENDING | | 32 | 0.309119 | 256 | 0.0257685 | | | | |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | TERMINATED | | 8 | 0.359548 | 64 | 0.000422694 | 0.499235 | 0.912377 | 12 | 0.179727 |\n", + "| DEFAULT_aa136_00005 | TERMINATED | | 32 | 0.408204 | 32 | 0.00339182 | 0.535107 | 0.90668 | 1 | 0.0890263 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m The Current Loss: 0.5176651569981029\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m Trigger Times: 1\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 0/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 100/1909] loss: 0.88469023\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 200/1909] loss: 1.0191439\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 
300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 400/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 500/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 600/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 800/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 900/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1000/1909] loss: 0.676898\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1100/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1200/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1300/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1400/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1500/1909] loss: 1.0191438\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1600/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1700/1909] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1800/1909] loss: 0.67689794\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m [4/50, 1900/1909] loss: 0.676898\n", + "Result for DEFAULT_aa136_00006:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-19-05\n", + " done: true\n", + " experiment_id: b2f33f44f32f4767aad26b857ea25fb1\n", + " hostname: turing\n", + " iterations_since_restore: 4\n", + " loss: 0.5190524855157831\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2208016\n", + " should_checkpoint: true\n", + " time_since_restore: 15.782555103302002\n", + " time_this_iter_s: 3.9533562660217285\n", + " time_total_s: 15.782555103302002\n", + " timestamp: 1655914745\n", + " timesteps_since_restore: 0\n", + " training_iteration: 4\n", + " trial_id: 
aa136_00006\n", + " \n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m The Current Loss: 0.5190524855157831\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m Trigger Times: 2\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m Early stopping!\n", + "\u001b[2m\u001b[36m(pid=2208016)\u001b[0m Start to test process.\n", + "\u001b[2m\u001b[36m(pid=2207991)\u001b[0m [1/50, 0/478] loss: 0.69377691\n", + "\u001b[2m\u001b[36m(pid=2207991)\u001b[0m [1/50, 100/478] loss: 0.52378786\n", + "\u001b[2m\u001b[36m(pid=2207991)\u001b[0m [1/50, 200/478] loss: 0.52378786\n", + "\u001b[2m\u001b[36m(pid=2207991)\u001b[0m [1/50, 300/478] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(pid=2207991)\u001b[0m [1/50, 400/478] loss: 0.5237878\n", + "Result for DEFAULT_aa136_00007:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-19-13\n", + " done: true\n", + " experiment_id: 64e7083d3029478fad78776e1a911819\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5537177223712206\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2207991\n", + " should_checkpoint: true\n", + " time_since_restore: 1.6116507053375244\n", + " time_this_iter_s: 1.6116507053375244\n", + " time_total_s: 1.6116507053375244\n", + " timestamp: 1655914753\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00007\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.4/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=5\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5151294023032466 | Iter 1.000: -0.5282449847087264\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (2 PENDING, 1 RUNNING, 7 TERMINATED)\n", + 
"+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00007 | RUNNING | 192.168.85.234:2207991 | 32 | 0.309119 | 256 | 0.0257685 | 0.553718 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00008 | PENDING | | 16 | 0.451338 | 128 | 0.00277979 | | | | |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | TERMINATED | | 8 | 0.359548 | 64 | 0.000422694 | 0.499235 | 0.912377 | 12 | 0.179727 |\n", + "| DEFAULT_aa136_00005 | TERMINATED | | 32 | 0.408204 | 32 | 0.00339182 | 0.535107 | 0.90668 | 1 | 0.0890263 |\n", + "| DEFAULT_aa136_00006 | TERMINATED | | 8 | 0.472529 | 256 | 0.00205751 | 0.519052 | 0.921022 | 4 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2207991)\u001b[0m The Current Loss: 0.5537177223712206\n", + "\u001b[2m\u001b[36m(pid=2207991)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m 
[1/50, 0/955] loss: 0.69558269\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 100/955] loss: 0.67689806\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 200/955] loss: 0.52378803\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 300/955] loss: 0.67689806\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 400/955] loss: 0.52378803\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 500/955] loss: 0.52378803\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 600/955] loss: 0.79326177\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 700/955] loss: 0.67689806\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 800/955] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m [1/50, 900/955] loss: 0.52378809\n", + "Result for DEFAULT_aa136_00008:\n", + " accuracy: 0.9210216110019647\n", + " date: 2022-06-22_17-19-23\n", + " done: true\n", + " experiment_id: f5fcd3dc2d5d44ddbb353b8dba496665\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5416557300240269\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.234\n", + " pid: 2207962\n", + " should_checkpoint: true\n", + " time_since_restore: 2.4512779712677\n", + " time_this_iter_s: 2.4512779712677\n", + " time_total_s: 2.4512779712677\n", + " timestamp: 1655914763\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00008\n", + " \n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m The Current Loss: 0.5416557300240269\n", + "\u001b[2m\u001b[36m(pid=2207962)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Memory usage on this node: 133.2/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=6\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5151294023032466 | Iter 1.000: -0.5306287368759512\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 
accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (1 PENDING, 1 RUNNING, 8 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00008 | RUNNING | 192.168.85.234:2207962 | 16 | 0.451338 | 128 | 0.00277979 | 0.541656 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00009 | PENDING | | 16 | 0.413709 | 64 | 0.000935155 | | | | |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | TERMINATED | | 8 | 0.359548 | 64 | 0.000422694 | 0.499235 | 0.912377 | 12 | 0.179727 |\n", + "| DEFAULT_aa136_00005 | TERMINATED | | 32 | 0.408204 | 32 | 0.00339182 | 0.535107 | 0.90668 | 1 | 0.0890263 |\n", + "| DEFAULT_aa136_00006 | TERMINATED | | 8 | 0.472529 | 256 | 0.00205751 | 0.519052 | 0.921022 | 4 | 0 |\n", + "| DEFAULT_aa136_00007 | TERMINATED | | 32 | 0.309119 | 256 | 0.0257685 | 0.553718 | 0.921022 | 1 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + 
"\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 0/955] loss: 0.69644594\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 100/955] loss: 0.52891278\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 200/955] loss: 0.3275187\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 300/955] loss: 0.74556661\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 400/955] loss: 0.67437965\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 500/955] loss: 0.75605094\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 600/955] loss: 0.66877556\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 700/955] loss: 0.32982174\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 800/955] loss: 0.40903068\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [1/50, 900/955] loss: 0.43993327\n", + "Result for DEFAULT_aa136_00009:\n", + " accuracy: 0.9204322200392927\n", + " date: 2022-06-22_17-19-32\n", + " done: false\n", + " experiment_id: a9607a16f4c6490a9866e7f0151be9ec\n", + " hostname: turing\n", + " iterations_since_restore: 1\n", + " loss: 0.5221678679079098\n", + " mcc: 0.05734360217358698\n", + " node_ip: 192.168.85.234\n", + " pid: 2207967\n", + " should_checkpoint: true\n", + " time_since_restore: 2.4979469776153564\n", + " time_this_iter_s: 2.4979469776153564\n", + " time_total_s: 2.4979469776153564\n", + " timestamp: 1655914772\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: aa136_00009\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.2/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=6\n", + "Bracket: Iter 32.000: None | Iter 16.000: -0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5151294023032466 | Iter 1.000: -0.5282449847087264\n", + "Resources requested: 2.0/80 CPUs, 2.0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", 
+ "Number of trials: 10/10 (1 RUNNING, 9 TERMINATED)\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00009 | RUNNING | 192.168.85.234:2207967 | 16 | 0.413709 | 64 | 0.000935155 | 0.522168 | 0.920432 | 1 | 0.0573436 |\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | TERMINATED | | 8 | 0.359548 | 64 | 0.000422694 | 0.499235 | 0.912377 | 12 | 0.179727 |\n", + "| DEFAULT_aa136_00005 | TERMINATED | | 32 | 0.408204 | 32 | 0.00339182 | 0.535107 | 0.90668 | 1 | 0.0890263 |\n", + "| DEFAULT_aa136_00006 | TERMINATED | | 8 | 0.472529 | 256 | 0.00205751 | 0.519052 | 0.921022 | 4 | 0 |\n", + "| DEFAULT_aa136_00007 | TERMINATED | | 32 | 0.309119 | 256 | 0.0257685 | 0.553718 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00008 | TERMINATED | | 16 | 0.451338 | 128 | 0.00277979 | 0.541656 | 0.921022 | 1 | 0 |\n", + "+---------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m The Current Loss: 
0.5221678679079098\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 0/955] loss: 0.77765018\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 100/955] loss: 0.3411535\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 200/955] loss: 0.46019769\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 300/955] loss: 0.32868737\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 400/955] loss: 0.6103701\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 500/955] loss: 0.34553358\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 600/955] loss: 0.31951815\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 700/955] loss: 0.59231275\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 800/955] loss: 0.38451046\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m [2/50, 900/955] loss: 0.31994426\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2022-06-22 17:19:34,674\tINFO tune.py:561 -- Total run time: 189.09 seconds (188.86 seconds for the tuning loop).\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Result for DEFAULT_aa136_00009:\n", + " accuracy: 0.8713163064833006\n", + " date: 2022-06-22_17-19-34\n", + " done: true\n", + " experiment_id: a9607a16f4c6490a9866e7f0151be9ec\n", + " hostname: turing\n", + " iterations_since_restore: 2\n", + " loss: 0.5211453443410629\n", + " mcc: 0.1769352913273523\n", + " node_ip: 192.168.85.234\n", + " pid: 2207967\n", + " should_checkpoint: true\n", + " time_since_restore: 4.441319465637207\n", + " time_this_iter_s: 1.9433724880218506\n", + " time_total_s: 4.441319465637207\n", + " timestamp: 1655914774\n", + " timesteps_since_restore: 0\n", + " training_iteration: 2\n", + " trial_id: aa136_00009\n", + " \n", + "== Status ==\n", + "Memory usage on this node: 133.2/754.3 GiB\n", + "Using AsyncHyperBand: num_stopped=7\n", + "Bracket: Iter 32.000: None | Iter 16.000: 
-0.5176659166812897 | Iter 8.000: -0.5056363753475881 | Iter 4.000: -0.5169517792761326 | Iter 2.000: -0.5160677171015478 | Iter 1.000: -0.5282449847087264\n", + "Resources requested: 0/80 CPUs, 0/2 GPUs, 0.0/437.51 GiB heap, 0.0/186.26 GiB objects (0.0/1.0 accelerator_type:G)\n", + "Result logdir: /home/jabreu/ray_results/DEFAULT_2022-06-22_17-16-25\n", + "Number of trials: 10/10 (10 TERMINATED)\n", + "+---------------------+------------+-------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|---------------------+------------+-------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------|\n", + "| DEFAULT_aa136_00000 | TERMINATED | | 32 | 0.490806 | 128 | 0.000464116 | 0.519984 | 0.873477 | 31 | 0.259422 |\n", + "| DEFAULT_aa136_00001 | TERMINATED | | 32 | 0.363243 | 64 | 0.000837212 | 0.530629 | 0.918664 | 1 | 0.0362267 |\n", + "| DEFAULT_aa136_00002 | TERMINATED | | 32 | 0.322638 | 128 | 0.00370153 | 0.553429 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00003 | TERMINATED | | 8 | 0.490939 | 128 | 0.0387667 | 0.519052 | 0.921022 | 3 | 0 |\n", + "| DEFAULT_aa136_00004 | TERMINATED | | 8 | 0.359548 | 64 | 0.000422694 | 0.499235 | 0.912377 | 12 | 0.179727 |\n", + "| DEFAULT_aa136_00005 | TERMINATED | | 32 | 0.408204 | 32 | 0.00339182 | 0.535107 | 0.90668 | 1 | 0.0890263 |\n", + "| DEFAULT_aa136_00006 | TERMINATED | | 8 | 0.472529 | 256 | 0.00205751 | 0.519052 | 0.921022 | 4 | 0 |\n", + "| DEFAULT_aa136_00007 | TERMINATED | | 32 | 0.309119 | 256 | 0.0257685 | 0.553718 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00008 | TERMINATED | | 16 | 0.451338 | 128 | 0.00277979 | 0.541656 | 0.921022 | 1 | 0 |\n", + "| DEFAULT_aa136_00009 | TERMINATED | | 16 | 0.413709 | 64 | 0.000935155 | 0.521145 | 0.871316 | 2 | 
0.176935 |\n", + "+---------------------+------------+-------+--------------+-----------+---------------+-------------+----------+------------+----------------------+-----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m The Current Loss: 0.5211453443410629\n", + "\u001b[2m\u001b[36m(pid=2207967)\u001b[0m trigger times: 0\n", + "Best trial config: {'hidden_size': 128, 'lr': 0.0004641155577241009, 'batch_size': 32, 'dropout': 0.49080636243238274}\n", + "Best trial final validation loss: 0.5199840003624558\n", + "Best trial final validation accuracy: 0.8734774066797643\n", + "Best trial final validation mcc: 0.2594221902901971\n", + "Results in test set:\n", + "--------------------\n", + "Accuracy: 0.899\n", + "MCC: 0.372\n", + "[[4385 303]\n", + " [ 212 190]]\n" + ] + } + ], + "source": [ + "hyperparameter_tuning(device, fixed_vals, config)" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "ba449ea13c29f64a91968d8f927cecceedd6e605eda30388903386e6cd94168d" + }, + "kernelspec": { + "display_name": "Python 3.8.13 ('dna-conda': conda)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.13" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/propythia/DNA/essential_genes/DeepHE_stats.ipynb b/src/propythia/DNA/essential_genes/DeepHE_stats.ipynb new file mode 100644 index 0000000..2139e8e --- /dev/null +++ b/src/propythia/DNA/essential_genes/DeepHE_stats.ipynb @@ -0,0 +1,484 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import matplotlib.pyplot as plt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Statistics about the data, 
including:\n", + "* Total sequences\n", + "* Top 5 longest and shortest sequences\n", + "* Average length of sequences\n", + "* Top 5 most and least common sequence length" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "def stats(dataset, name):\n", + " dataset[\"length\"].hist(bins=100)\n", + " \n", + " # save image as pdf\n", + " plt.savefig(f\"{name}_length_hist.pdf\")\n", + " plt.show()\n", + "\n", + " print(\"-\" * 40)\n", + "\n", + " print(\"Total sequences:\", dataset.shape[0])\n", + "\n", + " print(\"-\" * 40)\n", + "\n", + " print(\"Top 5 longest sequences:\")\n", + " print(\"id length\")\n", + " print(dataset[\"length\"].nlargest(5).to_string())\n", + "\n", + " print(\"-\" * 40)\n", + "\n", + " print(\"Top 5 shortest sequences:\")\n", + " print(\"id length\")\n", + " print(dataset[\"length\"].nsmallest(5).to_string())\n", + "\n", + " print(\"-\" * 40)\n", + "\n", + " average_length = dataset[\"length\"].mean()\n", + " print(\"Average length:\", average_length)\n", + "\n", + " print(\"-\" * 40)\n", + "\n", + " print(\"Top 5 most common lengths:\")\n", + " print(\"length count\")\n", + " print(dataset[\"length\"].value_counts().nlargest(5).to_string())\n", + " \n", + " print(\"-\" * 40)\n", + "\n", + " print(\"Top 5 least common lengths:\")\n", + " print(\"length count\")\n", + " print(dataset[\"length\"].value_counts().nsmallest(5).to_string())" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(2010, 2)\n", + "(15888, 2)\n" + ] + } + ], + "source": [ + "positive = pd.read_csv(\"../datasets/essential_genes/positive.csv\")\n", + "print(positive.shape)\n", + "\n", + "negative = pd.read_csv(\"../datasets/essential_genes/essential_genes_negative.csv\")\n", + "print(negative.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "15888 15888 unique negative ids\n", + "(15179, 2)\n" + ] + } + ], + "source": [ + "negative_ids = negative[\"id\"].values\n", + "unique_negative_ids = set(negative_ids)\n", + "print(len(negative_ids), len(unique_negative_ids), \"unique negative ids\")\n", + "\n", + "# from Bio.SeqIO.FastaIO import SimpleFastaParser\n", + "# def read_fasta(filename):\n", + "# d = {}\n", + "# with open(filename) as handle:\n", + "# for key, sequence in SimpleFastaParser(handle):\n", + "# sequence = sequence.upper()\n", + "# if(sequence != \"SEQUENCEUNAVAILABLE\"):\n", + "# key = key.split(\"|\")[0]\n", + "# d[key] = sequence\n", + "# print(len(d), \"keys\", len(d.values()), \"seqs\", len(set(d.values())), \"unique\")\n", + "# return d\n", + "\n", + "# d = read_fasta(\"../datasets/essential_genes/mart_export_unspliced.fa\")\n", + "\n", + "# # replace negatives sequences with sequence from the dict\n", + "# negative[\"sequence\"] = negative[\"id\"].apply(lambda x: d[x])\n", + "\n", + "# remove rows with repeating sequences\n", + "negative = negative.drop_duplicates(subset=[\"sequence\"])\n", + "print(negative.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(15137, 2)\n", + "(2010, 2)\n" + ] + } + ], + "source": [ + "# remove sequences with letters different from A, C, G, T\n", + "negative = negative[negative[\"sequence\"].str.contains(\"^[ACGT]+$\")]\n", + "positive = positive[positive[\"sequence\"].str.contains(\"^[ACGT]+$\")]\n", + "\n", + "print(negative.shape)\n", + "print(positive.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYUAAAD4CAYAAAAD6PrjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAWZklEQVR4nO3dfZBdd33f8fendjDgBT/UsKPKbmQygtZYaYK2LikNs1uHYB6K3Da08jip3LqjSWsoaWEau8yU/OOpEwodOm6aUWOPRU28OAZqlwwpjsrG05kYxzIG+QHHAquOZGGVJ8ESairy7R/37OF6vau92vuw99rv14zmnvM7v3vOZ4+v97u/83RTVUiSBPAXNjqAJGl8WBQkSS2LgiSpZVGQJLUsCpKk1ukbHQDgvPPOqy1btvTc/3vf+x5nnnnm8AINiblHaxJzT2JmMPeoLeXev3//16vqFQNdeVVt+L/t27fXqfjc5z53Sv3HhblHaxJzT2LmKnOP2lJu4P4a8O9jDx9JkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqSWRUGS1LIoSJJaY/GYi2HZcu3vtdOHbnjbBiaRpMngSEGS1LIoSJJaFgVJUsuiIElqrVkUktyc5FiSh5a1vzvJY0keTvIbXe3XJTnYLHvzMEJLkoajl6uPbgFuBD661JBkDtgB/GRVPZPklU37RcBO4LXAXwL+IMmrq+qHgw4uSRq8NUcKVXUP8M1lzf8MuKGqnmn6HGvadwDzVfVMVT0BHAQuGWBeSdIQpfPlPWt0SrYAn66qi5v5B4E7gcuA/wu8r6r+OMmNwL1VdWvT7ybgM1V1xwrr3A3sBpient4+Pz/fc+jFxUWmpqbW7HfgyPF2etvms3pe/7D0mnvcmHt0JjEzmHvUlnLPzc3tr6qZQa57vTevnQ6cA7we+OvA7UleBWSFvitWnaraA+wBmJmZqdnZ2Z43vrCwQC/9r+q+ee3K3tc/LL3mHjfmHp1JzAzmHrVh5l7v1UeHgU82Xxd6H/DnwHlN+wVd/c4HnuovoiRpVNY7UvhvwN8GFpK8GngR8HXgLuB3knyYzonmrcB9A8g5UD7+QpJWtmZRSHIbMAucl+Qw8AHgZuDm5jLVHwC7qnNy4uEktwOPACeAa7zySJImx5pFoaquWGXRL67S/3rg+n5CSZI2hnc0S5JaFgVJUut5/X0K3bpPLkuSVuZIQZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqSWRUGS1LIoSJJaFgVJUsuiIElqPe8eiOeD7yRp/dYcKSS5Ocmx5lvWli97X5JKcl5X23VJDiZ5LMmbBx1YkjQ8vYwUbgFuBD7a3ZjkAuBNwJNdbRcBO4HX0vmO5j9I8upx/kpOv69Zkn5kzZFCVd0DfHOFRf8B+NdAdbXtAOar6pmqegI4CFwyiKCSpOFLVa3dKdkCfLqqLm7m3wFcWlXvSXIImKmqrye5Ebi3qm5t+t0EfKaq7lhhnbuB3QDT09Pb5+fnew69uLjI1NTUissOHDne83qW27b5rBXX093ej5PlHmfmHp1JzAzmHrWl3HNzc/uramaQ6z7lE81JXgq8H/j5lRav0LZi1amqPcAegJmZmZqdne05w8LCAqv1v6qPE82HrvzROrvX093ej5PlHmfmHp1JzAzmHrVh5l7P1Uc/AVwIfDEJwPnAA0kuAQ4DF3T1PR94qt+QkqTROOX7FKrqQFW9sqq2VNUWOoXgdVX1NeAuYGeSM5JcCGwF7htoYknS0PRySeptwB8Br0lyOMnVq/WtqoeB24FHgN8HrhnnK48kSc+25uGjqrpijeVbls1fD1zfXyxJ0kbwMReSpJZFQZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqTW8+7rOPvhV3lKeqFzpCBJalkUJEkti4IkqWVRkCS1LAqSpJZFQZLU6uWb125OcizJQ11tH0zy5SRfSvKpJGd
3LbsuycEkjyV585ByS5KGoJeRwi3AZcva7gYurqqfBP4EuA4gyUXATuC1zXt+M8lpA0srSRqqNYtCVd0DfHNZ22er6kQzey9wfjO9A5ivqmeq6gngIHDJAPNKkoYoVbV2p2QL8OmquniFZf8d+HhV3ZrkRuDeqrq1WXYT8JmqumOF9+0GdgNMT09vn5+f7zn04uIiU1NTKy47cOR4z+vp1bbNZw1kPSfLPc7MPTqTmBnMPWpLuefm5vZX1cwg193XYy6SvB84AXxsqWmFbitWnaraA+wBmJmZqdnZ2Z63u7CwwGr9rxrCoyoOXbnytk7VyXKPM3OPziRmBnOP2jBzr7soJNkFvB24tH403DgMXNDV7XzgqfXHkySN0rouSU1yGfCrwDuq6s+6Ft0F7ExyRpILga3Aff3HlCSNwpojhSS3AbPAeUkOAx+gc7XRGcDdSaBzHuGXq+rhJLcDj9A5rHRNVf1wWOElSYO1ZlGoqitWaL7pJP2vB67vJ5QkaWN4R7MkqWVRkCS1LAqSpJZFQZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqRWX9+n8EKxpes7Gg7d8LYNTCJJw+VIQZLUsihIkloWBUlSy6IgSWqtWRSS3JzkWJKHutrOTXJ3kseb13O6ll2X5GCSx5K8eVjBJUmD18tI4RbgsmVt1wL7qmorsK+ZJ8lFwE7gtc17fjPJaQNLK0kaqjWLQlXdA3xzWfMOYG8zvRe4vKt9vqqeqaongIPAJYOJKkkatlTV2p2SLcCnq+riZv7bVXV21/JvVdU5SW4E7q2qW5v2m4DPVNUdK6xzN7AbYHp6evv8/HzPoRcXF5mamlpx2YEjx3tez3ps23zWut97stzjzNyjM4mZwdyjtpR7bm5uf1XNDHLdg755LSu0rVh1qmoPsAdgZmamZmdne97IwsICq/W/qutGs2E4dOXK2+3FyXKPM3OPziRmBnOP2jBzr/fqo6eTbAJoXo817YeBC7r6nQ88tf54kqRRWm9RuAvY1UzvAu7sat+Z5IwkFwJbgfv6iyhJGpU1Dx8luQ2YBc5Lchj4AHADcHuSq4EngXcCVNXDSW4HHgFOANdU1Q+HlF2SNGBrFoWqumKVRZeu0v964Pp+QkmSNoZPST1FPjFV0vOZj7mQJLUsCpKklkVBktSyKEiSWhYFSVLLoiBJalkUJEkti4IkqWVRkCS1LAqSpJZFQZLUsihIkloWBUlSy6IgSWr1VRSS/MskDyd5KMltSV6c5Nwkdyd5vHk9Z1BhJUnDte6ikGQz8C+Amaq6GDgN2AlcC+yrqq3AvmZekjQB+j18dDrwkiSnAy8FngJ2AHub5XuBy/vchiRpRNZdFKrqCPDv6XxH81HgeFV9FpiuqqNNn6PAKwcRVJI0fKmq9b2xc67gE8A/BL4N/C5wB3BjVZ3d1e9bVfWc8wpJdgO7Aaanp7fPz8/3vO3FxUWmpqZWXHbgyPGe19OvbZvPOqX+J8s9zsw9OpOYGcw9aku55+bm9lfVzCDX3c93NP8c8ERV/R+AJJ8E/ibwdJJNVXU0ySbg2Epvrqo9wB6AmZmZmp2d7XnDCwsLrNb/qq7vUB62Q1eunGE1J8s9zsw9OpOYGcw9asPM3c85hSeB1yd5aZIAlwKPAncBu5o+u4A7+4soSRqVdY8UqurzSe4AHgBOAF+g85f/FHB7kqvpFI53DiKoJGn4+jl8RFV9APjAsuZn6IwaJEkTxjuaJUkti4IkqWVRkCS1LAqSpJZFQZLU6uvqoxe6LV03yh264W0bmESSBsORgiSp5UhhCBxBSJpUjhQkSS2LgiSpZVGQJLWeF+cUtozwcdmS9HzmSEGS1LIoSJJaFgVJUsuiIElq9VUUkpyd5I4kX07yaJKfSXJukruTPN68njOosJKk4ep3pPAR4Per6q8Af43OdzRfC+yrqq3AvmZekjQB1l0UkrwceCNwE0BV/aCqvg3sAPY23fYCl/c
XUZI0Kqmq9b0x+SlgD/AInVHCfuA9wJGqOrur37eq6jmHkJLsBnYDTE9Pb5+fn+9524uLi0xNTbXzB44cX9fPMEjbNp/VTnfn6W5fnntSmHt0JjEzmHvUlnLPzc3tr6qZQa67n6IwA9wLvKGqPp/kI8B3gHf3UhS6zczM1P3339/zthcWFpidnW3nx+Hmte4H3632QLzluSeFuUdnEjODuUdtKXeSgReFfu5oPgwcrqrPN/N30Dl/8HSSTVV1NMkm4Fi/ISfBOBQmSerXuotCVX0tyZ8meU1VPQZcSudQ0iPALuCG5vXOgSSdUN3F4pbLztzAJJK0tn6fffRu4GNJXgR8FfjHdE5e357kauBJ4J19bkOSNCJ9FYWqehBY6XjWpf2sV5K0MbyjWZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktfq9eU3rtNrzkSRpIzlSkCS1HCmM0IEjx7nKB+dJGmOOFCRJLYuCJKllUZAktSwKkqSWRUGS1LIoSJJafReFJKcl+UKSTzfz5ya5O8njzes5/ceUJI3CIEYK7wEe7Zq/FthXVVuBfc28JGkC9FUUkpwPvA347a7mHcDeZnovcHk/25AkjU6qav1vTu4A/h3wMuB9VfX2JN+uqrO7+nyrqp5zCCnJbmA3wPT09Pb5+fmet7u4uMjU1FQ7f+DI8XX/DKM0/RJ4+vvPbd+2+azRhzkFy/f3pJjE3JOYGcw9aku55+bm9lfVzCDXve7HXCR5O3CsqvYnmT3V91fVHmAPwMzMTM3O9r6KhYUFuvtPyqMj3rvtBB868NxdfujK2dGHOQXL9/ekmMTck5gZzD1qw8zdz7OP3gC8I8lbgRcDL09yK/B0kk1VdTTJJuDYIIJKkoZv3UWhqq4DrgNoRgrvq6pfTPJBYBdwQ/N6Z/8xn9+2rDLS8ZHakkZtGPcp3AC8KcnjwJuaeUnSBBjIo7OragFYaKa/AVw6iPVKkkbLO5olSS2LgiSpZVGQJLUsCpKklkVBktSyKEiSWhYFSVLLoiBJalkUJEkti4IkqTWQx1xoOLoflOfD8SSNgiMFSVLLoiBJalkUJEktzylMiNW+iGc5zz1I6ocjBUlSa91FIckFST6X5NEkDyd5T9N+bpK7kzzevJ4zuLiSpGHqZ6RwAnhvVf1V4PXANUkuAq4F9lXVVmBfMy9JmgDrLgpVdbSqHmimvws8CmwGdgB7m257gcv7zChJGpFUVf8rSbYA9wAXA09W1dldy75VVc85hJRkN7AbYHp6evv8/HzP21tcXGRqaqqdP3Dk+Hqjj9T0S+Dp7w93G9s2nzXwdS7f35NiEnNPYmYw96gt5Z6bm9tfVTODXHffRSHJFPCHwPVV9ckk3+6lKHSbmZmp+++/v+dtLiwsMDs72873emXORnvvthN86MBwL/gaxtVHy/f3pJjE3JOYGcw9aku5kwy8KPR19VGSHwM+AXysqj7ZND+dZFOzfBNwrL+IkqRR6efqowA3AY9W1Ye7Ft0F7GqmdwF3rj+eJGmU+jmW8Qbgl4ADSR5s2v4NcANwe5KrgSeBd/aVUJI0MusuClX1v4CssvjS9a5X/fHJqpL64R3NkqSWRUGS1LIoSJJaPiX1eczzC5JOlUXhBcICIakXFoUXOIuFpG6eU5AktSwKkqSWRUGS1PKcgtZ04MhxrmrOPax23mEY5yY83yGNnkXhBWhSHjW+GouFNDwWBW0If7FL48lzCpKkliMFDdypjgIm/XCW9HxiUdC69fLLfNiHiVZb/5Zrf4/3bjux4glyD11Jq7MoqLXaL/n3blu7z7CdagFaz/slDbEoJLkM+AhwGvDbVXXDsLalyTCOv5hPNtJYST+Hw0Y5WnE0pPUayonmJKcB/wl4C3ARcEWSi4axLUnS4AxrpHAJcLCqvgqQZB7YATwypO1JfetnJDOoUVD3jYLdhvXXfi8jpV5uWLzlsjNPaVvdTnX0NYz+o1rXMNc5KKmqwa80+QX
gsqr6p838LwF/o6re1dVnN7C7mX0N8NgpbOI84OsDijtK5h6tScw9iZnB3KO2lPvHq+oVg1zxsEYKWaHtWdWnqvYAe9a18uT+qppZz3s3krlHaxJzT2JmMPeoDTP3sG5eOwxc0DV/PvDUkLYlSRqQYRWFPwa2JrkwyYuAncBdQ9qWJGlAhnL4qKpOJHkX8D/oXJJ6c1U9PMBNrOuw0xgw92hNYu5JzAzmHrWh5R7KiWZJ0mTygXiSpJZFQZLUmqiikOSyJI8lOZjk2jHIc0GSzyV5NMnDSd7TtP9akiNJHmz+vbXrPdc1+R9L8uau9u1JDjTL/mOSlS7rHWT2Q832Hkxyf9N2bpK7kzzevJ4zTrmTvKZrnz6Y5DtJfmUc93eSm5McS/JQV9vA9m+SM5J8vGn/fJItQ8r8wSRfTvKlJJ9KcnbTviXJ97v2+W9tROaT5B7YZ2LEuT/elflQkgeb9tHt76qaiH90Tlh/BXgV8CLgi8BFG5xpE/C6ZvplwJ/QeazHrwHvW6H/RU3uM4ALm5/ntGbZfcDP0LnH4zPAW4ac/RBw3rK23wCubaavBX593HIv+zx8DfjxcdzfwBuB1wEPDWP/Av8c+K1meifw8SFl/nng9Gb617syb+nut2w9I8t8ktwD+0yMMvey5R8C/u2o9/ckjRTaR2dU1Q+ApUdnbJiqOlpVDzTT3wUeBTaf5C07gPmqeqaqngAOApck2QS8vKr+qDr/BT8KXD7c9Kvm29tM7+3KMI65LwW+UlX/+yR9Nix3Vd0DfHOFPIPav93rugO4tN/RzkqZq+qzVXWimb2Xzj1Hqxp15tVyn8RY7Ou1cjfr/wfAbSdbxzByT1JR2Az8adf8YU7+C3ikmqHZTwOfb5re1Qy5b+46TLDaz7C5mV7ePkwFfDbJ/nQeOQIwXVVHoVPwgFc27eOUe8lOnv0/zLjvbxjs/m3f0/zSPg78xaEl7/gndP4SXXJhki8k+cMkP9uVa1wyD+ozsRH7+meBp6vq8a62kezvSSoKaz46Y6MkmQI+AfxKVX0H+M/ATwA/BRylMwyE1X+GjfjZ3lBVr6PzJNtrkrzxJH3HKTfp3BD5DuB3m6ZJ2N8ns56cI/0ZkrwfOAF8rGk6Cvzlqvpp4F8Bv5Pk5WvkGmXmQX4mNuLzcgXP/qNnZPt7korCWD46I8mP0SkIH6uqTwJU1dNV9cOq+nPgv9A59AWr/wyHefawfOg/W1U91bweAz7VZHy6GY4uDUuPjVvuxluAB6rqaZiM/d0Y5P5t35PkdOAsej+EckqS7ALeDlzZHKKgOfzyjWZ6P51j868el8wD/kyMLHfXNv4e8PGltlHu70kqCmP36Izm+NxNwKNV9eGu9k1d3f4usHR1wV3AzuaqgAuBrcB9zaGE7yZ5fbPOfwTcOcTcZyZ52dI0nZOJDzX5djXddnVlGIvcXZ71V9S47+8ug9y/3ev6BeB/Lv3CHqR0vizrV4F3VNWfdbW/Ip3vTSHJq5rMXx2HzE2mQX4mRpa78XPAl6uqPSw00v19KmfLN/of8FY6V/h8BXj/GOT5W3SGY18CHmz+vRX4r8CBpv0uYFPXe97f5H+MritegBk6H9yvADfS3G0+pNyvonMFxheBh5f2JZ3jjfuAx5vXc8cpd7O9lwLfAM7qahu7/U2naB0F/h+dv9iuHuT+BV5M5/DZQTpXn7xqSJkP0jkuvfT5Xrqa5e83n50vAg8Af2cjMp8k98A+E6PM3bTfAvzysr4j298+5kKS1Jqkw0eSpCGzKEiSWhYFSVLLoiBJalkUJEkti4IkqWVRkCS1/j9Wd8qbXEl8dAAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----------------------------------------\n", + "Total sequences: 2010\n", + "----------------------------------------\n", + "Top 5 longest sequences:\n", + "id length\n", + "676 16791\n", + "248 15615\n", + "204 14574\n", + "491 13941\n", + "31 13167\n", + "----------------------------------------\n", + "Top 5 shortest sequences:\n", + "id length\n", + "1112 192\n", + "1427 195\n", + "1500 204\n", + "1611 204\n", + "1346 210\n", + "----------------------------------------\n", + "Average length: 1903.5980099502488\n", + "----------------------------------------\n", + "Top 5 most common lengths:\n", + "length count\n", + "1083 9\n", + "1041 8\n", + "1314 7\n", + "618 7\n", + "1371 7\n", + "----------------------------------------\n", + "Top 5 least common lengths:\n", + "length count\n", + "2496 1\n", + "2763 1\n", + "3162 1\n", + "2982 1\n", + "3324 1\n", + "****************************************************************************************************\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX0AAAEFCAYAAAAPCDf9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAXMElEQVR4nO3df4xV6V3H8fen7JZOf5DuuuVKGBTUaRXY7FomSG3STIu62Jqyf4gZsxVsiKMEa2tQA/5h4x8ka+Iay6asTmzdIWLJWNtC2tKWYG+0Bpay67azQHGnhdKREXR1LdMaZDZf/7jPbs/OXGbO3Jk9cO/zeSWTe+73nOdwvnPC59557o+jiMDMzPLwqlt9AGZmVh2HvplZRhz6ZmYZceibmWXEoW9mlpE7bvUBzOaee+6JlStXtjT2e9/7Hq973esW9oBuczn2DHn2nWPP4L7LevLJJ/8zIt40tX7bh/7KlSs5ffp0S2Pr9Tp9fX0Le0C3uRx7hjz7zrFncN9lSfp2s7qnd8zMMuLQNzPLiEPfzCwjDn0zs4w49M3MMlIq9CX9rqQzkp6R9AlJr5F0t6Rjkp5Nt3cVtt8jaVTSeUkPFOrrJI2kdfsk6ZVoyszMmps19CUtB34H6I2ItcAioB/YDRyPiB7geLqPpNVp/RpgE7Bf0qK0u8eAAaAn/Wxa0G7MzGxGZad37gC6JN0BvBa4DGwGhtL6IeDBtLwZOBQR1yPiAjAKrJe0DFgSESei8X3OBwpjzMysArN+OCsi/k3SnwKXgP8FvhQRX5JUi4jxtM24pKVpyHLgZGEXY6l2Iy1PrU8jaYDGXwTUajXq9fqcmnrRxMREy2PbVY49Q55959gzuO/5mjX001z9ZmAV8Dzwd5LeN9OQJrWYoT69GDEIDAL09vZGq5++e/TgYR75yvcAuPjwe1raR7vxpxXzkWPP4L7nq8z0zs8BFyLiPyLiBvAp4GeBK2nKhnR7NW0/BqwojO+mMR00lpan1s3MrCJlQv8SsEHSa9O7bTYC54AjwLa0zTbgcFo+AvRLWixpFY0XbE+lqaBrkjak/WwtjDEzswqUmdN/QtIngaeASeBfaEy9vB4YlrSdxgPDlrT9GUnDwNm0/c6IeCHtbgfwONAFHE0/ZmZWkVLfshkRHwY+PKV8ncaz/mbb7wX2NqmfBtbO8RjNzGyB+BO5ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZmTX0Jb1F0tOFn+9K+pCkuyUdk/Rsur2rMGaPpFFJ5yU9UKivkzSS1u1L18o1M7OKzBr6EXE+Iu6PiPuBdcD3gU8Du4HjEdEDHE/3kbQa6AfWAJuA/ZIWpd09BgzQuFh6T1pvZmYVmev0zkbgmxHxbWAzMJTqQ8CDaXkzcCgirkfEBWAUWC9pGbAkIk5ERAAHCmPMzKwCpS6MXtAPfCIt1yJiHCAixiUtTfXlwMnCmLFUu5GWp9ankTRA4y8CarUa9Xp9joeZDrALdt07CdDyPtrNxMRENr0W5dh3jj2D+56v0qEv6dXAe4E9s23apBYz1KcXIwaBQYDe3t7o6+sre5gv8+jBwzwy0mjx4kOt7aPd1Ot1Wv19tbMc+86xZ3Df8zWX6Z1fBJ6KiCvp/pU0ZUO6vZrqY8CKwrhu4HKqdzepm5lZReYS+r/KD6Z2AI4A29LyNuBwod4vabGkVTResD2VpoKuSdqQ3rWztTDGzMwqUGp6R9JrgZ8HfrNQfhgYlrQduARsAYiIM5KGgbPAJLAzIl5IY3YAjwNdwNH0Y2ZmFSkV+hHxfeCHptSeo/Funmbb7wX2NqmfBtbO/TDNzGwh+BO5ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmY
ZceibmWXEoW9mlhGHvplZRhz6ZmYZKRX6kt4o6ZOSviHpnKS3Sbpb0jFJz6bbuwrb75E0Kum8pAcK9XWSRtK6felauWZmVpGyz/Q/AnwhIn4SuA84B+wGjkdED3A83UfSaqAfWANsAvZLWpT28xgwQONi6T1pvZmZVWTW0Je0BHgH8DGAiPi/iHge2AwMpc2GgAfT8mbgUERcj4gLwCiwXtIyYElEnIiIAA4UxpiZWQXKXBj9x4D/AP5a0n3Ak8AHgVpEjANExLikpWn75cDJwvixVLuRlqfWp5E0QOMvAmq1GvV6vWw/L1Prgl33TgK0vI92MzExkU2vRTn2nWPP4L7nq0zo3wG8FfhARDwh6SOkqZybaDZPHzPUpxcjBoFBgN7e3ujr6ytxmNM9evAwj4w0Wrz4UGv7aDf1ep1Wf1/tLMe+c+wZ3Pd8lZnTHwPGIuKJdP+TNB4ErqQpG9Lt1cL2Kwrju4HLqd7dpG5mZhWZNfQj4t+B70h6SyptBM4CR4BtqbYNOJyWjwD9khZLWkXjBdtTaSromqQN6V07WwtjzMysAmWmdwA+AByU9GrgW8D7aTxgDEvaDlwCtgBExBlJwzQeGCaBnRHxQtrPDuBxoAs4mn7MzKwipUI/Ip4Gepus2niT7fcCe5vUTwNr53B8Zma2gPyJXDOzjDj0zcwy4tA3M8uIQ9/MLCMOfTOzjDj0zcwy4tA3M8uIQ9/MLCMOfTOzjDj0zcwy4tA3M8uIQ9/MLCMOfTOzjDj0zcwy4tA3M8uIQ9/MLCMOfTOzjJQKfUkXJY1IelrS6VS7W9IxSc+m27sK2++RNCrpvKQHCvV1aT+jkvala+WamVlF5vJM/50RcX9EvHjZxN3A8YjoAY6n+0haDfQDa4BNwH5Ji9KYx4ABGhdL70nrzcysIvOZ3tkMDKXlIeDBQv1QRFyPiAvAKLBe0jJgSUSciIgADhTGmJlZBUpdGB0I4EuSAvjLiBgEahExDhAR45KWpm2XAycLY8dS7UZanlqfRtIAjb8IqNVq1Ov1kof5crUu2HXvJEDL+2g3ExMT2fRalGPfOfYM7nu+yob+2yPicgr2Y5K+McO2zebpY4b69GLjQWUQoLe3N/r6+koe5ss9evAwj4w0Wrz4UGv7aDf1ep1Wf1/tLMe+c+wZ3Pd8lZreiYjL6fYq8GlgPXAlTdmQbq+mzceAFYXh3cDlVO9uUjczs4rMGvqSXifpDS8uA78APAMcAbalzbYBh9PyEaBf0mJJq2i8YHsqTQVdk7QhvWtna2GMmZlVoMz0Tg34dHp35R3A30bEFyR9FRiWtB24BGwBiIgzkoaBs8AksDMiXkj72gE8DnQBR9OPmZlVZNbQj4hvAfc1qT8HbLzJmL3A3ib108DauR+mmZktBH8i18wsIw59M7OMOPTNzDLi0Dczy4hD38wsIw59M7OMOPTNzDLi0Dczy4hD38wsIw59M7OMOPTNzDLi0Dczy4hD38wsIw59M7OMOPTNzDLi0Dczy4hD38wsI6VDX9IiSf8i6bPp/t2Sjkl6Nt3eVdh2j6RRSeclPVCor5M0ktbtS9fKNTOziszlmf4HgXOF+7uB4xHRAxxP95G0GugH1gCbgP2SFqUxjwEDNC6W3pPWm5lZRUqFvqRu4D3AXxXKm4GhtDwEPFioH4qI6xFxARgF1ktaBiyJiBMREcCBwhgzM6vArBdGT/4c+APgDYVaLSLGASJiXNLSVF8OnCxsN5ZqN9Ly1Po0kgZo/EVArVajXq+XPMyXq3XBrnsnAVreR7uZmJjIpteiHPvOsWdw3/M1a+hL+iXgakQ8KamvxD6bzdPHDPXpxYhBYBCgt7c3+vrK/LPTPXrwMI+MNFq8+FBr+2g39XqdVn9f7SzHvnPsGdz3fJV5pv924L2S3g28Blgi6W+AK5KWpWf5y4CrafsxYEVhfDdwOdW7m9TNzKwis87pR8SeiOiOiJU0XqD9h4h4H3AE2JY22wYcTstHgH5
JiyWtovGC7ak0FXRN0ob0rp2thTFmZlaBsnP6zTwMDEvaDlwCtgBExBlJw8BZYBLYGREvpDE7gMeBLuBo+jEzs4rMKfQjog7U0/JzwMabbLcX2NukfhpYO9eDNDOzheFP5JqZZcShb2aWEYe+mVlGHPpmZhlx6JuZZcShb2aWEYe+mVlGHPpmZhlx6JuZZcShb2aWEYe+mVlGHPpmZhlx6JuZZcShb2aWEYe+mVlGHPpmZhlx6JuZZWTW0Jf0GkmnJH1N0hlJf5zqd0s6JunZdHtXYcweSaOSzkt6oFBfJ2kkrduXrpVrZmYVKfNM/zrwroi4D7gf2CRpA7AbOB4RPcDxdB9Jq2lcQH0NsAnYL2lR2tdjwACNi6X3pPVmZlaRWUM/GibS3TvTTwCbgaFUHwIeTMubgUMRcT0iLgCjwHpJy4AlEXEiIgI4UBhjZmYVKHVh9PRM/UngJ4CPRsQTkmoRMQ4QEeOSlqbNlwMnC8PHUu1GWp5ab/bvDdD4i4BarUa9Xi/dUFGtC3bdOwnQ8j7azcTERDa9FuXYd449g/uer1KhHxEvAPdLeiPwaUlrZ9i82Tx9zFBv9u8NAoMAvb290dfXV+Ywp3n04GEeGWm0ePGh1vbRbur1Oq3+vtpZjn3n2DO47/ma07t3IuJ5oE5jLv5KmrIh3V5Nm40BKwrDuoHLqd7dpG5mZhUp8+6dN6Vn+EjqAn4O+AZwBNiWNtsGHE7LR4B+SYslraLxgu2pNBV0TdKG9K6drYUxZmZWgTLTO8uAoTSv/ypgOCI+K+kEMCxpO3AJ2AIQEWckDQNngUlgZ5oeAtgBPA50AUfTj5mZVWTW0I+IrwM/3aT+HLDxJmP2Anub1E8DM70eYGZmryB/ItfMLCMOfTOzjDj0zcwy4tA3M8uIQ9/MLCMOfTOzjDj0zcwyUuq7dzrByt2fe2n54sPvuYVHYmZ26/iZvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRspcI3eFpC9LOifpjKQPpvrdko5Jejbd3lUYs0fSqKTzkh4o1NdJGknr9qVr5ZqZWUXKPNOfBHZFxE8BG4CdklYDu4HjEdEDHE/3Sev6gTXAJmB/ur4uwGPAAI2Lpfek9WZmVpFZQz8ixiPiqbR8DTgHLAc2A0NpsyHgwbS8GTgUEdcj4gIwCqyXtAxYEhEnIiKAA4UxZmZWgTl94ZqklTQukv4EUIuIcWg8MEhamjZbDpwsDBtLtRtpeWq92b8zQOMvAmq1GvV6fS6H+ZJaF+y6d3JavdX9tYOJiYmO7u9mcuw7x57Bfc9X6dCX9Hrg74EPRcR3Z5iOb7YiZqhPL0YMAoMAvb290dfXV/YwX+bRg4d5ZGR6ixcfam1/7aBer9Pq76ud5dh3jj2D+56vUu/ekXQnjcA/GBGfSuUracqGdHs11ceAFYXh3cDlVO9uUjczs4qUefeOgI8B5yLizwqrjgDb0vI24HCh3i9psaRVNF6wPZWmgq5J2pD2ubUwxszMKlBmeuftwK8BI5KeTrU/BB4GhiVtBy4BWwAi4oykYeAsjXf+7IyIF9K4HcDjQBdwNP2YmVlFZg39iPgKzefjATbeZMxeYG+T+mlg7VwO0MzMFo4/kWtmlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZceibmWXEoW9mlhGHvplZRhz6ZmYZmdNFVDrFyt2fe2n54sPvuYVHYmZWLT/TNzPLiEPfzCwjDn0zs4w49M3MMuLQNzPLSJlr5H5c0lVJzxRqd0s6JunZdHtXYd0eSaOSzkt6oFBfJ2kkrduXrpNrZmYVKvNM/3Fg05TabuB4RPQAx9N9JK0G+oE1acx+SYvSmMeAARoXSu9psk8zM3uFzRr6EfGPwH9NKW8GhtLyEPBgoX4oIq5HxAVgFFgvaRmwJCJOREQABwpjzMysIq1+OKsWEeMAETEuaWmqLwdOFrYbS7U
baXlqvSlJAzT+KqBWq1Gv11s7yC7Yde/kjNu0uu/b1cTERMf1VEaOfefYM7jv+VroT+Q2m6ePGepNRcQgMAjQ29sbfX19LR3MowcP88jIzC1efKi1fd+u6vU6rf6+2lmOfefYM7jv+Wr13TtX0pQN6fZqqo8BKwrbdQOXU727Sd3MzCrUaugfAbal5W3A4UK9X9JiSatovGB7Kk0FXZO0Ib1rZ2thjJmZVWTW6R1JnwD6gHskjQEfBh4GhiVtBy4BWwAi4oykYeAsMAnsjIgX0q520HgnUBdwNP2YmVmFZg39iPjVm6zaeJPt9wJ7m9RPA2vndHQV8DdumllO/IlcM7OMOPTNzDLi0Dczy4hD38wsIw59M7OMZHmN3JvxO3nMrNP5mb6ZWUYc+mZmGXHom5llxKFvZpYRv5B7E35R18w6kZ/pm5llxKFvZpYRT++U4KkeM+sUDv058gOAmbUzh/48+AHAzNqNQ3+B+AHAzNpB5aEvaRPwEWAR8FcR8XDVx1AlPxiY2e2k0tCXtAj4KPDzwBjwVUlHIuJslcfxSisGfZn6TPxAYWYLqepn+uuB0Yj4FoCkQ8BmGhdStybm+kCx695Jfn0OY/ygYpaXqkN/OfCdwv0x4GembiRpABhIdycknW/x37sH+M8Wx7al35ljz/qTV/BgqpXduSbPnsF9l/WjzYpVh76a1GJaIWIQGJz3Pyadjoje+e6nneTYM+TZd449g/ue736q/kTuGLCicL8buFzxMZiZZavq0P8q0CNplaRXA/3AkYqPwcwsW5VO70TEpKTfBr5I4y2bH4+IM6/gPznvKaI2lGPPkGffOfYM7nteFDFtSt3MzDqUv2XTzCwjDn0zs4x0ROhL2iTpvKRRSbubrJekfWn91yW99VYc50Iq0XOfpP+R9HT6+aNbcZwLSdLHJV2V9MxN1nfceYZSfXfiuV4h6cuSzkk6I+mDTbbpqPNdsuf5n+uIaOsfGi8IfxP4MeDVwNeA1VO2eTdwlMbnBDYAT9zq466g5z7gs7f6WBe473cAbwWeucn6jjrPc+i7E8/1MuCtafkNwL9m8P+6TM/zPted8Ez/pa92iIj/A178aoeizcCBaDgJvFHSsqoPdAGV6bnjRMQ/Av81wyaddp6BUn13nIgYj4in0vI14ByNT/QXddT5LtnzvHVC6Df7aoepv6gy27STsv28TdLXJB2VtKaaQ7ulOu08z0XHnmtJK4GfBp6Ysqpjz/cMPcM8z3UnfJ9+ma92KPX1D22kTD9PAT8aEROS3g18Buh5pQ/sFuu081xWx55rSa8H/h74UER8d+rqJkPa/nzP0vO8z3UnPNMv89UOnfb1D7P2ExHfjYiJtPx54E5J91R3iLdEp53nUjr1XEu6k0b4HYyITzXZpOPO92w9L8S57oTQL/PVDkeArenV/g3A/0TEeNUHuoBm7VnSD0tSWl5P41w/V/mRVqvTznMpnXiuUz8fA85FxJ/dZLOOOt9lel6Ic9320ztxk692kPRbaf1fAJ+n8Ur/KPB94P236ngXQsmefxnYIWkS+F+gP9LL/+1K0idovHvhHkljwIeBO6Ezz/OLSvTdcecaeDvwa8CIpKdT7Q+BH4GOPd9lep73ufbXMJiZZaQTpnfMzKwkh76ZWUYc+mZmGXHom5llxKFvZnYbme0L9pps/yuSzqYvafvbWbf3u3fMzG4fkt4BTND4XqG1s2zbAwwD74qI/5a0NCKuzjTGz/TNzG4jzb5gT9KPS/qCpCcl/ZOkn0yrfgP4aET8dxo7Y+CDQ9/MrB0MAh+IiHXA7wH7U/3NwJsl/bOkk5I2zbajtv9ErplZJ0tfwPazwN+lb2AAWJxu76DxhWt9NL576J8krY2I52+2P4e+mdnt7VXA8xFxf5N1Y8DJiLgBXJB0nsaDwFdn2pmZmd2m0tcrX5C0BV66TOR9afVngHem+j00pnu+NdP+HPpmZreR9AV7J4C3SBqTtB14CNgu6WvAGX5
wpbwvAs9JOgt8Gfj9iJjxWzf9lk0zs4z4mb6ZWUYc+mZmGXHom5llxKFvZpYRh76ZWUYc+mZmGXHom5ll5P8BQlJJViIXFGYAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----------------------------------------\n", + "Total sequences: 15137\n", + "----------------------------------------\n", + "Top 5 longest sequences:\n", + "id length\n", + "734 2473537\n", + "7904 2304997\n", + "15761 2303293\n", + "5357 2298478\n", + "5466 2172911\n", + "----------------------------------------\n", + "Top 5 shortest sequences:\n", + "id length\n", + "15229 76\n", + "12750 117\n", + "13021 128\n", + "13309 135\n", + "14763 148\n", + "----------------------------------------\n", + "Average length: 64199.51278324635\n", + "----------------------------------------\n", + "Top 5 most common lengths:\n", + "length count\n", + "930 15\n", + "939 13\n", + "945 12\n", + "1952 11\n", + "597 9\n", + "----------------------------------------\n", + "Top 5 least common lengths:\n", + "length count\n", + "4873 1\n", + "20360 1\n", + "4419 1\n", + "1337027 1\n", + "2059620 1\n" + ] + } + ], + "source": [ + "# add a column that contains length of the sequence\n", + "positive[\"length\"] = positive[\"sequence\"].apply(len)\n", + "negative[\"length\"] = negative[\"sequence\"].apply(len)\n", + "\n", + "stats(positive, \"positive_nada\")\n", + "print(\"*\" * 100)\n", + "stats(negative, \"negative_nada\")" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYUAAAD4CAYAAAAD6PrjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAWZklEQVR4nO3dfZBdd33f8fendjDgBT/UsKPKbmQygtZYaYK2LikNs1uHYB6K3Da08jip3LqjSWsoaWEau8yU/OOpEwodOm6aUWOPRU28OAZqlwwpjsrG05kYxzIG+QHHAquOZGGVJ8ESairy7R/37OF6vau92vuw99rv14zmnvM7v3vOZ4+v97u/83RTVUiSBPAXNjqAJGl8WBQkSS2LgiSpZVGQJLUsCpKk1ukbHQDgvPPOqy1btvTc/3vf+x5nnnnm8AINiblHaxJzT2JmMPeoLeXev3//16vqFQNdeVVt+L/t27fXqfjc5z53Sv3HhblHaxJzT2LmKnOP2lJu4P4a8O9jDx9JkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqSWRUGS1LIoSJJaY/GYi2HZcu3vtdOHbnjbBiaRpMngSEGS1LIoSJJaFgVJUsuiIElqrVkUktyc5FiSh5a1vzvJY0keTvIbXe3XJTnYLHvzMEJLkoajl6uPbgFuBD661JBkDtgB/GRVPZPklU37RcBO4LXAXwL+IMmrq+qHgw4uSRq8NUcKVXUP8M1lzf8MuKGqnmn6HGvadwDzVfVMVT0BHAQuGWBeSdIQpfPlPWt0SrYAn66qi5v5B4E7gcuA/wu8r6r+OMmNwL1VdWvT7ybgM1V1xwrr3A3sBpient4+Pz/fc+jFxUWmpqbW7HfgyPF2etvms3pe/7D0mnvcmHt0JjEzmHvUlnLPzc3tr6qZQa57vTevnQ6cA7we+OvA7UleBWSFvitWnaraA+wBmJmZqdnZ2Z43vrCwQC/9r+q+ee3K3tc/LL3mHjfmHp1JzAzmHrVh5l7v1UeHgU82Xxd6H/DnwHlN+wVd/c4HnuovoiRpVNY7UvhvwN8GFpK8GngR8HXgLuB3knyYzonmrcB9A8g5UD7+QpJWtmZRSHIbMAucl+Qw8AHgZuDm5jLVHwC7qnNy4uEktwOPACeAa7zySJImx5pFoaquWGXRL67S/3rg+n5CSZI2hnc0S5JaFgVJUut5/X0K3bpPLkuSVuZIQZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqSWRUGS1LIoSJJaFgVJUsuiIElqPe8eiOeD7yRp/dYcKSS5Ocmx5lvWli97X5JKcl5X23VJDiZ5LMmbBx1YkjQ8vYwUbgFuBD7a3ZjkAuBNwJNdbRcBO4HX0vmO5j9I8upx/kpOv69Zkn5kzZFCVd0DfHOFRf8B+NdAdbXtAOar6pmqegI4CFwyiKCSpOFLVa3dKdkCfLqqLm7m3wFcWlXvSXIImKmqrye5Ebi3qm5t+t0EfKaq7lhhnbuB3QDT09Pb5+fnew69uLjI1NTUissOHDne83qW27b5rBXX093ej5PlHmfmHp1JzAzmHrWl3HNzc/uramaQ6z7lE81JXgq8H/j5lRav0LZi1amqPcAegJmZmZqdne05w8LCAqv1v6qPE82HrvzROrvX093ej5PlHmfmHp1JzAzmHrVh5l7P1Uc/AVwIfDEJwPnAA0kuAQ4DF3T1PR94qt+QkqTROOX7FKrqQFW9sqq2VNUWOoXgdVX1NeAuYGeSM5JcCGwF7htoYknS0PRySeptwB8Br0lyOMnVq/WtqoeB24FHgN8HrhnnK48kSc+25uGjqrpijeVbls1fD1zfXyxJ0kbwMReSpJZFQZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqTW8+7rOPvhV3lKeqFzpCBJalkUJEkti4IkqWVRkCS1LAqSpJZFQZLU6uWb125OcizJQ11tH0zy5SRfSvKpJGd
3LbsuycEkjyV585ByS5KGoJeRwi3AZcva7gYurqqfBP4EuA4gyUXATuC1zXt+M8lpA0srSRqqNYtCVd0DfHNZ22er6kQzey9wfjO9A5ivqmeq6gngIHDJAPNKkoYoVbV2p2QL8OmquniFZf8d+HhV3ZrkRuDeqrq1WXYT8JmqumOF9+0GdgNMT09vn5+f7zn04uIiU1NTKy47cOR4z+vp1bbNZw1kPSfLPc7MPTqTmBnMPWpLuefm5vZX1cwg193XYy6SvB84AXxsqWmFbitWnaraA+wBmJmZqdnZ2Z63u7CwwGr9rxrCoyoOXbnytk7VyXKPM3OPziRmBnOP2jBzr7soJNkFvB24tH403DgMXNDV7XzgqfXHkySN0rouSU1yGfCrwDuq6s+6Ft0F7ExyRpILga3Aff3HlCSNwpojhSS3AbPAeUkOAx+gc7XRGcDdSaBzHuGXq+rhJLcDj9A5rHRNVf1wWOElSYO1ZlGoqitWaL7pJP2vB67vJ5QkaWN4R7MkqWVRkCS1LAqSpJZFQZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktSwKkqRWX9+n8EKxpes7Gg7d8LYNTCJJw+VIQZLUsihIkloWBUlSy6IgSWqtWRSS3JzkWJKHutrOTXJ3kseb13O6ll2X5GCSx5K8eVjBJUmD18tI4RbgsmVt1wL7qmorsK+ZJ8lFwE7gtc17fjPJaQNLK0kaqjWLQlXdA3xzWfMOYG8zvRe4vKt9vqqeqaongIPAJYOJKkkatlTV2p2SLcCnq+riZv7bVXV21/JvVdU5SW4E7q2qW5v2m4DPVNUdK6xzN7AbYHp6evv8/HzPoRcXF5mamlpx2YEjx3tez3ps23zWut97stzjzNyjM4mZwdyjtpR7bm5uf1XNDHLdg755LSu0rVh1qmoPsAdgZmamZmdne97IwsICq/W/qutGs2E4dOXK2+3FyXKPM3OPziRmBnOP2jBzr/fqo6eTbAJoXo817YeBC7r6nQ88tf54kqRRWm9RuAvY1UzvAu7sat+Z5IwkFwJbgfv6iyhJGpU1Dx8luQ2YBc5Lchj4AHADcHuSq4EngXcCVNXDSW4HHgFOANdU1Q+HlF2SNGBrFoWqumKVRZeu0v964Pp+QkmSNoZPST1FPjFV0vOZj7mQJLUsCpKklkVBktSyKEiSWhYFSVLLoiBJalkUJEkti4IkqWVRkCS1LAqSpJZFQZLUsihIkloWBUlSy6IgSWr1VRSS/MskDyd5KMltSV6c5Nwkdyd5vHk9Z1BhJUnDte6ikGQz8C+Amaq6GDgN2AlcC+yrqq3AvmZekjQB+j18dDrwkiSnAy8FngJ2AHub5XuBy/vchiRpRNZdFKrqCPDv6XxH81HgeFV9FpiuqqNNn6PAKwcRVJI0fKmq9b2xc67gE8A/BL4N/C5wB3BjVZ3d1e9bVfWc8wpJdgO7Aaanp7fPz8/3vO3FxUWmpqZWXHbgyPGe19OvbZvPOqX+J8s9zsw9OpOYGcw9aku55+bm9lfVzCDX3c93NP8c8ERV/R+AJJ8E/ibwdJJNVXU0ySbg2Epvrqo9wB6AmZmZmp2d7XnDCwsLrNb/qq7vUB62Q1eunGE1J8s9zsw9OpOYGcw9asPM3c85hSeB1yd5aZIAlwKPAncBu5o+u4A7+4soSRqVdY8UqurzSe4AHgBOAF+g85f/FHB7kqvpFI53DiKoJGn4+jl8RFV9APjAsuZn6IwaJEkTxjuaJUkti4IkqWVRkCS1LAqSpJZFQZLU6uvqoxe6LV03yh264W0bmESSBsORgiSp5UhhCBxBSJpUjhQkSS2LgiSpZVGQJLWeF+cUtozwcdmS9HzmSEGS1LIoSJJaFgVJUsuiIElq9VUUkpyd5I4kX07yaJKfSXJukruTPN68njOosJKk4ep3pPAR4Per6q8Af43OdzRfC+yrqq3AvmZekjQB1l0UkrwceCNwE0BV/aCqvg3sAPY23fYCl/c
XUZI0Kqmq9b0x+SlgD/AInVHCfuA9wJGqOrur37eq6jmHkJLsBnYDTE9Pb5+fn+9524uLi0xNTbXzB44cX9fPMEjbNp/VTnfn6W5fnntSmHt0JjEzmHvUlnLPzc3tr6qZQa67n6IwA9wLvKGqPp/kI8B3gHf3UhS6zczM1P3339/zthcWFpidnW3nx+Hmte4H3632QLzluSeFuUdnEjODuUdtKXeSgReFfu5oPgwcrqrPN/N30Dl/8HSSTVV1NMkm4Fi/ISfBOBQmSerXuotCVX0tyZ8meU1VPQZcSudQ0iPALuCG5vXOgSSdUN3F4pbLztzAJJK0tn6fffRu4GNJXgR8FfjHdE5e357kauBJ4J19bkOSNCJ9FYWqehBY6XjWpf2sV5K0MbyjWZLUsihIkloWBUlSy6IgSWpZFCRJLYuCJKllUZAktfq9eU3rtNrzkSRpIzlSkCS1HCmM0IEjx7nKB+dJGmOOFCRJLYuCJKllUZAktSwKkqSWRUGS1LIoSJJafReFJKcl+UKSTzfz5ya5O8njzes5/ceUJI3CIEYK7wEe7Zq/FthXVVuBfc28JGkC9FUUkpwPvA347a7mHcDeZnovcHk/25AkjU6qav1vTu4A/h3wMuB9VfX2JN+uqrO7+nyrqp5zCCnJbmA3wPT09Pb5+fmet7u4uMjU1FQ7f+DI8XX/DKM0/RJ4+vvPbd+2+azRhzkFy/f3pJjE3JOYGcw9aku55+bm9lfVzCDXve7HXCR5O3CsqvYnmT3V91fVHmAPwMzMTM3O9r6KhYUFuvtPyqMj3rvtBB868NxdfujK2dGHOQXL9/ekmMTck5gZzD1qw8zdz7OP3gC8I8lbgRcDL09yK/B0kk1VdTTJJuDYIIJKkoZv3UWhqq4DrgNoRgrvq6pfTPJBYBdwQ/N6Z/8xn9+2rDLS8ZHakkZtGPcp3AC8KcnjwJuaeUnSBBjIo7OragFYaKa/AVw6iPVKkkbLO5olSS2LgiSpZVGQJLUsCpKklkVBktSyKEiSWhYFSVLLoiBJalkUJEkti4IkqTWQx1xoOLoflOfD8SSNgiMFSVLLoiBJalkUJEktzylMiNW+iGc5zz1I6ocjBUlSa91FIckFST6X5NEkDyd5T9N+bpK7kzzevJ4zuLiSpGHqZ6RwAnhvVf1V4PXANUkuAq4F9lXVVmBfMy9JmgDrLgpVdbSqHmimvws8CmwGdgB7m257gcv7zChJGpFUVf8rSbYA9wAXA09W1dldy75VVc85hJRkN7AbYHp6evv8/HzP21tcXGRqaqqdP3Dk+Hqjj9T0S+Dp7w93G9s2nzXwdS7f35NiEnNPYmYw96gt5Z6bm9tfVTODXHffRSHJFPCHwPVV9ckk3+6lKHSbmZmp+++/v+dtLiwsMDs72873emXORnvvthN86MBwL/gaxtVHy/f3pJjE3JOYGcw9aku5kwy8KPR19VGSHwM+AXysqj7ZND+dZFOzfBNwrL+IkqRR6efqowA3AY9W1Ye7Ft0F7GqmdwF3rj+eJGmU+jmW8Qbgl4ADSR5s2v4NcANwe5KrgSeBd/aVUJI0MusuClX1v4CssvjS9a5X/fHJqpL64R3NkqSWRUGS1LIoSJJaPiX1eczzC5JOlUXhBcICIakXFoUXOIuFpG6eU5AktSwKkqSWRUGS1PKcgtZ04MhxrmrOPax23mEY5yY83yGNnkXhBWhSHjW+GouFNDwWBW0If7FL48lzCpKkliMFDdypjgIm/XCW9HxiUdC69fLLfNiHiVZb/5Zrf4/3bjux4glyD11Jq7MoqLXaL/n3blu7z7CdagFaz/slDbEoJLkM+AhwGvDbVXXDsLalyTCOv5hPNtJYST+Hw0Y5WnE0pPUayonmJKcB/wl4C3ARcEWSi4axLUnS4AxrpHAJcLCqvgqQZB7YATwypO1JfetnJDOoUVD3jYLdhvXXfi8jpV5uWLzlsjNPaVvdTnX0NYz+o1rXMNc5KKmqwa80+QX
gsqr6p838LwF/o6re1dVnN7C7mX0N8NgpbOI84OsDijtK5h6tScw9iZnB3KO2lPvHq+oVg1zxsEYKWaHtWdWnqvYAe9a18uT+qppZz3s3krlHaxJzT2JmMPeoDTP3sG5eOwxc0DV/PvDUkLYlSRqQYRWFPwa2JrkwyYuAncBdQ9qWJGlAhnL4qKpOJHkX8D/oXJJ6c1U9PMBNrOuw0xgw92hNYu5JzAzmHrWh5R7KiWZJ0mTygXiSpJZFQZLUmqiikOSyJI8lOZjk2jHIc0GSzyV5NMnDSd7TtP9akiNJHmz+vbXrPdc1+R9L8uau9u1JDjTL/mOSlS7rHWT2Q832Hkxyf9N2bpK7kzzevJ4zTrmTvKZrnz6Y5DtJfmUc93eSm5McS/JQV9vA9m+SM5J8vGn/fJItQ8r8wSRfTvKlJJ9KcnbTviXJ97v2+W9tROaT5B7YZ2LEuT/elflQkgeb9tHt76qaiH90Tlh/BXgV8CLgi8BFG5xpE/C6ZvplwJ/QeazHrwHvW6H/RU3uM4ALm5/ntGbZfcDP0LnH4zPAW4ac/RBw3rK23wCubaavBX593HIv+zx8DfjxcdzfwBuB1wEPDWP/Av8c+K1meifw8SFl/nng9Gb617syb+nut2w9I8t8ktwD+0yMMvey5R8C/u2o9/ckjRTaR2dU1Q+ApUdnbJiqOlpVDzTT3wUeBTaf5C07gPmqeqaqngAOApck2QS8vKr+qDr/BT8KXD7c9Kvm29tM7+3KMI65LwW+UlX/+yR9Nix3Vd0DfHOFPIPav93rugO4tN/RzkqZq+qzVXWimb2Xzj1Hqxp15tVyn8RY7Ou1cjfr/wfAbSdbxzByT1JR2Az8adf8YU7+C3ikmqHZTwOfb5re1Qy5b+46TLDaz7C5mV7ePkwFfDbJ/nQeOQIwXVVHoVPwgFc27eOUe8lOnv0/zLjvbxjs/m3f0/zSPg78xaEl7/gndP4SXXJhki8k+cMkP9uVa1wyD+ozsRH7+meBp6vq8a62kezvSSoKaz46Y6MkmQI+AfxKVX0H+M/ATwA/BRylMwyE1X+GjfjZ3lBVr6PzJNtrkrzxJH3HKTfp3BD5DuB3m6ZJ2N8ns56cI/0ZkrwfOAF8rGk6Cvzlqvpp4F8Bv5Pk5WvkGmXmQX4mNuLzcgXP/qNnZPt7korCWD46I8mP0SkIH6uqTwJU1dNV9cOq+nPgv9A59AWr/wyHefawfOg/W1U91bweAz7VZHy6GY4uDUuPjVvuxluAB6rqaZiM/d0Y5P5t35PkdOAsej+EckqS7ALeDlzZHKKgOfzyjWZ6P51j868el8wD/kyMLHfXNv4e8PGltlHu70kqCmP36Izm+NxNwKNV9eGu9k1d3f4usHR1wV3AzuaqgAuBrcB9zaGE7yZ5fbPOfwTcOcTcZyZ52dI0nZOJDzX5djXddnVlGIvcXZ71V9S47+8ug9y/3ev6BeB/Lv3CHqR0vizrV4F3VNWfdbW/Ip3vTSHJq5rMXx2HzE2mQX4mRpa78XPAl6uqPSw00v19KmfLN/of8FY6V/h8BXj/GOT5W3SGY18CHmz+vRX4r8CBpv0uYFPXe97f5H+MritegBk6H9yvADfS3G0+pNyvonMFxheBh5f2JZ3jjfuAx5vXc8cpd7O9lwLfAM7qahu7/U2naB0F/h+dv9iuHuT+BV5M5/DZQTpXn7xqSJkP0jkuvfT5Xrqa5e83n50vAg8Af2cjMp8k98A+E6PM3bTfAvzysr4j298+5kKS1Jqkw0eSpCGzKEiSWhYFSVLLoiBJalkUJEkti4IkqWVRkCS1/j9Wd8qbXEl8dAAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----------------------------------------\n", + "Total sequences: 2010\n", + "----------------------------------------\n", + "Top 5 longest sequences:\n", + "id length\n", + "676 16791\n", + "248 15615\n", + "204 14574\n", + "491 13941\n", + "31 13167\n", + "----------------------------------------\n", + "Top 5 shortest sequences:\n", + "id length\n", + "1112 192\n", + "1427 195\n", + "1500 204\n", + "1611 204\n", + "1346 210\n", + "----------------------------------------\n", + "Average length: 1903.5980099502488\n", + "----------------------------------------\n", + "Top 5 most common lengths:\n", + "length count\n", + "1083 9\n", + "1041 8\n", + "1314 7\n", + "618 7\n", + "1371 7\n", + "----------------------------------------\n", + "Top 5 least common lengths:\n", + "length count\n", + "2496 1\n", + "2763 1\n", + "3162 1\n", + "2982 1\n", + "3324 1\n", + "****************************************************************************************************\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXsAAAD4CAYAAAANbUbJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAUYklEQVR4nO3dX4zl5X3f8fcni41d1oal2KMNi7pYXTlZgozNiJq6imZNajbGynIRpLXsdJ0S7UVJ5LRU7W5zUeViVVoVq6kIrVfB9SrYnmw3dllhERetM4qQCoRtnPB3y8ZL8QJhEwdIhgsS6LcX81t8WGaYM2fOmZkzz/slofM7z3l+5zzfmeVznvP8fuc3qSokSevbj632ACRJo2fYS1IDDHtJaoBhL0kNMOwlqQHnrfYAAC655JLaunXrQPu++uqrXHDBBcMd0BpnzW2w5jYsp+bjx4//RVV9oJ++ayLst27dyiOPPDLQvjMzM0xNTQ13QGucNbfBmtuwnJqT/N9++7qMI0kNMOwlqQF9hX2Si5IcSfJUkieTXJvk4iT3J3m6u93U039/kpNJTiS5fnTDlyT1o9+Z/W8Av1dVPwF8BHgS2Accq6ptwLHuPkm2A7uBK4CdwJ1JNgx74JKk/i0a9kneD/w0cBdAVf1NVb0M7AIOdd0OATd227uA6ap6rapOASeBa4Y7bEnSUmSxC6EluQo4CDzB3Kz+OPBF4Lmquqin30tVtSnJHcCDVXV3134XcF9VHTnnefcCewEmJiaunp6eHqiA2dlZNm7cONC+48qa22DNbVhOzTt27DheVZP99O3n1MvzgI8Bv1JVDyX5DbolmwVknra3vaNU1UHm3kSYnJysQU898lStNlhzG6x5dPpZsz8NnK6qh7r7R5gL/xeTbAbobs/09L+sZ/8twPPDGa4kaRCLhn1V/RnwgyQf7pquY25J5yiwp2vbA9zTbR8Fdic5P8nlwDbg4aGOWpK0JP1+g/ZXgK8leTfwfeAXmXujOJzkZuBZ4CaAqno8yWHm3hBeB26pqjeGPvIFbN337Te3n7nthpV6WUla0/oK+6r6HjDfQYDrFuh/ADgw+LAkScPkN2glqQGGvSQ1wLCXpAYY9pLUAMNekhpg2EtSAwx7SWqAYS9JDTDsJakBhr0kNcCwl6QGGPaS1ADDXpIaYNhLUgP6vZ79mtZ7DXtJ0ts5s5ekBhj2ktQAw16SGmDYS1IDDHtJaoBhL0kNMOwlqQGGvSQ1wLCXpAYY9pLUAMNekhrQV9gneSbJo0m+l+SRru3iJPcnebq73dTTf3+Sk0lOJLl+VIOXJPVnKTP7HVV1VVVNdvf3AceqahtwrLtPku3AbuAKYCdwZ5INQxyzJGmJlrOMsws41G0fAm7saZ+uqteq6hRwErhmGa8jSVqmVNXinZJTwEtAAV+uqoNJXq6qi3r6vFRVm5LcATxYVXd37XcB91XVkXOecy+wF2BiYuLq6enpgQqYnZ3l1CtvLNrvyksvHOj516LZ2Vk2bty42sNYUdbcBmtemh07dhzvWW15R/1ez/4TVfV8kg8C9yd56h36Zp62t72jVNVB4CDA5ORkTU1N9TmUt5qZmeH2B15dtN8znxvs+deimZkZBv15jStrboM1j05fyzhV9Xx3ewb4FnPLMi8m2QzQ3Z7pup8GLuvZfQvw/LAGLElaukXDPskFSd53dhv4FPAYcBTY03XbA9zTbR8Fdic5P8nlwDbg4WEPXJLUv36WcSaAbyU52//rVfV7Sf4QOJzkZuBZ4CaAqno8yWHgCeB14JaqWnxRXZI0MouGfVV9H/jIPO0/BK5bYJ8DwIFlj06SNBR+g1aSGmDYS1IDDHtJaoBhL0kNMOwlqQGGvSQ1wLCXpAYY9pLUAMNekhpg2EtSA/q9xPHY27rv229uP3PbDas4Eklaec2E/UJ8E5DUApdxJKkBhr0kNaDJZZzepRtJaoEze0lqgGEvSQ0w7CWpAYa9JDXAsJekBhj
2ktQAw16SGmDYS1IDDHtJaoBhL0kNMOwlqQF9h32SDUn+KMm93f2Lk9yf5OnudlNP3/1JTiY5keT6UQxcktS/pczsvwg82XN/H3CsqrYBx7r7JNkO7AauAHYCdybZMJzhSpIG0VfYJ9kC3AD8Vk/zLuBQt30IuLGnfbqqXquqU8BJ4JqhjFaSNJBU1eKdkiPAvwPeB/zLqvpMkper6qKePi9V1aYkdwAPVtXdXftdwH1VdeSc59wL7AWYmJi4enp6eqACZmdnOfXKGwPte64rL71wKM8zarOzs2zcuHG1h7GirLkN1rw0O3bsOF5Vk/30XfR69kk+A5ypquNJpvp4zszT9rZ3lKo6CBwEmJycrKmpfp767WZmZrj9gVcH2vdcz3xusDGstJmZGQb9eY0ra26DNY9OP3+85BPAzyX5NPAe4P1J7gZeTLK5ql5Ishk40/U/DVzWs/8W4PlhDlqStDSLrtlX1f6q2lJVW5k78Prdqvo8cBTY03XbA9zTbR8Fdic5P8nlwDbg4aGPXJLUt+X8WcLbgMNJbgaeBW4CqKrHkxwGngBeB26pquEsqkuSBrKksK+qGWCm2/4hcN0C/Q4AB5Y5NknSkPgNWklqwHKWcdadrfu+/eb2M7fdsIojkaThcmYvSQ0w7CWpAS7jLGAUSzouE0laLc7sJakBhr0kNcCwl6QGGPaS1ADDXpIaYNhLUgMMe0lqgGEvSQ0w7CWpAX6Ddhn8RqykceHMXpIa4Mx+iXpn85I0LpzZS1IDDHtJaoDLOH1Y6tKNB24lrTXO7CWpAYa9JDXAsJekBhj2ktQAD9AOieffS1rLnNlLUgMWDfsk70nycJI/TvJ4kl/v2i9Ocn+Sp7vbTT377E9yMsmJJNePsgBJ0uL6mdm/Bnyyqj4CXAXsTPJxYB9wrKq2Ace6+yTZDuwGrgB2Ancm2TCCsUuS+rRo2Nec2e7uu7r/CtgFHOraDwE3dtu7gOmqeq2qTgEngWuGOWhJ0tKkqhbvNDczPw78feA3q+pfJ3m5qi7q6fNSVW1KcgfwYFXd3bXfBdxXVUfOec69wF6AiYmJq6enpwcqYHZ2llOvvDHQvivhyksvfHP70edembd9qWZnZ9m4ceOyxjVurLkN1rw0O3bsOF5Vk/307etsnKp6A7gqyUXAt5L81Dt0z3xPMc9zHgQOAkxOTtbU1FQ/Q3mbmZkZbn/g1YH2XQnPfG7qze0v9F5Goad9qWZmZhj05zWurLkN1jw6Szobp6peBmaYW4t/MclmgO72TNftNHBZz25bgOeXO1BJ0uD6ORvnA92MniTvBX4GeAo4Cuzpuu0B7um2jwK7k5yf5HJgG/DwkMctSVqCfpZxNgOHunX7HwMOV9W9Sf4XcDjJzcCzwE0AVfV4ksPAE8DrwC3dMlCT/LKVpLVg0bCvqj8BPjpP+w+B6xbY5wBwYNmjkyQNhd+glaQGGPaS1ADDXpIaYNhLUgMMe0lqgGEvSQ0w7CWpAYa9JDXAsJekBvg3aNeA3ksqPHPbDas4EknrlTN7SWqAYS9JDTDsJakBhr0kNcADtKvE69xLWknO7CWpAc7s1xhPw5Q0Cs7sJakBhr0kNcCwl6QGuGa/hrl+L2lYnNlLUgMMe0lqgGEvSQ0w7CWpAYa9JDVg0bBPclmS30/yZJLHk3yxa784yf1Jnu5uN/Xssz/JySQnklw/ygIkSYvrZ2b/OnBrVf0k8HHgliTbgX3AsaraBhzr7tM9thu4AtgJ3JlkwygGL0nqz6JhX1UvVNX/7rb/GngSuBTYBRzquh0Cbuy2dwHTVfVaVZ0CTgLXDHnckqQlSFX13znZCvwB8FPAs1V1Uc9jL1XVpiR3AA9W1d1d+13AfVV15Jzn2gvsBZiYmLh6enp6oAJmZ2c59cobA+07Tq689MI3t2dnZ9m4ceMqjmblWXMbrHlpduzYcbyqJvvp2/c3aJNsBH4X+NWq+qskC3adp+1t7yhVdRA4CDA5OVlTU1P
9DuUtZmZmuP2BVwfad5w887mpN7dnZmYY9Oc1rqy5DdY8On2djZPkXcwF/deq6ptd84tJNnePbwbOdO2ngct6dt8CPD+c4UqSBtHP2TgB7gKerKov9Tx0FNjTbe8B7ulp353k/CSXA9uAh4c3ZEnSUvWzjPMJ4BeAR5N8r2v7N8BtwOEkNwPPAjcBVNXjSQ4DTzB3Js8tVbX+F9UlaQ1bNOyr6gHmX4cHuG6BfQ4AB5YxLp2j9wqYX915wSqORNI48hu0ktQAw16SGmDYS1IDDHtJaoB/lnAd888aSjrLmb0kNcCZ/TrTO5uXpLOc2Y+hR597ha37vm2wS+qbYS9JDXAZZ8w5u5fUD2f2ktQAZ/aN8/RMqQ3O7CWpAYa9JDXAsJekBrhm3yDP4JHaY9g3op+AX6iPB26l8ecyjiQ1wLCXpAa4jKNFeS6+NP6c2UtSAwx7SWqAyzgamMs70vhwZi9JDXBmryXxC1nSeDLsNXQu70hrz6LLOEm+kuRMksd62i5Ocn+Sp7vbTT2P7U9yMsmJJNePauAaD2f/fKKfCKTV1c+a/VeBnee07QOOVdU24Fh3nyTbgd3AFd0+dybZMLTRaqwZ/NLqWTTsq+oPgL88p3kXcKjbPgTc2NM+XVWvVdUp4CRwzXCGKkka1KBn40xU1QsA3e0Hu/ZLgR/09DvdtUmSVtGwD9Bmnraat2OyF9gLMDExwczMzEAvODs7y61XvjHQvuNq4r1w65Wvr/Yw3qL399fP2Jb6+56dnR3438i4suY2rFTNg4b9i0k2V9ULSTYDZ7r208BlPf22AM/P9wRVdRA4CDA5OVlTU1MDDWRmZobbH3h1oH3H1a1Xvs7tj66xE6ke7f0d9DG2nv79nLEzMzPDoP9GYDzPEFpuzePImkdn0MQ4CuwBbutu7+lp/3qSLwE/DmwDHl7uIKXVNI5vFNK5Fg37JN8ApoBLkpwG/i1zIX84yc3As8BNAFX1eJLDwBPA68AtVdXWGoskrUGLhn1VfXaBh65boP8B4MByBiUNYpgzcE8P1XrjtXEkqQFr7CifWrfQjPqrOy8YyvOsZx5b0Dsx7LXq+gnmR597hS90/UYVZC2+Qagdhr3G2loJaGfVWusMe42dYQb8WnmzkEbNsFdzDHi1yLCXBuSbhsaJYS8tQT8B7/q91iLDXlplvjloJfilKklqgDN7aYUsdQbfz3cLPG6gfhn20ggtFMaGtFaaYS+tQwu9mQxyTMBjCuuDYS+tIb3BeuuVqzgQrTuGvaSRWehTwVI/LfjpYvkMe2nMjOpyEQuF8TjyzeHtDHtJwPK+MDbubw4tMOylMTCsMF3NUB5FDc7a+2fYS1pVveG91D9SM4oxwPDeRNbSG5NhL2nN6P0i2VKtxLGMcWbYS1rXVmLpahwukGfYSxrIWjgouxbGMC4Me0nq02rPzpfDsJekd7Berm/kJY4lqQHO7CVpha3G6abO7CWpASML+yQ7k5xIcjLJvlG9jiRpcSMJ+yQbgN8EfhbYDnw2yfZRvJYkaXGjmtlfA5ysqu9X1d8A08CuEb2WJGkRqarhP2ny88DOqvql7v4vAP+gqn65p89eYG9398PAiQFf7hLgL5Yx3HFkzW2w5jYsp+a/V1Uf6KfjqM7GyTxtb3lXqaqDwMFlv1DySFVNLvd5xok1t8Ga27BSNY9qGec0cFnP/S3A8yN6LUnSIkYV9n8IbEtyeZJ3A7uBoyN6LUnSIkayjFNVryf5ZeA7wAbgK1X1+CheiyEsBY0ha26DNbdhRWoeyQFaSdLa4jdoJakBhr0kNWCsw36cL8mQ5LIkv5/kySSPJ/li135xkvuTPN3dburZZ39X64kk1/e0X53k0e6x/5wkXfv5SX6na38oydYVL3QeSTYk+aMk93b313XNSS5KciTJU93v+9oGav7n3b/rx5J8I8l71lvNSb6S5EySx3raVqTGJHu613g6yZ6+BlxVY/kfcwd+/xT4EPBu4I+B7as9riW
MfzPwsW77fcD/Ye7SEv8B2Ne17wP+fbe9vavxfODyrvYN3WMPA9cy9/2G+4Cf7dr/GfBfu+3dwO+sdt3dWP4F8HXg3u7+uq4ZOAT8Urf9buCi9VwzcClwCnhvd/8w8IX1VjPw08DHgMd62kZeI3Ax8P3udlO3vWnR8a72/wjL+EFfC3yn5/5+YP9qj2sZ9dwD/GPmvkm8uWvbDJyYrz7mznS6tuvzVE/7Z4Ev9/bpts9j7lt6WeU6twDHgE/yo7BftzUD72cu+HJO+3qu+VLgB10YnQfcC3xqPdYMbOWtYT/yGnv7dI99GfjsYmMd52Wcs/+gzjrdtY2d7uPZR4GHgImqegGgu/1g122hei/tts9tf8s+VfU68Arwd0dSRP/+E/CvgP/X07aea/4Q8OfAf+uWrn4ryQWs45qr6jngPwLPAi8Ar1TV/2Qd19xjJWocKPvGOewXvSTDOEiyEfhd4Fer6q/eqes8bfUO7e+0z6pI8hngTFUd73eXedrGqmbmZmQfA/5LVX0UeJW5j/cLGfuau3XqXcwtV/w4cEGSz7/TLvO0jVXNfRhmjQPVPs5hP/aXZEjyLuaC/mtV9c2u+cUkm7vHNwNnuvaF6j3dbZ/b/pZ9kpwHXAj85fAr6dsngJ9L8gxzV0L9ZJK7Wd81nwZOV9VD3f0jzIX/eq75Z4BTVfXnVfW3wDeBf8j6rvmslahxoOwb57Af60sydEfc7wKerKov9Tx0FDh7dH0Pc2v5Z9t3d0foLwe2AQ93HxX/OsnHu+f8J+fsc/a5fh74bnWLfKuhqvZX1Zaq2src7+u7VfV51nfNfwb8IMmHu6brgCdYxzUzt3zz8SR/pxvrdcCTrO+az1qJGr8DfCrJpu5T1Ke6tne20gc0hnxw5NPMncXyp8CvrfZ4ljj2f8TcR68/Ab7X/fdp5tbkjgFPd7cX9+zza12tJ+iO2Hftk8Bj3WN38KNvRr8H+O/ASeaO+H9otevuGfMUPzpAu65rBq4CHul+1/+DuTMo1nvNvw481Y33t5k7C2Vd1Qx8g7ljEn/L3Gz75pWqEfinXftJ4Bf7Ga+XS5CkBozzMo4kqU+GvSQ1wLCXpAYY9pLUAMNekhpg2EtSAwx7SWrA/wf2/Y+ngTzk6QAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "----------------------------------------\n", + "Total sequences: 12624\n", + "----------------------------------------\n", + "Top 5 longest sequences:\n", + "id length\n", + "12906 99993\n", + "7152 99954\n", + "8826 99916\n", + "5233 99794\n", + "9819 99778\n", + "----------------------------------------\n", + "Top 5 shortest sequences:\n", + "id length\n", + "15229 76\n", + "12750 117\n", + "13021 128\n", + "13309 135\n", + "14763 148\n", + "----------------------------------------\n", + "Average length: 25173.596720532318\n", + "----------------------------------------\n", + "Top 5 most common lengths:\n", + "length count\n", + "930 15\n", + "939 13\n", + "945 12\n", + "1952 11\n", + "597 9\n", + "----------------------------------------\n", + "Top 5 least common lengths:\n", + "length count\n", + "60747 1\n", + "969 1\n", + "23223 1\n", + "75025 1\n", + "5711 1\n" + ] + } + ], + "source": [ + "# remove seequnces with length less than 50\n", + "positive = positive[positive[\"length\"] >= 50]\n", + "negative = negative[negative[\"length\"] >= 50]\n", + "\n", + "folder = \"essential_genes_100k\"\n", + "positive = positive[positive[\"length\"] < 100000]\n", + "negative = negative[negative[\"length\"] < 100000]\n", + "\n", + "stats(positive, \"positive_cut\")\n", + "print(\"*\" * 100)\n", + "stats(negative, \"negative_cut\")" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "positive[\"label\"] = 1\n", + "negative[\"label\"] = 0\n", + "\n", + "dataset = pd.concat([positive, negative])\n", + "\n", + "# remove length column\n", + "dataset = dataset.drop(columns=[\"length\"])\n", + "\n", + "# dataset.to_csv(\"../datasets/\" + folder + \"/dataset.csv\", index=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(14634, 3)\n" + ] + } + ], + "source": [ + "cutting_length = 2000\n", + "\n", + "# cut sequences to the cutting length\n", + "dataset[\"sequence\"] = dataset[\"sequence\"].apply(lambda x: x[:cutting_length])\n", + "\n", + "# fill with \"N\" the sequences that are shorter than cutting length\n", + "dataset[\"sequence\"] = dataset[\"sequence\"].apply(lambda x: x.ljust(cutting_length, \"N\"))\n", + "\n", + "print(dataset.shape)\n", + "\n", + "# write dataset to csv\n", + "# dataset.to_csv(\"../datasets/\" + folder + \"_cut/dataset.csv\", index=False)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.8.13 ('dna-conda': conda)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.13" + }, + "orig_nbformat": 4, + "vscode": { + "interpreter": { + "hash": "ba449ea13c29f64a91968d8f927cecceedd6e605eda30388903386e6cd94168d" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/propythia/DNA/essential_genes/dataset_processer.py b/src/propythia/DNA/essential_genes/dataset_processer.py new file mode 100644 index 0000000..45e5121 --- /dev/null +++ b/src/propythia/DNA/essential_genes/dataset_processer.py @@ -0,0 +1,182 @@ +from Bio.SeqIO.FastaIO import SimpleFastaParser +import csv + + +def read_csv(filename): + with open(filename, 'r') as f: + reader = csv.reader(f, delimiter=';') + return list(reader) + + +def read_tsv(filename): + d = {} + with open(filename, 'r') as f: + reader = csv.reader(f, delimiter='\t') + + for i in list(reader)[1:]: + d[i[0]] = i[1] + return d + + +def read_fasta(filename): + d = {} + with open(filename) as handle: + for key, sequence in SimpleFastaParser(handle): + sequence = sequence.upper() 
+ if(sequence != "SEQUENCEUNAVAILABLE"): + d[key] = sequence + print(len(d), "keys", len(d.values()), "seqs", len(set(d.values())), "unique") + return d + + +def get_ids(data): + """ + embl: dict of id4 -> sequence + hgnc: dict of id4 -> sequence + """ + embl = {} + hgnc = {} + neither = [] + all_seqs = [] + for i in data[1:]: + if(i[3].startswith("EMBL:")): + embl[i[3].replace("EMBL:", "")] = i[-1] + elif(i[3].startswith("HGNC:")): + hgnc[i[3]] = i[-1] + else: + neither.append(i[-1]) + + all_seqs.append(i[-1]) + + print(len(all_seqs), "seqs,", len(set(all_seqs)), "unique") + print("EMBL:", len(embl), "ids unique") + print("HGNC:", len(hgnc), "ids unique") + + return embl, hgnc + + +def get_ensembl_ids_from_DEG(data): + embl, hgnc = get_ids(data) + ligacoes = read_tsv("../datasets/essential_genes/ligacoes.tsv") + + ensembl_from_hgnc = [] + for i in hgnc: + if(i in ligacoes): + if(ligacoes[i] != ""): + ensembl_from_hgnc.append(ligacoes[i]) + else: + print(i, "not in ligacoes") + + print("converted", len(ensembl_from_hgnc), "hgnc ids to ensembl ids") + + ensembl_from_hgnc = set(ensembl_from_hgnc) + embl = set(embl.keys()) + + to_remove = ensembl_from_hgnc.union(embl) + print("EMBL ids + converted HGNC to EMBL ids:", len(to_remove), "unique") + + return to_remove + + +def create_dict_of_occurrences(d): + result = {} + for key, value in d.items(): + key = key.split("|")[0] + if(key in result): + result[key].append(value) + else: + result[key] = [value] + return result + + +def remove_essential_genes(ensembl_dataset, embl_ids): + d = create_dict_of_occurrences(ensembl_dataset) + all_sequences = [j for i in d.values() for j in i] + print("before removing:", len(d), "keys", len(all_sequences), "seqs", len(set(all_sequences)), "unique") + + for i in embl_ids: + if(i in d): + del d[i] + + all_sequences = [j for i in d.values() for j in i] + print("after removing:", len(d), "keys", len(all_sequences), "seqs", len(set(all_sequences)), "unique") + return d + + +def 
match_sequences_to_DEG(d, deg_data): + unique_sequences_deg = set([i[-1] for i in deg_data]) + arr = [] + for i in d.values(): + if(i in unique_sequences_deg): + arr.append(i) + return arr + + +def create_negative_dataset(d): + res = {} + for key, seqs in d.items(): + if(len(seqs) <= 2): + res[key] = seqs + else: + res[key] = seqs[:2] + + all_sequences = [j for i in res.values() for j in i] + print("negative dataset:", len(d), "keys", len( + all_sequences), "seqs", len(set(all_sequences)), "unique") + + # with open("../datasets/essential_genes/essential_genes_negative.csv", "w") as f: + # headers = ["id", "sequence"] + # writer = csv.writer(f, delimiter=",") + # writer.writerow(headers) + # for key, seqs in res.items(): + # for i in seqs: + # writer.writerow([key, i]) + + +def main(): + print("DEG stats") + print("-" * 50) + deg_data = read_csv("../datasets/essential_genes/deg.csv") + embl_ids = get_ensembl_ids_from_DEG(deg_data) + + # ---------------------------------------------------------------------- + filename = "../datasets/essential_genes/mart_export_unspliced_97.fa" + print() + print("ENSEMBL stats") + print("-" * 50) + ensembl_dataset = read_fasta(filename) + match_seqs = set(match_sequences_to_DEG(ensembl_dataset, deg_data)) + + # Removing essential genes with same seqs + + no_egs = {} + for key, val in ensembl_dataset.items(): + if(val not in match_seqs): + no_egs[key] = val + print("after removing essential genes:", len(no_egs), "keys", len( + no_egs.values()), "seqs", len(set(no_egs.values())), "unique") + + # Removing essential genes with EMBL + converted HGNC to EMBL ids + + negative_dataset = {} + for key, val in no_egs.items(): + gene_id = key.split("|")[0] + if(gene_id not in embl_ids): + negative_dataset[key] = val + + print("after removing essential genes with EMBL id:", len(negative_dataset), "keys", len( + negative_dataset.values()), "seqs", len(set(negative_dataset.values())), "unique") + + # Grouping by gene id + + d = 
create_dict_of_occurrences(negative_dataset) + + all_sequences = [j for i in d.values() for j in i] + print("after grouping gene stable ids:", len(d), "keys", len( + all_sequences), "seqs", len(set(all_sequences)), "unique") + + + create_negative_dataset(d) + +if __name__ == "__main__": + main() diff --git a/src/propythia/DNA/notebooks/quick-start-DL.ipynb b/src/propythia/DNA/notebooks/quick-start-DL.ipynb new file mode 100644 index 0000000..e63195b --- /dev/null +++ b/src/propythia/DNA/notebooks/quick-start-DL.ipynb @@ -0,0 +1,12006 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# ProPythia DNA Deep Learning module quick start" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is a notebook that explains how to perform every step of the developed Deep Learning modules. They include all the necessary steps to complete an entire Deep Learning pipeline. The steps are:\n", + "\n", + "- Data reading and validation\n", + "- Encoders\n", + "- DNA Descriptors\n", + "- Data splitting\n", + "- Model building and training\n", + "- Hyperparameter tuning" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "import pandas as pd\n", + "import sys\n", + "sys.path.append(\"../\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Data reading and validation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "(The machine learning pipeline uses the same module to read and validate the sequences.)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This module comprehends functions to read and to validate DNA sequences. First is necessary to create the object ReadDNA." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "from read_sequence import ReadDNA\n", + "reader = ReadDNA()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It is possible to create sequence objects using a single DNA sequence, a *CSV* and a *FASTA* file. The single sequence is going to be validated (check if all letters belong to the DNA alphabet) and the output will be the sequence in upper case." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ACGTACGAGCATGCAT\n" + ] + } + ], + "source": [ + "data = reader.read_sequence(\"ACGTACGAGCATGCAT\")\n", + "print(data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "With *CSV* there must be at least a column named 'sequence' in the file. The labels may also be retrieved and validated if the user wants them, but he must specify the `with_label` parameter as **True** and the column with the labels must be named 'label'." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " sequence\n", + "0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA...\n", + "1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC...\n", + "2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA...\n", + "3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC...\n", + "4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA...\n", + "(2000, 1)\n", + "----------------------------------------------------------------------------------------------------\n", + " sequence label\n", + "0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA... 0\n", + "1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC... 0\n", + "2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA... 0\n", + "3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC... 
1\n", + "4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA... 1\n", + "(2000, 2)\n" + ] + } + ], + "source": [ + "filename = \"../datasets/primer/dataset.csv\"\n", + "data = reader.read_csv(filename, with_labels=False)\n", + "print(data.head())\n", + "print(data.shape)\n", + "\n", + "print(\"-\" * 100)\n", + "\n", + "data = reader.read_csv(filename, with_labels=True)\n", + "print(data.head())\n", + "print(data.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The *FASTA* format is similar to the *CSV* format. It always reads the sequence, and the labels only if the user wants them. The *FASTA* format must be one of the following examples:\n", + "\n", + "```\n", + ">sequence_id1\n", + "ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n", + ">sequence_id2\n", + "ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n", + "``` \n", + "\n", + "```\n", + ">sequence_id1,label1\n", + "ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n", + ">sequence_id2,label2\n", + "ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n", + "``` " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " sequence\n", + "0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA...\n", + "1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC...\n", + "2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA...\n", + "3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC...\n", + "4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA...\n", + "(2000, 1)\n", + "----------------------------------------------------------------------------------------------------\n", + " sequence label\n", + "0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA... 0\n", + "1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC... 0\n", + "2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA... 0\n", + "3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC... 1\n", + "4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA... 
1\n", + "(2000, 2)\n" + ] + } + ], + "source": [ + "filename = \"../datasets/primer/dataset.fasta\"\n", + "data = reader.read_fasta(filename, with_labels=False)\n", + "print(data.head())\n", + "print(data.shape)\n", + "\n", + "print(\"-\" * 100)\n", + "\n", + "data = reader.read_fasta(filename, with_labels=True)\n", + "print(data.head())\n", + "print(data.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Encoders" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Deep learning models automatically extract features from the sequences, but it is necessary to build a representation of the sequences first due to the fact that models can't handle anything other than numerical values. Encoders are easily calculated and can serve as numerical representations of sequences, which can subsequently be used as model input." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This module comprehends functions to encode the DNA sequences. The encoding step is important because sequences need to be converted into a numerical value in order to create an input matrix for the model. The encoders that have been implemented are:\n", + "\n", + "- One-hot encoding\n", + "- Chemical encoding\n", + "- K-mer One-hot encoding\n", + "\n", + "Below there's an example for each of them.\n", + "\n", + "| Encoder | Sequence | Encoded sequence |\n", + "| ------------------- | -------- | -------------------------------------------- |\n", + "| One-Hot | ACGT | [[1,0,0,0], [0,1,0,0], [0,0,1,0], [0,0,0,1]] |\n", + "| Chemical | ACGT | [[1,1,1], [0,1,0], [1,0,0], [0,0,1]] |\n", + "| K-mer One-Hot (k=2) | ACGT | [[0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0]] |" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1. 
One-hot encoding" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "One-hot encoding is extensively used in deep learning models and is well suited for most models. It is a simple encoding that converts the DNA alphabet into a binary vector. \n", + "\n", + "- A -> [1,0,0,0]\n", + "- C -> [0,1,0,0]\n", + "- G -> [0,0,1,0]\n", + "- T -> [0,0,0,1]\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To encode a sequence, we need first to create the object DNAEncoder." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "from src.encoding import DNAEncoder\n", + "encoder = DNAEncoder('ACGTACGAGCATGCAT')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, we only need to specify the encoder method (one-hot, chemical, k-mer one-hot)." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[1 0 0 0]\n", + " [0 1 0 0]\n", + " [0 0 1 0]\n", + " [0 0 0 1]\n", + " [1 0 0 0]\n", + " [0 1 0 0]\n", + " [0 0 1 0]\n", + " [1 0 0 0]\n", + " [0 0 1 0]\n", + " [0 1 0 0]\n", + " [1 0 0 0]\n", + " [0 0 0 1]\n", + " [0 0 1 0]\n", + " [0 1 0 0]\n", + " [1 0 0 0]\n", + " [0 0 0 1]]\n" + ] + } + ], + "source": [ + "encoded_sequence = encoder.one_hot_encode()\n", + "print(encoded_sequence)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2. Chemical encoding" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The chemical encoding is a more complex encoding that uses the chemical properties of the DNA alphabet. Each letter is assigned a chemical property and the chemical properties are combined to create a vector. 
In a nutshell, the chemical properties are:\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Chemical propertyClassNucleotides
Ring structurePurineA, G
PyrimidineC, T
Hydrogen bondWeakA, T
StrongC, G
Functional groupAminoA, C
KetoG, T
\n", + "\n", + "If the letter is in the list of the first nucleotides, it is assigned the value 1 and if it is in the list of the second nucleotides, it is assigned the value 0. \n", + "\n", + "- A -> [1, 1, 1]\n", + "- C -> [0, 0, 1]\n", + "- G -> [1, 0, 0]\n", + "- T -> [0, 1, 0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The encoder object is already created so we just need to specify the encoder method." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[1 1 1]\n", + " [0 0 1]\n", + " [1 0 0]\n", + " [0 1 0]\n", + " [1 1 1]\n", + " [0 0 1]\n", + " [1 0 0]\n", + " [1 1 1]\n", + " [1 0 0]\n", + " [0 0 1]\n", + " [1 1 1]\n", + " [0 1 0]\n", + " [1 0 0]\n", + " [0 0 1]\n", + " [1 1 1]\n", + " [0 1 0]]\n" + ] + } + ], + "source": [ + "encoded_sequence = encoder.chemical_encode()\n", + "print(encoded_sequence)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.3. K-mer One-hot encoding" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Using one-hot encoding on DNA sequences solely preserves the positional information of each nucleotide. Recent investigations, however, have shown that including high-order dependencies among nucleotides may enhance the efficacy of DNA models. 
The K-mer One-hot encoding is a method that aims to overcome this problem.\n", + "\n", + "If k = 1,the encoder will create the same vector as the one-hot encoding.\n", + "\n", + "If k = 2, 16 dinucleotides will be created, and the encoder will create a vector with the following values:\n", + "\n", + "- AA = [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]\n", + "- AC = [0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0]\n", + "- AG = [0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0]\n", + "- ...\n", + "- TT = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1]\n", + "\n", + "If k = 3, 64 trinucleotides will be created, and the encoder will create a vector with the following values:\n", + "\n", + "- AAA = [1,0,0,0,...,0,0,0,0]\n", + "- AAC = [0,1,0,0,...,0,0,0,0]\n", + "- ...\n", + "- TTT = [0,0,0,0,...,0,0,0,1]\n", + "\n", + "The value of K can be any integer greater than 1 and less than or equal to the length of the sequence." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0.]\n", + " [0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0.]\n", + " [0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n", + " [0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0.]]\n" + ] + } + ], + "source": [ + "encoded_sequence = encoder.kmer_one_hot_encode(k=2)\n", + "print(encoded_sequence)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This module also allows the user to encode multiple sequences at once. The encoder can receive a column of a dataframe full of sequences and return an array of all encoded sequences." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[0 1 0 0]\n", + " [0 0 1 0]\n", + " [1 0 0 0]\n", + " [0 1 0 0]\n", + " [0 0 1 0]\n", + " [1 0 0 0]\n", + " [0 0 0 1]\n", + " [0 0 1 0]\n", + " [0 1 0 0]\n", + " [1 0 0 0]\n", + " [0 0 0 1]]\n", + "\n", + " [[0 1 0 0]\n", + " [0 0 1 0]\n", + " [1 0 0 0]\n", + " [1 0 0 0]\n", + " [0 0 1 0]\n", + " [0 0 1 0]\n", + " [0 0 0 1]\n", + " [0 0 1 0]\n", + " [0 0 0 1]\n", + " [1 0 0 0]\n", + " [0 1 0 0]]\n", + "\n", + " [[1 0 0 0]\n", + " [0 0 1 0]\n", + " [0 0 0 1]\n", + " [1 0 0 0]\n", + " [0 0 1 0]\n", + " [0 0 1 0]\n", + " [0 0 1 0]\n", + " [0 0 1 0]\n", + " [0 0 0 1]\n", + " [1 0 0 0]\n", + " [1 0 0 0]]]\n" + ] + } + ], + "source": [ + "df = pd.DataFrame(\n", + " [\n", + " ['CGACGATGCAT', 1], \n", + " ['CGAAGGTGTAC', 0], \n", + " ['AGTAGGGGTAA', 1]\n", + " ], \n", + " columns=['sequence', 'labels']\n", + ")\n", + "\n", + "column = df['sequence'].values\n", + "encoder = DNAEncoder(column)\n", + "encoded_sequences = encoder.one_hot_encode()\n", + "print(encoded_sequences)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. DNA Descriptors" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As mentioned in the `quick-start-DL.ipynb` notebook, descriptors are manually calculated and are an attempt to serve as features for the classification model. 
However, deep learning models cannot use descriptors as features because their purpose is to extract features on their own instead of manually calculating beforehand. The DNA descriptors are being mentioned here because there are some deep learning models that can use them as features, such as deep neural networks, but models like CNNs and RNNs are not able to use them as features.\n", + "\n", + "So, at this point, the user can either choose to use encoders or descriptors to proceed to the next step. Using encodings it would be something like:" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(2000, 50, 4)\n" + ] + } + ], + "source": [ + "reader = ReadDNA()\n", + "data = reader.read_csv(filename='../datasets/primer/dataset.csv', with_labels=True)\n", + "\n", + "fps_x = data['sequence'].values\n", + "fps_y = data['label'].values\n", + "\n", + "# choosing one hot encoding\n", + "encoder = DNAEncoder(fps_x)\n", + "fps_x = encoder.one_hot_encode()\n", + "print(fps_x.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Using descriptors it would be something like:" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 / 2000\n", + "100 / 2000\n", + "200 / 2000\n", + "300 / 2000\n", + "400 / 2000\n", + "500 / 2000\n", + "600 / 2000\n", + "700 / 2000\n", + "800 / 2000\n", + "900 / 2000\n", + "1000 / 2000\n", + "1100 / 2000\n", + "1200 / 2000\n", + "1300 / 2000\n", + "1400 / 2000\n", + "1500 / 2000\n", + "1600 / 2000\n", + "1700 / 2000\n", + "1800 / 2000\n", + "1900 / 2000\n", + "Done!\n", + "(2000, 247)\n" + ] + } + ], + "source": [ + "reader = ReadDNA()\n", + "data = reader.read_csv(filename='../datasets/primer/dataset.csv', with_labels=True)\n", + "\n", + "from calculate_features import calculate_and_normalize\n", + "from 
sklearn.preprocessing import StandardScaler\n", + "\n", + "fps_x, fps_y = calculate_and_normalize(data)\n", + "\n", + "scaler = StandardScaler().fit(fps_x)\n", + "fps_x = scaler.transform(fps_x)\n", + "fps_y = fps_y.to_numpy()\n", + "print(fps_x.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 4. Data splitting" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The sequences are at this point converted into numerical representations and are ready to be split into training, validation, and test sets. After that, each set needs also to be represented as the *PyTorch* object called *DataLoader*, which is a *Python* iterable over a dataset. All of this can be achieved using the function `data_splitting` from the `prepare_data.py` file." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "from src.prepare_data import data_splitting\n", + "batch_size = 32\n", + "train_size = 0.6\n", + "validation_size = 0.2\n", + "test_size = 0.2\n", + "\n", + "trainloader, testloader, validloader, _ = data_splitting(fps_x, fps_y, batch_size, train_size, test_size, validation_size)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 5. Model building and training" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Important Note:** Before continuing, it is worth noting that all of the previous steps, from the data reading, calculation of encoder/descriptors, and even the data splitting step, were compiled into a single function called `prepare_data` that can be called from the `prepare_data.py` file. An example of how to use this function will be shown later." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "At this point, the data is now ready to be used by a model. The user can choose to use one of the 6 implemented *PyTorch* models. 
They are:\n", + "\n", + "| Models | Features |\n", + "| --------------------- | ----------- |\n", + "| MLP | Descriptors |\n", + "| CNN | Encoders |\n", + "| LSTM / BiLSTM | Encoders |\n", + "| GRU / BiGRU | Encoders |\n", + "| CNN-LSTM / CNN-BiLSTM | Encoders |\n", + "| CNN-GRU / CNN-BiGRU | Encoders |" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As we can see, some models require the use of encoders and some require descriptors. Also, some models have the bidirectional option, resulting in 2 + 4*2 = 10 different models." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Imagining the scenario that we want to use descriptors as features, we need to choose the *MLP* model. We also need to specify some parameters for the training function. To make it easier for the user, a config file was created to provide an overview of all the parameters that will be used from now on. An example of a `config.json` file is:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```json\n", + "{\n", + " \"combination\":{\n", + " \"model_label\": \"mlp\",\n", + " \"mode\": \"descriptor\",\n", + " \"data_dir\": \"primer\"\n", + " },\n", + " \"do_tuning\": false,\n", + " \"fixed_vals\":{\n", + " \"epochs\": 500,\n", + " \"optimizer_label\": \"adam\",\n", + " \"loss_function\": \"cross_entropy\",\n", + " \"patience\": 8,\n", + " \"output_size\": 2,\n", + " \"cpus_per_trial\":1, \n", + " \"gpus_per_trial\":0,\n", + " \"num_samples\": 15,\n", + " \"num_layers\": 2,\n", + " \"kmer_one_hot\": 3\n", + " },\n", + " \"hyperparameters\": {\n", + " \"hidden_size\": 32,\n", + " \"lr\": 1e-3,\n", + " \"batch_size\": 32,\n", + " \"dropout\": 0.35\n", + " },\n", + " \"hyperparameter_search_space\": {\n", + " \"hidden_size\": [32, 64, 128, 256],\n", + " \"lr\": [1e-5, 1e-2],\n", + " \"batch_size\": [8, 16, 32],\n", + " \"dropout\": [0.3, 0.5]\n", + " },\n", + " \"train_all_combinations\": false\n", + "}\n", + "```" + ] + }, 
+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To read the values from the configuraton file, we can use the function `read_config` from the `deep_ml.py` file. This functions also validates the configuration file and returns a dictionary with the values." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training on: cuda:0\n", + "combination {\n", + "\t model_label : cnn\n", + "\t mode : one_hot\n", + "\t data_dir : /home/jabreu/propythia/src/propythia/DNA/datasets/primer\n", + "}\n", + "do_tuning : False\n", + "fixed_vals {\n", + "\t epochs : 500\n", + "\t optimizer_label : adam\n", + "\t loss_function : CrossEntropyLoss()\n", + "\t patience : 7\n", + "\t output_size : 2\n", + "\t cpus_per_trial : 2\n", + "\t gpus_per_trial : 2\n", + "\t num_samples : 15\n", + "\t num_layers : 2\n", + "\t kmer_one_hot : 3\n", + "}\n", + "hyperparameters {\n", + "\t hidden_size : 32\n", + "\t lr : 0.001\n", + "\t batch_size : 32\n", + "\t dropout : 0.35\n", + "}\n", + "hyperparameter_search_space {\n", + "\t hidden_size : \n", + "\t lr : \n", + "\t batch_size : \n", + "\t dropout : \n", + "}\n", + "train_all_combinations : False\n" + ] + } + ], + "source": [ + "from deep_ml import read_config\n", + "config = read_config(filename='../config.json')\n", + "\n", + "for key, val in config.items():\n", + " if(key == \"do_tuning\" or key == 'train_all_combinations'):\n", + " print(key, \":\", val)\n", + " else:\n", + " print(key, \"{\")\n", + " for k, v in val.items():\n", + " print(\"\\t\", k,\":\", v)\n", + " print(\"}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As we can see, there is a dict called 'hyperparameters' for the training. These values were arbitrarily chosen, which can lead to poor performance, and that's why we need hyperparameter tuning to find the best values. 
But so far let's keep it simple and use the default values. Hyperparameter tuning will be discussed later in the tutorial (the dict called 'hyperparameter_search_space' will be used later)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, we just need to call the training function with all of these values and we will obtain a trained model. But before this, it is important to specify which device we want the model to be trained on. Generally, it is a good idea to use the GPU if it is available. It is also a good practice to set a seed to ensure that the results are reproducible." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy\n", + "import os\n", + "import torch\n", + "\n", + "numpy.random.seed(2022)\n", + "torch.manual_seed(2022)\n", + "os.environ[\"CUDA_VISIBLE_DEVICES\"] = '1,2,3,4,5'\n", + "device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we are ready to call the training function." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[1/500, 0/38] loss: 0.70695961\n", + "The Current Loss: 0.6356495572970464\n", + "trigger times: 0\n", + "[2/500, 0/38] loss: 0.64424402\n", + "The Current Loss: 0.49543410539627075\n", + "trigger times: 0\n", + "[3/500, 0/38] loss: 0.50877994\n", + "The Current Loss: 0.42889038645304167\n", + "trigger times: 0\n", + "[4/500, 0/38] loss: 0.36436936\n", + "The Current Loss: 0.3913511771422166\n", + "trigger times: 0\n", + "[5/500, 0/38] loss: 0.44344398\n", + "The Current Loss: 0.3759752168105199\n", + "trigger times: 0\n", + "[6/500, 0/38] loss: 0.33551249\n", + "The Current Loss: 0.37056772525493914\n", + "trigger times: 0\n", + "[7/500, 0/38] loss: 0.32777476\n", + "The Current Loss: 0.36761671763200027\n", + "trigger times: 0\n", + "[8/500, 0/38] loss: 0.32554531\n", + "The Current Loss: 0.3595673373112312\n", + "trigger times: 0\n", + "[9/500, 0/38] loss: 0.36578175\n", + "The Current Loss: 0.35537290343871486\n", + "trigger times: 0\n", + "[10/500, 0/38] loss: 0.32901487\n", + "The Current Loss: 0.3488809076639322\n", + "trigger times: 0\n", + "[11/500, 0/38] loss: 0.31945065\n", + "The Current Loss: 0.3502378830542931\n", + "trigger Times: 1\n", + "[12/500, 0/38] loss: 0.3177934\n", + "The Current Loss: 0.342059836937831\n", + "trigger times: 0\n", + "[13/500, 0/38] loss: 0.32371533\n", + "The Current Loss: 0.3431735336780548\n", + "trigger Times: 1\n", + "[14/500, 0/38] loss: 0.31896546\n", + "The Current Loss: 0.34466134355618405\n", + "trigger Times: 2\n", + "[15/500, 0/38] loss: 0.31500083\n", + "The Current Loss: 0.33908220437856823\n", + "trigger times: 0\n", + "[16/500, 0/38] loss: 0.34192631\n", + "The Current Loss: 0.3376885996415065\n", + "trigger times: 0\n", + "[17/500, 0/38] loss: 0.31796718\n", + "The Current Loss: 0.3385013937950134\n", + "trigger Times: 1\n", + "[18/500, 0/38] loss: 
0.31551638\n", + "The Current Loss: 0.33797308802604675\n", + "trigger times: 0\n", + "[19/500, 0/38] loss: 0.31429073\n", + "The Current Loss: 0.3362450141173143\n", + "trigger times: 0\n", + "[20/500, 0/38] loss: 0.31527719\n", + "The Current Loss: 0.3348177786056812\n", + "trigger times: 0\n", + "[21/500, 0/38] loss: 0.31469947\n", + "The Current Loss: 0.3390410634187552\n", + "trigger Times: 1\n", + "[22/500, 0/38] loss: 0.31564972\n", + "The Current Loss: 0.3388374952169565\n", + "trigger times: 0\n", + "[23/500, 0/38] loss: 0.31563392\n", + "The Current Loss: 0.3346803761445559\n", + "trigger times: 0\n", + "[24/500, 0/38] loss: 0.31435257\n", + "The Current Loss: 0.3342469518001263\n", + "trigger times: 0\n", + "[25/500, 0/38] loss: 0.31409562\n", + "The Current Loss: 0.3374206836407001\n", + "trigger Times: 1\n", + "[26/500, 0/38] loss: 0.31405213\n", + "The Current Loss: 0.3349521847871634\n", + "trigger times: 0\n", + "[27/500, 0/38] loss: 0.31414026\n", + "The Current Loss: 0.3334119709638449\n", + "trigger times: 0\n", + "[28/500, 0/38] loss: 0.31364042\n", + "The Current Loss: 0.33603978615540725\n", + "trigger Times: 1\n", + "[29/500, 0/38] loss: 0.31380904\n", + "The Current Loss: 0.3353504492686345\n", + "trigger times: 0\n", + "[30/500, 0/38] loss: 0.31409103\n", + "The Current Loss: 0.3333322314115671\n", + "trigger times: 0\n", + "[31/500, 0/38] loss: 0.31382421\n", + "The Current Loss: 0.3325759447537936\n", + "trigger times: 0\n", + "[32/500, 0/38] loss: 0.31356463\n", + "The Current Loss: 0.33268067928460926\n", + "trigger Times: 1\n", + "[33/500, 0/38] loss: 0.31377432\n", + "The Current Loss: 0.3323714228776785\n", + "trigger times: 0\n", + "[34/500, 0/38] loss: 0.31406724\n", + "The Current Loss: 0.33292080347354597\n", + "trigger Times: 1\n", + "[35/500, 0/38] loss: 0.31367534\n", + "The Current Loss: 0.3336834655358241\n", + "trigger Times: 2\n", + "[36/500, 0/38] loss: 0.31353033\n", + "The Current Loss: 0.3339348297852736\n", + "trigger 
Times: 3\n", + "[37/500, 0/38] loss: 0.31348953\n", + "The Current Loss: 0.3324765058664175\n", + "trigger times: 0\n", + "[38/500, 0/38] loss: 0.3137677\n", + "The Current Loss: 0.33189022770294774\n", + "trigger times: 0\n", + "[39/500, 0/38] loss: 0.3133997\n", + "The Current Loss: 0.3338412092282222\n", + "trigger Times: 1\n", + "[40/500, 0/38] loss: 0.31371829\n", + "The Current Loss: 0.33217631165797895\n", + "trigger times: 0\n", + "[41/500, 0/38] loss: 0.31346869\n", + "The Current Loss: 0.3327196125800793\n", + "trigger Times: 1\n", + "[42/500, 0/38] loss: 0.31361112\n", + "The Current Loss: 0.3319312655008756\n", + "trigger times: 0\n", + "[43/500, 0/38] loss: 0.31336373\n", + "The Current Loss: 0.33177637366148144\n", + "trigger times: 0\n", + "[44/500, 0/38] loss: 0.31341678\n", + "The Current Loss: 0.33522831247403073\n", + "trigger Times: 1\n", + "[45/500, 0/38] loss: 0.31340209\n", + "The Current Loss: 0.3321813413730034\n", + "trigger times: 0\n", + "[46/500, 0/38] loss: 0.31340483\n", + "The Current Loss: 0.3350568092786349\n", + "trigger Times: 1\n", + "[47/500, 0/38] loss: 0.31336692\n", + "The Current Loss: 0.33212945552972645\n", + "trigger times: 0\n", + "[48/500, 0/38] loss: 0.31331846\n", + "The Current Loss: 0.3332089208639585\n", + "trigger Times: 1\n", + "[49/500, 0/38] loss: 0.31335628\n", + "The Current Loss: 0.3335557052722344\n", + "trigger Times: 2\n", + "[50/500, 0/38] loss: 0.31332123\n", + "The Current Loss: 0.3320192809288318\n", + "trigger times: 0\n", + "[51/500, 0/38] loss: 0.31329992\n", + "The Current Loss: 0.3316896626582512\n", + "trigger times: 0\n", + "[52/500, 0/38] loss: 0.31329581\n", + "The Current Loss: 0.33164436312822193\n", + "trigger times: 0\n", + "[53/500, 0/38] loss: 0.3133302\n", + "The Current Loss: 0.3338074340270116\n", + "trigger Times: 1\n", + "[54/500, 0/38] loss: 0.31333911\n", + "The Current Loss: 0.33210166371785677\n", + "trigger times: 0\n", + "[55/500, 0/38] loss: 0.31331253\n", + "The Current 
Loss: 0.3319276594198667\n", + "trigger times: 0\n", + "[56/500, 0/38] loss: 0.31331038\n", + "The Current Loss: 0.3322177529335022\n", + "trigger Times: 1\n", + "[57/500, 0/38] loss: 0.31329879\n", + "The Current Loss: 0.3319431864298307\n", + "trigger times: 0\n", + "[58/500, 0/38] loss: 0.31328976\n", + "The Current Loss: 0.3341810451104091\n", + "trigger Times: 1\n", + "[59/500, 0/38] loss: 0.31332207\n", + "The Current Loss: 0.3318466544151306\n", + "trigger times: 0\n", + "[60/500, 0/38] loss: 0.31327102\n", + "The Current Loss: 0.33229721738741946\n", + "trigger Times: 1\n", + "[61/500, 0/38] loss: 0.31327847\n", + "The Current Loss: 0.332081329364043\n", + "trigger times: 0\n", + "[62/500, 0/38] loss: 0.31330189\n", + "The Current Loss: 0.3318099838036757\n", + "trigger times: 0\n", + "[63/500, 0/38] loss: 0.31328678\n", + "The Current Loss: 0.33488412545277524\n", + "trigger Times: 1\n", + "[64/500, 0/38] loss: 0.31331736\n", + "The Current Loss: 0.3323269532277034\n", + "trigger times: 0\n", + "[65/500, 0/38] loss: 0.31328696\n", + "The Current Loss: 0.3321110010147095\n", + "trigger times: 0\n", + "[66/500, 0/38] loss: 0.3133032\n", + "The Current Loss: 0.33251670461434585\n", + "trigger Times: 1\n", + "[67/500, 0/38] loss: 0.31328124\n", + "The Current Loss: 0.3343178629875183\n", + "trigger Times: 2\n", + "[68/500, 0/38] loss: 0.31326586\n", + "The Current Loss: 0.3343017811958606\n", + "trigger times: 0\n", + "[69/500, 0/38] loss: 0.31327543\n", + "The Current Loss: 0.33205594237034136\n", + "trigger times: 0\n", + "[70/500, 0/38] loss: 0.31332093\n", + "The Current Loss: 0.3324971061486464\n", + "trigger Times: 1\n", + "[71/500, 0/38] loss: 0.31327659\n", + "The Current Loss: 0.33210762647482067\n", + "trigger times: 0\n", + "[72/500, 0/38] loss: 0.31326991\n", + "The Current Loss: 0.33262359408231884\n", + "trigger Times: 1\n", + "[73/500, 0/38] loss: 0.31328139\n", + "The Current Loss: 0.3320640508945172\n", + "trigger times: 0\n", + "[74/500, 
0/38] loss: 0.31327879\n", + "The Current Loss: 0.3344904321890611\n", + "trigger Times: 1\n", + "[75/500, 0/38] loss: 0.31328756\n", + "The Current Loss: 0.3342970128242786\n", + "trigger times: 0\n", + "[76/500, 0/38] loss: 0.31329632\n", + "The Current Loss: 0.3334831091073843\n", + "trigger times: 0\n", + "[77/500, 0/38] loss: 0.31327999\n", + "The Current Loss: 0.3325768548708696\n", + "trigger times: 0\n", + "[78/500, 0/38] loss: 0.31328946\n", + "The Current Loss: 0.3320799355323498\n", + "trigger times: 0\n", + "[79/500, 0/38] loss: 0.31329805\n", + "The Current Loss: 0.3328295900271489\n", + "trigger Times: 1\n", + "[80/500, 0/38] loss: 0.31327525\n", + "The Current Loss: 0.3343419638963846\n", + "trigger Times: 2\n", + "[81/500, 0/38] loss: 0.31329086\n", + "The Current Loss: 0.3349299362072578\n", + "trigger Times: 3\n", + "[82/500, 0/38] loss: 0.31329671\n", + "The Current Loss: 0.33216070899596584\n", + "trigger times: 0\n", + "[83/500, 0/38] loss: 0.31327114\n", + "The Current Loss: 0.3320835530757904\n", + "trigger times: 0\n", + "[84/500, 0/38] loss: 0.31327981\n", + "The Current Loss: 0.3346842917112204\n", + "trigger Times: 1\n", + "[85/500, 0/38] loss: 0.31327587\n", + "The Current Loss: 0.3320966064929962\n", + "trigger times: 0\n", + "[86/500, 0/38] loss: 0.31331539\n", + "The Current Loss: 0.33249825697678786\n", + "trigger Times: 1\n", + "[87/500, 0/38] loss: 0.31327447\n", + "The Current Loss: 0.3322327435016632\n", + "trigger times: 0\n", + "[88/500, 0/38] loss: 0.31328958\n", + "The Current Loss: 0.3326404576118176\n", + "trigger Times: 1\n", + "[89/500, 0/38] loss: 0.31328699\n", + "The Current Loss: 0.3344948131304521\n", + "trigger Times: 2\n", + "[90/500, 0/38] loss: 0.31326783\n", + "The Current Loss: 0.33434570064911473\n", + "trigger times: 0\n", + "[91/500, 0/38] loss: 0.31328884\n", + "The Current Loss: 0.3332656209285443\n", + "trigger times: 0\n", + "[92/500, 0/38] loss: 0.31327617\n", + "The Current Loss: 
0.33209900443370527\n", + "trigger times: 0\n", + "[93/500, 0/38] loss: 0.31326956\n", + "The Current Loss: 0.3328542892749493\n", + "trigger Times: 1\n", + "[94/500, 0/38] loss: 0.31329218\n", + "The Current Loss: 0.3343385343368237\n", + "trigger Times: 2\n", + "[95/500, 0/38] loss: 0.31327963\n", + "The Current Loss: 0.3344809069083287\n", + "trigger Times: 3\n", + "[96/500, 0/38] loss: 0.3132779\n", + "The Current Loss: 0.33427763214478123\n", + "trigger times: 0\n", + "[97/500, 0/38] loss: 0.31328189\n", + "The Current Loss: 0.3343068521756392\n", + "trigger Times: 1\n", + "[98/500, 0/38] loss: 0.31327161\n", + "The Current Loss: 0.3320807952147264\n", + "trigger times: 0\n", + "[99/500, 0/38] loss: 0.31329539\n", + "The Current Loss: 0.33427932399969834\n", + "trigger Times: 1\n", + "[100/500, 0/38] loss: 0.31327978\n", + "The Current Loss: 0.3320989448290605\n", + "trigger times: 0\n", + "[101/500, 0/38] loss: 0.31329414\n", + "The Current Loss: 0.3342809562499707\n", + "trigger Times: 1\n", + "[102/500, 0/38] loss: 0.31329885\n", + "The Current Loss: 0.332550422503398\n", + "trigger times: 0\n", + "[103/500, 0/38] loss: 0.31328699\n", + "The Current Loss: 0.33210156284845793\n", + "trigger times: 0\n", + "[104/500, 0/38] loss: 0.31329125\n", + "The Current Loss: 0.3325106203556061\n", + "trigger Times: 1\n", + "[105/500, 0/38] loss: 0.3133224\n", + "The Current Loss: 0.3320875076147226\n", + "trigger times: 0\n", + "[106/500, 0/38] loss: 0.31329563\n", + "The Current Loss: 0.33285805812248814\n", + "trigger Times: 1\n", + "[107/500, 0/38] loss: 0.31328279\n", + "The Current Loss: 0.3321165763414823\n", + "trigger times: 0\n", + "[108/500, 0/38] loss: 0.31326774\n", + "The Current Loss: 0.33215247897001415\n", + "trigger Times: 1\n", + "[109/500, 0/38] loss: 0.31327736\n", + "The Current Loss: 0.33209710396253145\n", + "trigger times: 0\n", + "[110/500, 0/38] loss: 0.31327593\n", + "The Current Loss: 0.33208059347592866\n", + "trigger times: 0\n", + 
"[111/500, 0/38] loss: 0.31328115\n", + "The Current Loss: 0.3328210986577548\n", + "trigger Times: 1\n", + "[112/500, 0/38] loss: 0.31328404\n", + "The Current Loss: 0.33581556723668027\n", + "trigger Times: 2\n", + "[113/500, 0/38] loss: 0.31328386\n", + "The Current Loss: 0.33436374939405\n", + "trigger times: 0\n", + "[114/500, 0/38] loss: 0.31328163\n", + "The Current Loss: 0.33208714998685396\n", + "trigger times: 0\n", + "[115/500, 0/38] loss: 0.31328669\n", + "The Current Loss: 0.3320883879294762\n", + "trigger Times: 1\n", + "[116/500, 0/38] loss: 0.31329906\n", + "The Current Loss: 0.3343860117288736\n", + "trigger Times: 2\n", + "[117/500, 0/38] loss: 0.31330904\n", + "The Current Loss: 0.33214738735785854\n", + "trigger times: 0\n", + "[118/500, 0/38] loss: 0.31328216\n", + "The Current Loss: 0.3320804719741528\n", + "trigger times: 0\n", + "[119/500, 0/38] loss: 0.31327444\n", + "The Current Loss: 0.33208428208644575\n", + "trigger Times: 1\n", + "[120/500, 0/38] loss: 0.31328756\n", + "The Current Loss: 0.3320831541831677\n", + "trigger times: 0\n", + "[121/500, 0/38] loss: 0.31328321\n", + "The Current Loss: 0.3342731434565324\n", + "trigger Times: 1\n", + "[122/500, 0/38] loss: 0.31328246\n", + "The Current Loss: 0.3345170823427347\n", + "trigger Times: 2\n", + "[123/500, 0/38] loss: 0.3132838\n", + "The Current Loss: 0.3320809304714203\n", + "trigger times: 0\n", + "[124/500, 0/38] loss: 0.31327024\n", + "The Current Loss: 0.3320814050160922\n", + "trigger Times: 1\n", + "[125/500, 0/38] loss: 0.31329435\n", + "The Current Loss: 0.33217339561535764\n", + "trigger Times: 2\n", + "[126/500, 0/38] loss: 0.31327268\n", + "The Current Loss: 0.33726779314187855\n", + "trigger Times: 3\n", + "[127/500, 0/38] loss: 0.31331453\n", + "The Current Loss: 0.332499412389902\n", + "trigger times: 0\n", + "[128/500, 0/38] loss: 0.31326628\n", + "The Current Loss: 0.33249975397036624\n", + "trigger Times: 1\n", + "[129/500, 0/38] loss: 0.31330267\n", + "The Current 
Loss: 0.3325649912540729\n", + "trigger Times: 2\n", + "[130/500, 0/38] loss: 0.31330085\n", + "The Current Loss: 0.3321834963101607\n", + "trigger times: 0\n", + "[131/500, 0/38] loss: 0.31326917\n", + "The Current Loss: 0.3321109459950374\n", + "trigger times: 0\n", + "[132/500, 0/38] loss: 0.31327403\n", + "The Current Loss: 0.33435596181796146\n", + "trigger Times: 1\n", + "[133/500, 0/38] loss: 0.31327897\n", + "The Current Loss: 0.33208705140994144\n", + "trigger times: 0\n", + "[134/500, 0/38] loss: 0.31331435\n", + "The Current Loss: 0.33211212433301485\n", + "trigger Times: 1\n", + "[135/500, 0/38] loss: 0.31329137\n", + "The Current Loss: 0.3320867419242859\n", + "trigger times: 0\n", + "[136/500, 0/38] loss: 0.31328094\n", + "The Current Loss: 0.3346398908358354\n", + "trigger Times: 1\n", + "[137/500, 0/38] loss: 0.31330168\n", + "The Current Loss: 0.33550121234013486\n", + "trigger Times: 2\n", + "[138/500, 0/38] loss: 0.31327775\n", + "The Current Loss: 0.3342825770378113\n", + "trigger times: 0\n", + "[139/500, 0/38] loss: 0.31330445\n", + "The Current Loss: 0.3320819139480591\n", + "trigger times: 0\n", + "[140/500, 0/38] loss: 0.3132866\n", + "The Current Loss: 0.33209198025556713\n", + "trigger Times: 1\n", + "[141/500, 0/38] loss: 0.31328785\n", + "The Current Loss: 0.33438223600387573\n", + "trigger Times: 2\n", + "[142/500, 0/38] loss: 0.31327921\n", + "The Current Loss: 0.3345325061908135\n", + "trigger Times: 3\n", + "[143/500, 0/38] loss: 0.31329483\n", + "The Current Loss: 0.33216391389186567\n", + "trigger times: 0\n", + "[144/500, 0/38] loss: 0.3132759\n", + "The Current Loss: 0.332499419267361\n", + "trigger Times: 1\n", + "[145/500, 0/38] loss: 0.31329593\n", + "The Current Loss: 0.3337028462153215\n", + "trigger Times: 2\n", + "[146/500, 0/38] loss: 0.31329775\n", + "The Current Loss: 0.33435033376400286\n", + "trigger Times: 3\n", + "[147/500, 0/38] loss: 0.31330845\n", + "The Current Loss: 0.33283586456225467\n", + "trigger times: 
0\n", + "[148/500, 0/38] loss: 0.31330112\n", + "The Current Loss: 0.3320921567770151\n", + "trigger times: 0\n", + "[149/500, 0/38] loss: 0.313308\n", + "The Current Loss: 0.33210182648438674\n", + "trigger Times: 1\n", + "[150/500, 0/38] loss: 0.3132672\n", + "The Current Loss: 0.33419514619387114\n", + "trigger Times: 2\n", + "[151/500, 0/38] loss: 0.31329206\n", + "The Current Loss: 0.33208713164696324\n", + "trigger times: 0\n", + "[152/500, 0/38] loss: 0.31329417\n", + "The Current Loss: 0.33270991994784427\n", + "trigger Times: 1\n", + "[153/500, 0/38] loss: 0.31330204\n", + "The Current Loss: 0.33210274347892177\n", + "trigger times: 0\n", + "[154/500, 0/38] loss: 0.31327897\n", + "The Current Loss: 0.33431833523970383\n", + "trigger Times: 1\n", + "[155/500, 0/38] loss: 0.31334129\n", + "The Current Loss: 0.33208762682401216\n", + "trigger times: 0\n", + "[156/500, 0/38] loss: 0.31330493\n", + "The Current Loss: 0.33208491710516125\n", + "trigger times: 0\n", + "[157/500, 0/38] loss: 0.31327847\n", + "The Current Loss: 0.3320896533819345\n", + "trigger Times: 1\n", + "[158/500, 0/38] loss: 0.31326783\n", + "The Current Loss: 0.33211687436470616\n", + "trigger Times: 2\n", + "[159/500, 0/38] loss: 0.31327373\n", + "The Current Loss: 0.33213600057822007\n", + "trigger Times: 3\n", + "[160/500, 0/38] loss: 0.31329566\n", + "The Current Loss: 0.332100489964852\n", + "trigger times: 0\n", + "[161/500, 0/38] loss: 0.31329203\n", + "The Current Loss: 0.3320884704589844\n", + "trigger times: 0\n", + "[162/500, 0/38] loss: 0.31326964\n", + "The Current Loss: 0.33208203544983494\n", + "trigger times: 0\n", + "[163/500, 0/38] loss: 0.31328446\n", + "The Current Loss: 0.3328355413216811\n", + "trigger Times: 1\n", + "[164/500, 0/38] loss: 0.31328982\n", + "The Current Loss: 0.3328213393688202\n", + "trigger times: 0\n", + "[165/500, 0/38] loss: 0.31328046\n", + "The Current Loss: 0.33208133853398836\n", + "trigger times: 0\n", + "[166/500, 0/38] loss: 0.31327745\n", + 
"The Current Loss: 0.33478997074640715\n", + "trigger Times: 1\n", + "[167/500, 0/38] loss: 0.31332746\n", + "The Current Loss: 0.33311742773422826\n", + "trigger times: 0\n", + "[168/500, 0/38] loss: 0.31329611\n", + "The Current Loss: 0.3321591799075787\n", + "trigger times: 0\n", + "[169/500, 0/38] loss: 0.31326714\n", + "The Current Loss: 0.3321161063817831\n", + "trigger times: 0\n", + "[170/500, 0/38] loss: 0.31328437\n", + "The Current Loss: 0.33450332283973694\n", + "trigger Times: 1\n", + "[171/500, 0/38] loss: 0.31328809\n", + "The Current Loss: 0.33431269572331357\n", + "trigger times: 0\n", + "[172/500, 0/38] loss: 0.31328723\n", + "The Current Loss: 0.3336617969549619\n", + "trigger times: 0\n", + "[173/500, 0/38] loss: 0.31327596\n", + "The Current Loss: 0.3321142059106093\n", + "trigger times: 0\n", + "[174/500, 0/38] loss: 0.31327629\n", + "The Current Loss: 0.3343534790552579\n", + "trigger Times: 1\n", + "[175/500, 0/38] loss: 0.3132849\n", + "The Current Loss: 0.3353465314094837\n", + "trigger Times: 2\n", + "[176/500, 0/38] loss: 0.31326741\n", + "The Current Loss: 0.33213526698259205\n", + "trigger times: 0\n", + "[177/500, 0/38] loss: 0.31327841\n", + "The Current Loss: 0.3321560002290286\n", + "trigger Times: 1\n", + "[178/500, 0/38] loss: 0.31329438\n", + "The Current Loss: 0.33219422514622027\n", + "trigger Times: 2\n", + "[179/500, 0/38] loss: 0.31330326\n", + "The Current Loss: 0.3364259898662567\n", + "trigger Times: 3\n", + "[180/500, 0/38] loss: 0.3132737\n", + "The Current Loss: 0.33260980248451233\n", + "trigger times: 0\n", + "[181/500, 0/38] loss: 0.31327772\n", + "The Current Loss: 0.334350769336407\n", + "trigger Times: 1\n", + "[182/500, 0/38] loss: 0.31327751\n", + "The Current Loss: 0.3329101617519672\n", + "trigger times: 0\n", + "[183/500, 0/38] loss: 0.31328699\n", + "The Current Loss: 0.33312673752124494\n", + "trigger Times: 1\n", + "[184/500, 0/38] loss: 0.31329203\n", + "The Current Loss: 0.33208826872018665\n", + 
"trigger times: 0\n", + "[185/500, 0/38] loss: 0.31328782\n", + "The Current Loss: 0.33208095798125636\n", + "trigger times: 0\n", + "[186/500, 0/38] loss: 0.31328648\n", + "The Current Loss: 0.33219724205824047\n", + "trigger Times: 1\n", + "[187/500, 0/38] loss: 0.31330112\n", + "The Current Loss: 0.3325744179578928\n", + "trigger Times: 2\n", + "[188/500, 0/38] loss: 0.31331876\n", + "The Current Loss: 0.33211289231593794\n", + "trigger times: 0\n", + "[189/500, 0/38] loss: 0.31327319\n", + "The Current Loss: 0.33413291435975295\n", + "trigger Times: 1\n", + "[190/500, 0/38] loss: 0.31328893\n", + "The Current Loss: 0.334275660606531\n", + "trigger Times: 2\n", + "[191/500, 0/38] loss: 0.31329495\n", + "The Current Loss: 0.33209147590857285\n", + "trigger times: 0\n", + "[192/500, 0/38] loss: 0.31328881\n", + "The Current Loss: 0.33208772310843837\n", + "trigger times: 0\n", + "[193/500, 0/38] loss: 0.31329799\n", + "The Current Loss: 0.3321128258338341\n", + "trigger Times: 1\n", + "[194/500, 0/38] loss: 0.31328592\n", + "The Current Loss: 0.33209338784217834\n", + "trigger times: 0\n", + "[195/500, 0/38] loss: 0.31327742\n", + "The Current Loss: 0.3344974540747129\n", + "trigger Times: 1\n", + "[196/500, 0/38] loss: 0.31328681\n", + "The Current Loss: 0.3334937072717227\n", + "trigger times: 0\n", + "[197/500, 0/38] loss: 0.31329143\n", + "The Current Loss: 0.3320810611431415\n", + "trigger times: 0\n", + "[198/500, 0/38] loss: 0.31326759\n", + "The Current Loss: 0.3320901164641747\n", + "trigger Times: 1\n", + "[199/500, 0/38] loss: 0.31328407\n", + "The Current Loss: 0.3342716716803037\n", + "trigger Times: 2\n", + "[200/500, 0/38] loss: 0.31327602\n", + "The Current Loss: 0.3326299740717961\n", + "trigger times: 0\n", + "[201/500, 0/38] loss: 0.3132911\n", + "The Current Loss: 0.33431313588069034\n", + "trigger Times: 1\n", + "[202/500, 0/38] loss: 0.3133029\n", + "The Current Loss: 0.33208073331759524\n", + "trigger times: 0\n", + "[203/500, 0/38] loss: 
0.31329522\n", + "The Current Loss: 0.332080419246967\n", + "trigger times: 0\n", + "[204/500, 0/38] loss: 0.31329384\n", + "The Current Loss: 0.33419488026545596\n", + "trigger Times: 1\n", + "[205/500, 0/38] loss: 0.3132863\n", + "The Current Loss: 0.3320812009848081\n", + "trigger times: 0\n", + "[206/500, 0/38] loss: 0.31329423\n", + "The Current Loss: 0.3321061203112969\n", + "trigger Times: 1\n", + "[207/500, 0/38] loss: 0.31329393\n", + "The Current Loss: 0.33209184958384586\n", + "trigger times: 0\n", + "[208/500, 0/38] loss: 0.31328109\n", + "The Current Loss: 0.33208168011445266\n", + "trigger times: 0\n", + "[209/500, 0/38] loss: 0.31327161\n", + "The Current Loss: 0.3320845411374019\n", + "trigger Times: 1\n", + "[210/500, 0/38] loss: 0.3132748\n", + "The Current Loss: 0.3342572611111861\n", + "trigger Times: 2\n", + "[211/500, 0/38] loss: 0.31328657\n", + "The Current Loss: 0.33209723692673904\n", + "trigger times: 0\n", + "[212/500, 0/38] loss: 0.31329906\n", + "The Current Loss: 0.3320818474659553\n", + "trigger times: 0\n", + "[213/500, 0/38] loss: 0.31327868\n", + "The Current Loss: 0.33372750649085414\n", + "trigger Times: 1\n", + "[214/500, 0/38] loss: 0.313288\n", + "The Current Loss: 0.3320826177413647\n", + "trigger times: 0\n", + "[215/500, 0/38] loss: 0.3133038\n", + "The Current Loss: 0.33252667692991406\n", + "trigger Times: 1\n", + "[216/500, 0/38] loss: 0.31328905\n", + "The Current Loss: 0.3321318213756268\n", + "trigger times: 0\n", + "[217/500, 0/38] loss: 0.31327853\n", + "The Current Loss: 0.3321065604686737\n", + "trigger times: 0\n", + "[218/500, 0/38] loss: 0.31329873\n", + "The Current Loss: 0.3320879867443672\n", + "trigger times: 0\n", + "[219/500, 0/38] loss: 0.31327778\n", + "The Current Loss: 0.33447689505723804\n", + "trigger Times: 1\n", + "[220/500, 0/38] loss: 0.31326655\n", + "The Current Loss: 0.3321789365548354\n", + "trigger times: 0\n", + "[221/500, 0/38] loss: 0.31328231\n", + "The Current Loss: 
0.33209360104340774\n", + "trigger times: 0\n", + "[222/500, 0/38] loss: 0.31328079\n", + "The Current Loss: 0.3320823678603539\n", + "trigger times: 0\n", + "[223/500, 0/38] loss: 0.31327844\n", + "The Current Loss: 0.3320857286453247\n", + "trigger Times: 1\n", + "[224/500, 0/38] loss: 0.31329501\n", + "The Current Loss: 0.33210959572058457\n", + "trigger Times: 2\n", + "[225/500, 0/38] loss: 0.31328782\n", + "The Current Loss: 0.33209216136198777\n", + "trigger times: 0\n", + "[226/500, 0/38] loss: 0.31327462\n", + "The Current Loss: 0.3337042194146376\n", + "trigger Times: 1\n", + "[227/500, 0/38] loss: 0.31328136\n", + "The Current Loss: 0.3320854718868549\n", + "trigger times: 0\n", + "[228/500, 0/38] loss: 0.31327704\n", + "The Current Loss: 0.3343578760440533\n", + "trigger Times: 1\n", + "[229/500, 0/38] loss: 0.31328428\n", + "The Current Loss: 0.3337380656829247\n", + "trigger times: 0\n", + "[230/500, 0/38] loss: 0.31328797\n", + "The Current Loss: 0.3328673289372371\n", + "trigger times: 0\n", + "[231/500, 0/38] loss: 0.31328884\n", + "The Current Loss: 0.3321151389525487\n", + "trigger times: 0\n", + "[232/500, 0/38] loss: 0.31330052\n", + "The Current Loss: 0.3321506908306709\n", + "trigger Times: 1\n", + "[233/500, 0/38] loss: 0.31328002\n", + "The Current Loss: 0.3357235399576334\n", + "trigger Times: 2\n", + "[234/500, 0/38] loss: 0.31327382\n", + "The Current Loss: 0.3331192754782163\n", + "trigger times: 0\n", + "[235/500, 0/38] loss: 0.31327319\n", + "The Current Loss: 0.33211249113082886\n", + "trigger times: 0\n", + "[236/500, 0/38] loss: 0.31327179\n", + "The Current Loss: 0.33349753801639265\n", + "trigger Times: 1\n", + "[237/500, 0/38] loss: 0.31327322\n", + "The Current Loss: 0.33210639311717105\n", + "trigger times: 0\n", + "[238/500, 0/38] loss: 0.31326661\n", + "The Current Loss: 0.3320896533819345\n", + "trigger times: 0\n", + "[239/500, 0/38] loss: 0.3132759\n", + "The Current Loss: 0.3361219328183394\n", + "trigger Times: 1\n", + 
"[240/500, 0/38] loss: 0.3132911\n", + "The Current Loss: 0.3323075083585886\n", + "trigger times: 0\n", + "[241/500, 0/38] loss: 0.31327724\n", + "The Current Loss: 0.3321133187183967\n", + "trigger times: 0\n", + "[242/500, 0/38] loss: 0.3132692\n", + "The Current Loss: 0.33208272090324986\n", + "trigger times: 0\n", + "[243/500, 0/38] loss: 0.31327492\n", + "The Current Loss: 0.33419408477269685\n", + "trigger Times: 1\n", + "[244/500, 0/38] loss: 0.31328556\n", + "The Current Loss: 0.3331363361615401\n", + "trigger times: 0\n", + "[245/500, 0/38] loss: 0.31327763\n", + "The Current Loss: 0.33505606880554784\n", + "trigger Times: 1\n", + "[246/500, 0/38] loss: 0.31329489\n", + "The Current Loss: 0.3321066269507775\n", + "trigger times: 0\n", + "[247/500, 0/38] loss: 0.31327206\n", + "The Current Loss: 0.33352190026870143\n", + "trigger Times: 1\n", + "[248/500, 0/38] loss: 0.31326917\n", + "The Current Loss: 0.33209758079968965\n", + "trigger times: 0\n", + "[249/500, 0/38] loss: 0.31327718\n", + "The Current Loss: 0.3321039332793309\n", + "trigger Times: 1\n", + "[250/500, 0/38] loss: 0.31327382\n", + "The Current Loss: 0.33320162617243254\n", + "trigger Times: 2\n", + "[251/500, 0/38] loss: 0.31327263\n", + "The Current Loss: 0.3335534013234652\n", + "trigger Times: 3\n", + "[252/500, 0/38] loss: 0.31326732\n", + "The Current Loss: 0.33208518761854905\n", + "trigger times: 0\n", + "[253/500, 0/38] loss: 0.31331542\n", + "The Current Loss: 0.3320941627025604\n", + "trigger Times: 1\n", + "[254/500, 0/38] loss: 0.31328902\n", + "The Current Loss: 0.3321815545742328\n", + "trigger Times: 2\n", + "[255/500, 0/38] loss: 0.31328416\n", + "The Current Loss: 0.33431867452768177\n", + "trigger Times: 3\n", + "[256/500, 0/38] loss: 0.31329012\n", + "The Current Loss: 0.3321145658309643\n", + "trigger times: 0\n", + "[257/500, 0/38] loss: 0.31329045\n", + "The Current Loss: 0.3326410765831287\n", + "trigger Times: 1\n", + "[258/500, 0/38] loss: 0.3133004\n", + "The 
Current Loss: 0.3343524864086738\n", + "trigger Times: 2\n", + "[259/500, 0/38] loss: 0.313292\n", + "The Current Loss: 0.33251991409521836\n", + "trigger times: 0\n", + "[260/500, 0/38] loss: 0.31327757\n", + "The Current Loss: 0.33214736443299514\n", + "trigger times: 0\n", + "[261/500, 0/38] loss: 0.3132765\n", + "The Current Loss: 0.3321078098737277\n", + "trigger times: 0\n", + "[262/500, 0/38] loss: 0.31326592\n", + "The Current Loss: 0.33435423099077666\n", + "trigger Times: 1\n", + "[263/500, 0/38] loss: 0.31327489\n", + "The Current Loss: 0.3320958935297452\n", + "trigger times: 0\n", + "[264/500, 0/38] loss: 0.31329566\n", + "The Current Loss: 0.3320886194705963\n", + "trigger times: 0\n", + "[265/500, 0/38] loss: 0.31327724\n", + "The Current Loss: 0.33447885513305664\n", + "trigger Times: 1\n", + "[266/500, 0/38] loss: 0.31326857\n", + "The Current Loss: 0.33209440341362584\n", + "trigger times: 0\n", + "[267/500, 0/38] loss: 0.31330791\n", + "The Current Loss: 0.33658732588474566\n", + "trigger Times: 1\n", + "[268/500, 0/38] loss: 0.3132771\n", + "The Current Loss: 0.33455286346949065\n", + "trigger times: 0\n", + "[269/500, 0/38] loss: 0.3132709\n", + "The Current Loss: 0.33208152651786804\n", + "trigger times: 0\n", + "[270/500, 0/38] loss: 0.31328574\n", + "The Current Loss: 0.3320971131324768\n", + "trigger Times: 1\n", + "[271/500, 0/38] loss: 0.31329337\n", + "The Current Loss: 0.332098500086711\n", + "trigger Times: 2\n", + "[272/500, 0/38] loss: 0.3132799\n", + "The Current Loss: 0.3332439340077914\n", + "trigger Times: 3\n", + "[273/500, 0/38] loss: 0.31328934\n", + "The Current Loss: 0.33208606564081633\n", + "trigger times: 0\n", + "[274/500, 0/38] loss: 0.31329274\n", + "The Current Loss: 0.3320834774237413\n", + "trigger times: 0\n", + "[275/500, 0/38] loss: 0.3132866\n", + "The Current Loss: 0.3321199921461252\n", + "trigger Times: 1\n", + "[276/500, 0/38] loss: 0.31327799\n", + "The Current Loss: 0.33212587466606724\n", + "trigger 
Times: 2\n", + "[277/500, 0/38] loss: 0.31327102\n", + "The Current Loss: 0.332085357262538\n", + "trigger times: 0\n", + "[278/500, 0/38] loss: 0.31326929\n", + "The Current Loss: 0.33213456318928647\n", + "trigger Times: 1\n", + "[279/500, 0/38] loss: 0.31329948\n", + "The Current Loss: 0.3321517797616812\n", + "trigger Times: 2\n", + "[280/500, 0/38] loss: 0.31327745\n", + "The Current Loss: 0.33420750269523036\n", + "trigger Times: 3\n", + "[281/500, 0/38] loss: 0.31327581\n", + "The Current Loss: 0.3320891329875359\n", + "trigger times: 0\n", + "[282/500, 0/38] loss: 0.31327292\n", + "The Current Loss: 0.33267560601234436\n", + "trigger Times: 1\n", + "[283/500, 0/38] loss: 0.31328273\n", + "The Current Loss: 0.3321011272760538\n", + "trigger times: 0\n", + "[284/500, 0/38] loss: 0.31328139\n", + "The Current Loss: 0.3320813545813927\n", + "trigger times: 0\n", + "[285/500, 0/38] loss: 0.31328359\n", + "The Current Loss: 0.3322111734977135\n", + "trigger Times: 1\n", + "[286/500, 0/38] loss: 0.31328431\n", + "The Current Loss: 0.3321417776437906\n", + "trigger times: 0\n", + "[287/500, 0/38] loss: 0.31327996\n", + "The Current Loss: 0.3321299598767207\n", + "trigger times: 0\n", + "[288/500, 0/38] loss: 0.31330544\n", + "The Current Loss: 0.3328254245794736\n", + "trigger Times: 1\n", + "[289/500, 0/38] loss: 0.31328765\n", + "The Current Loss: 0.33637301050699675\n", + "trigger Times: 2\n", + "[290/500, 0/38] loss: 0.31331986\n", + "The Current Loss: 0.333108147749534\n", + "trigger times: 0\n", + "[291/500, 0/38] loss: 0.31329975\n", + "The Current Loss: 0.3321406084757585\n", + "trigger times: 0\n", + "[292/500, 0/38] loss: 0.31328163\n", + "The Current Loss: 0.33208311750338626\n", + "trigger times: 0\n", + "[293/500, 0/38] loss: 0.31327996\n", + "The Current Loss: 0.3321003524156717\n", + "trigger Times: 1\n", + "[294/500, 0/38] loss: 0.31327787\n", + "The Current Loss: 0.3337047489789816\n", + "trigger Times: 2\n", + "[295/500, 0/38] loss: 0.3132956\n", 
+ "The Current Loss: 0.33219361992982716\n", + "trigger times: 0\n", + "[296/500, 0/38] loss: 0.31328508\n", + "The Current Loss: 0.33483755588531494\n", + "trigger Times: 1\n", + "[297/500, 0/38] loss: 0.31328353\n", + "The Current Loss: 0.3356610101002913\n", + "trigger Times: 2\n", + "[298/500, 0/38] loss: 0.31327674\n", + "The Current Loss: 0.33208247331472546\n", + "trigger times: 0\n", + "[299/500, 0/38] loss: 0.31327263\n", + "The Current Loss: 0.3320884887988751\n", + "trigger Times: 1\n", + "[300/500, 0/38] loss: 0.31329638\n", + "The Current Loss: 0.33215408141796404\n", + "trigger Times: 2\n", + "[301/500, 0/38] loss: 0.31327671\n", + "The Current Loss: 0.33208316335311305\n", + "trigger times: 0\n", + "[302/500, 0/38] loss: 0.31329042\n", + "The Current Loss: 0.3320837685695061\n", + "trigger Times: 1\n", + "[303/500, 0/38] loss: 0.31329781\n", + "The Current Loss: 0.3328264607832982\n", + "trigger Times: 2\n", + "[304/500, 0/38] loss: 0.31329459\n", + "The Current Loss: 0.3326380413312178\n", + "trigger times: 0\n", + "[305/500, 0/38] loss: 0.31327721\n", + "The Current Loss: 0.3353001819207118\n", + "trigger Times: 1\n", + "[306/500, 0/38] loss: 0.31329587\n", + "The Current Loss: 0.3342735033768874\n", + "trigger times: 0\n", + "[307/500, 0/38] loss: 0.313301\n", + "The Current Loss: 0.33350058243824887\n", + "trigger times: 0\n", + "[308/500, 0/38] loss: 0.31328261\n", + "The Current Loss: 0.33210123960788435\n", + "trigger times: 0\n", + "[309/500, 0/38] loss: 0.31327835\n", + "The Current Loss: 0.33210520102427554\n", + "trigger Times: 1\n", + "[310/500, 0/38] loss: 0.31326735\n", + "The Current Loss: 0.3331066897282234\n", + "trigger Times: 2\n", + "[311/500, 0/38] loss: 0.31327268\n", + "The Current Loss: 0.33252447614303005\n", + "trigger times: 0\n", + "[312/500, 0/38] loss: 0.31329054\n", + "The Current Loss: 0.3325200447669396\n", + "trigger times: 0\n", + "[313/500, 0/38] loss: 0.3132762\n", + "The Current Loss: 0.3320895181252406\n", + 
"trigger times: 0\n", + "[314/500, 0/38] loss: 0.31328267\n", + "The Current Loss: 0.3320934841266045\n", + "trigger Times: 1\n", + "[315/500, 0/38] loss: 0.31327611\n", + "The Current Loss: 0.3343285643137418\n", + "trigger Times: 2\n", + "[316/500, 0/38] loss: 0.31327033\n", + "The Current Loss: 0.3320972988238701\n", + "trigger times: 0\n", + "[317/500, 0/38] loss: 0.31327856\n", + "The Current Loss: 0.332089534172645\n", + "trigger times: 0\n", + "[318/500, 0/38] loss: 0.31328484\n", + "The Current Loss: 0.3325212850020482\n", + "trigger Times: 1\n", + "[319/500, 0/38] loss: 0.3132886\n", + "The Current Loss: 0.3344789399550511\n", + "trigger Times: 2\n", + "[320/500, 0/38] loss: 0.31327885\n", + "The Current Loss: 0.33442200834934527\n", + "trigger times: 0\n", + "[321/500, 0/38] loss: 0.3133193\n", + "The Current Loss: 0.3320925854719602\n", + "trigger times: 0\n", + "[322/500, 0/38] loss: 0.31328773\n", + "The Current Loss: 0.33210848386471087\n", + "trigger Times: 1\n", + "[323/500, 0/38] loss: 0.31328744\n", + "The Current Loss: 0.3320830166339874\n", + "trigger times: 0\n", + "[324/500, 0/38] loss: 0.31330469\n", + "The Current Loss: 0.33208970381663394\n", + "trigger Times: 1\n", + "[325/500, 0/38] loss: 0.31328884\n", + "The Current Loss: 0.3334952386525961\n", + "trigger Times: 2\n", + "[326/500, 0/38] loss: 0.31327629\n", + "The Current Loss: 0.3320939105290633\n", + "trigger times: 0\n", + "[327/500, 0/38] loss: 0.31330222\n", + "The Current Loss: 0.33216914993066055\n", + "trigger Times: 1\n", + "[328/500, 0/38] loss: 0.31328043\n", + "The Current Loss: 0.33210123960788435\n", + "trigger times: 0\n", + "[329/500, 0/38] loss: 0.31328467\n", + "The Current Loss: 0.3342341115841499\n", + "trigger Times: 1\n", + "[330/500, 0/38] loss: 0.31327993\n", + "The Current Loss: 0.33216118124815136\n", + "trigger times: 0\n", + "[331/500, 0/38] loss: 0.31328297\n", + "The Current Loss: 0.3321559520868155\n", + "trigger times: 0\n", + "[332/500, 0/38] loss: 
0.31330582\n", + "The Current Loss: 0.33269432416329014\n", + "trigger Times: 1\n", + "[333/500, 0/38] loss: 0.31327349\n", + "The Current Loss: 0.33215023691837603\n", + "trigger times: 0\n", + "[334/500, 0/38] loss: 0.31328174\n", + "The Current Loss: 0.3329032292732826\n", + "trigger Times: 1\n", + "[335/500, 0/38] loss: 0.31329453\n", + "The Current Loss: 0.33214260523135847\n", + "trigger times: 0\n", + "[336/500, 0/38] loss: 0.31327528\n", + "The Current Loss: 0.3320920192278348\n", + "trigger times: 0\n", + "[337/500, 0/38] loss: 0.31328994\n", + "The Current Loss: 0.3335221020074991\n", + "trigger Times: 1\n", + "[338/500, 0/38] loss: 0.31327513\n", + "The Current Loss: 0.33263885974884033\n", + "trigger times: 0\n", + "[339/500, 0/38] loss: 0.31328806\n", + "The Current Loss: 0.33209163638261646\n", + "trigger times: 0\n", + "[340/500, 0/38] loss: 0.31328785\n", + "The Current Loss: 0.33492074792201704\n", + "trigger Times: 1\n", + "[341/500, 0/38] loss: 0.31329644\n", + "The Current Loss: 0.3320943277615767\n", + "trigger times: 0\n", + "[342/500, 0/38] loss: 0.313279\n", + "The Current Loss: 0.3321204506433927\n", + "trigger Times: 1\n", + "[343/500, 0/38] loss: 0.31331268\n", + "The Current Loss: 0.33208409410256606\n", + "trigger times: 0\n", + "[344/500, 0/38] loss: 0.31329918\n", + "The Current Loss: 0.3344790431169363\n", + "trigger Times: 1\n", + "[345/500, 0/38] loss: 0.31327724\n", + "The Current Loss: 0.3320904259498303\n", + "trigger times: 0\n", + "[346/500, 0/38] loss: 0.31328183\n", + "The Current Loss: 0.3321270484190721\n", + "trigger Times: 1\n", + "[347/500, 0/38] loss: 0.31328455\n", + "The Current Loss: 0.33210286727318394\n", + "trigger times: 0\n", + "[348/500, 0/38] loss: 0.31328231\n", + "The Current Loss: 0.33208530912032497\n", + "trigger times: 0\n", + "[349/500, 0/38] loss: 0.3132807\n", + "The Current Loss: 0.3351582632615016\n", + "trigger Times: 1\n", + "[350/500, 0/38] loss: 0.31330517\n", + "The Current Loss: 
0.33216848052465\n", + "trigger times: 0\n", + "[351/500, 0/38] loss: 0.3132821\n", + "The Current Loss: 0.33208336050693804\n", + "trigger times: 0\n", + "[352/500, 0/38] loss: 0.31330717\n", + "The Current Loss: 0.3321564724812141\n", + "trigger Times: 1\n", + "[353/500, 0/38] loss: 0.31329507\n", + "The Current Loss: 0.3342035779586205\n", + "trigger Times: 2\n", + "[354/500, 0/38] loss: 0.3132908\n", + "The Current Loss: 0.3345145078805777\n", + "trigger Times: 3\n", + "[355/500, 0/38] loss: 0.31327912\n", + "The Current Loss: 0.3320872004215534\n", + "trigger times: 0\n", + "[356/500, 0/38] loss: 0.31328043\n", + "The Current Loss: 0.3320953456255106\n", + "trigger Times: 1\n", + "[357/500, 0/38] loss: 0.31328756\n", + "The Current Loss: 0.33431384655145496\n", + "trigger Times: 2\n", + "[358/500, 0/38] loss: 0.31328145\n", + "The Current Loss: 0.3320832825624026\n", + "trigger times: 0\n", + "[359/500, 0/38] loss: 0.31327942\n", + "The Current Loss: 0.33208417433958787\n", + "trigger Times: 1\n", + "[360/500, 0/38] loss: 0.31328908\n", + "The Current Loss: 0.3320990273585686\n", + "trigger Times: 2\n", + "[361/500, 0/38] loss: 0.31327665\n", + "The Current Loss: 0.33208274153562695\n", + "trigger times: 0\n", + "[362/500, 0/38] loss: 0.31330055\n", + "The Current Loss: 0.33428560999723583\n", + "trigger Times: 1\n", + "[363/500, 0/38] loss: 0.31329703\n", + "The Current Loss: 0.33210426339736354\n", + "trigger times: 0\n", + "[364/500, 0/38] loss: 0.31327316\n", + "The Current Loss: 0.3337778884630937\n", + "trigger Times: 1\n", + "[365/500, 0/38] loss: 0.31329548\n", + "The Current Loss: 0.33429308579518247\n", + "trigger Times: 2\n", + "[366/500, 0/38] loss: 0.31327644\n", + "The Current Loss: 0.33216506701249343\n", + "trigger times: 0\n", + "[367/500, 0/38] loss: 0.31328666\n", + "The Current Loss: 0.33210427256730884\n", + "trigger times: 0\n", + "[368/500, 0/38] loss: 0.31329122\n", + "The Current Loss: 0.3320831839854901\n", + "trigger times: 0\n", + 
"[369/500, 0/38] loss: 0.31328025\n", + "The Current Loss: 0.3335071802139282\n", + "trigger Times: 1\n", + "[370/500, 0/38] loss: 0.31329894\n", + "The Current Loss: 0.33209899297127354\n", + "trigger times: 0\n", + "[371/500, 0/38] loss: 0.31326872\n", + "The Current Loss: 0.3321659794220558\n", + "trigger Times: 1\n", + "[372/500, 0/38] loss: 0.31329122\n", + "The Current Loss: 0.3320893461887653\n", + "trigger times: 0\n", + "[373/500, 0/38] loss: 0.31329361\n", + "The Current Loss: 0.3320946739270137\n", + "trigger Times: 1\n", + "[374/500, 0/38] loss: 0.31331745\n", + "The Current Loss: 0.33209020587114185\n", + "trigger times: 0\n", + "[375/500, 0/38] loss: 0.31328517\n", + "The Current Loss: 0.3320858432696416\n", + "trigger times: 0\n", + "[376/500, 0/38] loss: 0.31327924\n", + "The Current Loss: 0.33432170060964733\n", + "trigger Times: 1\n", + "[377/500, 0/38] loss: 0.31328329\n", + "The Current Loss: 0.3320839198736044\n", + "trigger times: 0\n", + "[378/500, 0/38] loss: 0.31327578\n", + "The Current Loss: 0.33210673240514904\n", + "trigger Times: 1\n", + "[379/500, 0/38] loss: 0.31329206\n", + "The Current Loss: 0.33215515659405637\n", + "trigger Times: 2\n", + "[380/500, 0/38] loss: 0.31330431\n", + "The Current Loss: 0.33215988828585696\n", + "trigger Times: 3\n", + "[381/500, 0/38] loss: 0.31327942\n", + "The Current Loss: 0.33432631538464475\n", + "trigger Times: 4\n", + "[382/500, 0/38] loss: 0.31328863\n", + "The Current Loss: 0.33216110330361587\n", + "trigger times: 0\n", + "[383/500, 0/38] loss: 0.31326634\n", + "The Current Loss: 0.3335089637682988\n", + "trigger Times: 1\n", + "[384/500, 0/38] loss: 0.31328192\n", + "The Current Loss: 0.3321338662734398\n", + "trigger times: 0\n", + "[385/500, 0/38] loss: 0.31327635\n", + "The Current Loss: 0.3326535362463731\n", + "trigger Times: 1\n", + "[386/500, 0/38] loss: 0.31328556\n", + "The Current Loss: 0.33208361038794887\n", + "trigger times: 0\n", + "[387/500, 0/38] loss: 0.31326917\n", + "The 
Current Loss: 0.3321020488555615\n", + "trigger Times: 1\n", + "[388/500, 0/38] loss: 0.31328055\n", + "The Current Loss: 0.3321044651361612\n", + "trigger Times: 2\n", + "[389/500, 0/38] loss: 0.31327367\n", + "The Current Loss: 0.3320847818484673\n", + "trigger times: 0\n", + "[390/500, 0/38] loss: 0.31329697\n", + "The Current Loss: 0.3325898051261902\n", + "trigger Times: 1\n", + "[391/500, 0/38] loss: 0.31328091\n", + "The Current Loss: 0.33283506906949556\n", + "trigger Times: 2\n", + "[392/500, 0/38] loss: 0.31329253\n", + "The Current Loss: 0.3320887799446399\n", + "trigger times: 0\n", + "[393/500, 0/38] loss: 0.31327739\n", + "The Current Loss: 0.33312076100936305\n", + "trigger Times: 1\n", + "[394/500, 0/38] loss: 0.31329548\n", + "The Current Loss: 0.33208380754177386\n", + "trigger times: 0\n", + "[395/500, 0/38] loss: 0.31329849\n", + "The Current Loss: 0.3321445263349093\n", + "trigger Times: 1\n", + "[396/500, 0/38] loss: 0.31327853\n", + "The Current Loss: 0.33209221638165987\n", + "trigger times: 0\n", + "[397/500, 0/38] loss: 0.31327969\n", + "The Current Loss: 0.3321210673222175\n", + "trigger Times: 1\n", + "[398/500, 0/38] loss: 0.31328294\n", + "The Current Loss: 0.3320940389082982\n", + "trigger times: 0\n", + "[399/500, 0/38] loss: 0.31327742\n", + "The Current Loss: 0.33253203676297116\n", + "trigger Times: 1\n", + "[400/500, 0/38] loss: 0.31328693\n", + "The Current Loss: 0.3321735675518329\n", + "trigger times: 0\n", + "[401/500, 0/38] loss: 0.31329581\n", + "The Current Loss: 0.33265166328503537\n", + "trigger Times: 1\n", + "[402/500, 0/38] loss: 0.31328717\n", + "The Current Loss: 0.33209550151458156\n", + "trigger times: 0\n", + "[403/500, 0/38] loss: 0.3132804\n", + "The Current Loss: 0.33208420414191026\n", + "trigger times: 0\n", + "[404/500, 0/38] loss: 0.31326279\n", + "The Current Loss: 0.333958428639632\n", + "trigger Times: 1\n", + "[405/500, 0/38] loss: 0.31329232\n", + "The Current Loss: 0.3342763919096727\n", + "trigger 
Times: 2\n", + "[406/500, 0/38] loss: 0.31328002\n", + "The Current Loss: 0.3352214441849635\n", + "trigger Times: 3\n", + "[407/500, 0/38] loss: 0.31326449\n", + "The Current Loss: 0.3320847314137679\n", + "trigger times: 0\n", + "[408/500, 0/38] loss: 0.31328318\n", + "The Current Loss: 0.3320917968566601\n", + "trigger Times: 1\n", + "[409/500, 0/38] loss: 0.31329846\n", + "The Current Loss: 0.33809882402420044\n", + "trigger Times: 2\n", + "[410/500, 0/38] loss: 0.31327307\n", + "The Current Loss: 0.33209729194641113\n", + "trigger times: 0\n", + "[411/500, 0/38] loss: 0.31329975\n", + "The Current Loss: 0.3337105168746068\n", + "trigger Times: 1\n", + "[412/500, 0/38] loss: 0.31327519\n", + "The Current Loss: 0.3342831570368547\n", + "trigger Times: 2\n", + "[413/500, 0/38] loss: 0.31328154\n", + "The Current Loss: 0.333507973414201\n", + "trigger times: 0\n", + "[414/500, 0/38] loss: 0.31327096\n", + "The Current Loss: 0.3321405282387367\n", + "trigger times: 0\n", + "[415/500, 0/38] loss: 0.31329638\n", + "The Current Loss: 0.33208980010106015\n", + "trigger times: 0\n", + "[416/500, 0/38] loss: 0.31331006\n", + "The Current Loss: 0.3320981997710008\n", + "trigger Times: 1\n", + "[417/500, 0/38] loss: 0.31328529\n", + "The Current Loss: 0.33210468750733596\n", + "trigger Times: 2\n", + "[418/500, 0/38] loss: 0.31329808\n", + "The Current Loss: 0.3320867648491493\n", + "trigger times: 0\n", + "[419/500, 0/38] loss: 0.31327987\n", + "The Current Loss: 0.33250775933265686\n", + "trigger Times: 1\n", + "[420/500, 0/38] loss: 0.31329039\n", + "The Current Loss: 0.3320846649316641\n", + "trigger times: 0\n", + "[421/500, 0/38] loss: 0.31330484\n", + "The Current Loss: 0.3320972048319303\n", + "trigger Times: 1\n", + "[422/500, 0/38] loss: 0.31329983\n", + "The Current Loss: 0.33252541376994205\n", + "trigger Times: 2\n", + "[423/500, 0/38] loss: 0.31327978\n", + "The Current Loss: 0.3344781811420734\n", + "trigger Times: 3\n", + "[424/500, 0/38] loss: 
0.31329519\n", + "The Current Loss: 0.3329108655452728\n", + "trigger times: 0\n", + "[425/500, 0/38] loss: 0.31329441\n", + "The Current Loss: 0.3320942245996915\n", + "trigger times: 0\n", + "[426/500, 0/38] loss: 0.31328619\n", + "The Current Loss: 0.33431222576361436\n", + "trigger Times: 1\n", + "[427/500, 0/38] loss: 0.31328684\n", + "The Current Loss: 0.3320904534596663\n", + "trigger times: 0\n", + "[428/500, 0/38] loss: 0.31327882\n", + "The Current Loss: 0.3321331280928392\n", + "trigger Times: 1\n", + "[429/500, 0/38] loss: 0.31329805\n", + "The Current Loss: 0.3321616076506101\n", + "trigger Times: 2\n", + "[430/500, 0/38] loss: 0.31329015\n", + "The Current Loss: 0.3321144191118387\n", + "trigger times: 0\n", + "[431/500, 0/38] loss: 0.31328982\n", + "The Current Loss: 0.33218354215988743\n", + "trigger Times: 1\n", + "[432/500, 0/38] loss: 0.31327584\n", + "The Current Loss: 0.3326636873758756\n", + "trigger Times: 2\n", + "[433/500, 0/38] loss: 0.31329757\n", + "The Current Loss: 0.33211090702276963\n", + "trigger times: 0\n", + "[434/500, 0/38] loss: 0.31329167\n", + "The Current Loss: 0.33447908208920407\n", + "trigger Times: 1\n", + "[435/500, 0/38] loss: 0.31328297\n", + "The Current Loss: 0.3320939586712764\n", + "trigger times: 0\n", + "[436/500, 0/38] loss: 0.31329921\n", + "The Current Loss: 0.3328306789581592\n", + "trigger Times: 1\n", + "[437/500, 0/38] loss: 0.31327119\n", + "The Current Loss: 0.3326501754614023\n", + "trigger times: 0\n", + "[438/500, 0/38] loss: 0.3132726\n", + "The Current Loss: 0.3321173328619737\n", + "trigger times: 0\n", + "[439/500, 0/38] loss: 0.31328428\n", + "The Current Loss: 0.3320867121219635\n", + "trigger times: 0\n", + "[440/500, 0/38] loss: 0.31328043\n", + "The Current Loss: 0.3321031125692221\n", + "trigger Times: 1\n", + "[441/500, 0/38] loss: 0.31327543\n", + "The Current Loss: 0.33210567098397475\n", + "trigger Times: 2\n", + "[442/500, 0/38] loss: 0.31327114\n", + "The Current Loss: 
0.33223199385863084\n", + "trigger Times: 3\n", + "[443/500, 0/38] loss: 0.31328449\n", + "The Current Loss: 0.3320910884783818\n", + "trigger times: 0\n", + "[444/500, 0/38] loss: 0.31328881\n", + "The Current Loss: 0.33370602360138524\n", + "trigger Times: 1\n", + "[445/500, 0/38] loss: 0.31327283\n", + "The Current Loss: 0.3355798835937793\n", + "trigger Times: 2\n", + "[446/500, 0/38] loss: 0.3132759\n", + "The Current Loss: 0.33208781022291917\n", + "trigger times: 0\n", + "[447/500, 0/38] loss: 0.31328517\n", + "The Current Loss: 0.3320886011307056\n", + "trigger Times: 1\n", + "[448/500, 0/38] loss: 0.31329772\n", + "The Current Loss: 0.33211458875582767\n", + "trigger Times: 2\n", + "[449/500, 0/38] loss: 0.3133094\n", + "The Current Loss: 0.3325452391917889\n", + "trigger Times: 3\n", + "[450/500, 0/38] loss: 0.31329432\n", + "The Current Loss: 0.3320956780360295\n", + "trigger times: 0\n", + "[451/500, 0/38] loss: 0.31327766\n", + "The Current Loss: 0.3321729554579808\n", + "trigger Times: 1\n", + "[452/500, 0/38] loss: 0.31327736\n", + "The Current Loss: 0.3320864484860347\n", + "trigger times: 0\n", + "[453/500, 0/38] loss: 0.31327486\n", + "The Current Loss: 0.33209554048684925\n", + "trigger Times: 1\n", + "[454/500, 0/38] loss: 0.31326959\n", + "The Current Loss: 0.33209014397401077\n", + "trigger times: 0\n", + "[455/500, 0/38] loss: 0.31328234\n", + "The Current Loss: 0.33209576515051037\n", + "trigger Times: 1\n", + "[456/500, 0/38] loss: 0.31329098\n", + "The Current Loss: 0.33221580661260164\n", + "trigger Times: 2\n", + "[457/500, 0/38] loss: 0.31328961\n", + "The Current Loss: 0.33352397267635053\n", + "trigger Times: 3\n", + "[458/500, 0/38] loss: 0.31329122\n", + "The Current Loss: 0.33209193440584034\n", + "trigger times: 0\n", + "[459/500, 0/38] loss: 0.31327894\n", + "The Current Loss: 0.3320887295099405\n", + "trigger times: 0\n", + "[460/500, 0/38] loss: 0.31327403\n", + "The Current Loss: 0.3326492584668673\n", + "trigger Times: 1\n", 
+ "[461/500, 0/38] loss: 0.31330532\n", + "The Current Loss: 0.332943673317249\n", + "trigger Times: 2\n", + "[462/500, 0/38] loss: 0.31327558\n", + "The Current Loss: 0.33208959377728975\n", + "trigger times: 0\n", + "[463/500, 0/38] loss: 0.31328493\n", + "The Current Loss: 0.33210623264312744\n", + "trigger Times: 1\n", + "[464/500, 0/38] loss: 0.31328472\n", + "The Current Loss: 0.33283648124107945\n", + "trigger Times: 2\n", + "[465/500, 0/38] loss: 0.31332386\n", + "The Current Loss: 0.3366303558533008\n", + "trigger Times: 3\n", + "[466/500, 0/38] loss: 0.31328803\n", + "The Current Loss: 0.33208818619067854\n", + "trigger times: 0\n", + "[467/500, 0/38] loss: 0.3132709\n", + "The Current Loss: 0.332830724807886\n", + "trigger Times: 1\n", + "[468/500, 0/38] loss: 0.31327164\n", + "The Current Loss: 0.33209802783452547\n", + "trigger times: 0\n", + "[469/500, 0/38] loss: 0.31329086\n", + "The Current Loss: 0.3341298217956836\n", + "trigger Times: 1\n", + "[470/500, 0/38] loss: 0.31327748\n", + "The Current Loss: 0.3320913062645839\n", + "trigger times: 0\n", + "[471/500, 0/38] loss: 0.31327912\n", + "The Current Loss: 0.3331207518394177\n", + "trigger Times: 1\n", + "[472/500, 0/38] loss: 0.31328347\n", + "The Current Loss: 0.3320863751264719\n", + "trigger times: 0\n", + "[473/500, 0/38] loss: 0.31327507\n", + "The Current Loss: 0.33208662271499634\n", + "trigger Times: 1\n", + "[474/500, 0/38] loss: 0.31328133\n", + "The Current Loss: 0.3321059071100675\n", + "trigger Times: 2\n", + "[475/500, 0/38] loss: 0.31328472\n", + "The Current Loss: 0.3342903714913588\n", + "trigger Times: 3\n", + "[476/500, 0/38] loss: 0.31329483\n", + "The Current Loss: 0.33436327484937817\n", + "trigger Times: 4\n", + "[477/500, 0/38] loss: 0.31328884\n", + "The Current Loss: 0.33428633442291844\n", + "trigger times: 0\n", + "[478/500, 0/38] loss: 0.31328282\n", + "The Current Loss: 0.3320888510117164\n", + "trigger times: 0\n", + "[479/500, 0/38] loss: 0.31327477\n", + "The 
Current Loss: 0.3320894195483281\n", + "trigger Times: 1\n", + "[480/500, 0/38] loss: 0.3132703\n", + "The Current Loss: 0.33209399993603045\n", + "trigger Times: 2\n", + "[481/500, 0/38] loss: 0.31330225\n", + "The Current Loss: 0.33209115954545826\n", + "trigger times: 0\n", + "[482/500, 0/38] loss: 0.3132734\n", + "The Current Loss: 0.3320988164498256\n", + "trigger Times: 1\n", + "[483/500, 0/38] loss: 0.31328231\n", + "The Current Loss: 0.3344799761588757\n", + "trigger Times: 2\n", + "[484/500, 0/38] loss: 0.31326988\n", + "The Current Loss: 0.3371816392128284\n", + "trigger Times: 3\n", + "[485/500, 0/38] loss: 0.3133007\n", + "The Current Loss: 0.3320891329875359\n", + "trigger times: 0\n", + "[486/500, 0/38] loss: 0.31328019\n", + "The Current Loss: 0.33406174412140477\n", + "trigger Times: 1\n", + "[487/500, 0/38] loss: 0.31328058\n", + "The Current Loss: 0.332096306177286\n", + "trigger times: 0\n", + "[488/500, 0/38] loss: 0.31328148\n", + "The Current Loss: 0.3321588131097647\n", + "trigger Times: 1\n", + "[489/500, 0/38] loss: 0.31329188\n", + "The Current Loss: 0.336760761646124\n", + "trigger Times: 2\n", + "[490/500, 0/38] loss: 0.31330523\n", + "The Current Loss: 0.33211910266142625\n", + "trigger times: 0\n", + "[491/500, 0/38] loss: 0.3132903\n", + "The Current Loss: 0.3337050240773421\n", + "trigger Times: 1\n", + "[492/500, 0/38] loss: 0.31329158\n", + "The Current Loss: 0.33271218492434573\n", + "trigger times: 0\n", + "[493/500, 0/38] loss: 0.31329522\n", + "The Current Loss: 0.33265785529063296\n", + "trigger times: 0\n", + "[494/500, 0/38] loss: 0.31329423\n", + "The Current Loss: 0.3326175808906555\n", + "trigger times: 0\n", + "[495/500, 0/38] loss: 0.31327051\n", + "The Current Loss: 0.33283034425515395\n", + "trigger Times: 1\n", + "[496/500, 0/38] loss: 0.31328884\n", + "The Current Loss: 0.3321351936230293\n", + "trigger times: 0\n", + "[497/500, 0/38] loss: 0.31329125\n", + "The Current Loss: 0.3349219216750218\n", + "trigger Times: 
1\n", + "[498/500, 0/38] loss: 0.31327757\n", + "The Current Loss: 0.3321089927966778\n", + "trigger times: 0\n", + "[499/500, 0/38] loss: 0.3133041\n", + "The Current Loss: 0.3342926295904013\n", + "trigger Times: 1\n", + "[500/500, 0/38] loss: 0.31326774\n", + "The Current Loss: 0.33209305772414577\n", + "trigger times: 0\n" + ] + } + ], + "source": [ + "from src.train import traindata\n", + "hyperparameters = config['hyperparameters']\n", + "model = traindata(hyperparameters, device, config)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As we can see, we didn't need to read any data or calculate the descriptors. This is because the training function already did all of those steps using the `prepare_data` function mentioned in the introduction of this chapter's important note. However, we will need to do it again now to obtain the test set to see if the model is working properly. This is inconvenient because we are reading and splitting the data twice, but this is required because later we will use 'batch_size' (which is used to read the data) as a varying hyperparameter. Because we can only vary the hyperparameters inside the train function, we have to read the data in that function." + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "from src.prepare_data import prepare_data\n", + "mode = config['combination']['mode']\n", + "data_dir = config['combination']['data_dir']\n", + "kmer_one_hot = config['fixed_vals']['kmer_one_hot']\n", + "model_label = config['combination']['model_label'] \n", + "batch_size = config['hyperparameters']['batch_size']\n", + "\n", + "_, testloader, _, _, _ = prepare_data(\n", + " data_dir=data_dir,\n", + " mode=mode,\n", + " batch_size=batch_size,\n", + " k=kmer_one_hot\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's see how well the model performs on the test set. 
The metrics chosen are the accuracy, the Matthews correlation coefficient, and the confusion matrix." + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Results in test set:\n", + "--------------------\n", + "- model: cnn\n", + "- mode: one_hot\n", + "- dataset: primer\n", + "--------------------\n", + "Accuracy: 0.990\n", + "MCC: 0.980\n", + "[[200 3]\n", + " [ 1 196]]\n" + ] + } + ], + "source": [ + "from src.test import test\n", + "\n", + "acc, mcc, report = test(device, model, testloader)\n", + "print(\"Results in test set:\")\n", + "print(\"--------------------\")\n", + "print(\"- model: \", model_label)\n", + "print(\"- mode: \", mode)\n", + "print(\"- dataset:\", data_dir.split(\"/\")[-1])\n", + "print(\"--------------------\")\n", + "print('Accuracy: %.3f' % acc)\n", + "print('MCC: %.3f' % mcc)\n", + "print(report)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 6. Hyperparameter tuning" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As mentioned before, a method was developed to find the best hyperparameters. This method is called *hyperparameter tuning*. It is a process of tuning the hyperparameters of a model to obtain the best performance. A function called `hyperparameter_tuning` was implemented that performs this process. It takes as input the config object (which must have the hyperparameter search space) and the device on which the model will be trained. It will create a scheduler called `ASHAScheduler` that will be used to terminate the training if the model does not improve for a certain number of epochs. A `CLIReporter` object will also be created to report the metrics on the console (accuracy, Matthews correlation coefficient, and loss). Then, `num_samples` samples will be drawn from the hyperparameter search space and the model will be trained on each of them. 
The best model will be the one that has the highest Matthews correlation coefficient and will be then tested on the test set, outputting the metrics." + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2022-08-17 15:59:25,306\tERROR syncer.py:147 -- Log sync requires rsync to be installed.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "== Status ==\n", + "Current time: 2022-08-17 15:59:25 (running for 00:00:00.21)\n", + "Memory usage on this node: 88.7/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 256.000: None | Iter 128.000: None | Iter 64.000: None | Iter 32.000: None | Iter 16.000: None | Iter 8.000: None | Iter 4.000: None | Iter 2.000: None | Iter 1.000: None\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (14 PENDING, 1 RUNNING)\n", + "+-----------------------+----------+------------------------+--------------+-----------+---------------+-------------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr |\n", + "|-----------------------+----------+------------------------+--------------+-----------+---------------+-------------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 |\n", + "| traindata_2f206_00001 | PENDING | | 16 | 0.376144 | 32 | 2.13793e-05 |\n", + "| traindata_2f206_00002 | PENDING | | 8 | 0.331023 | 128 | 0.00030504 |\n", + "| traindata_2f206_00003 | PENDING | | 8 | 0.438603 | 128 | 0.000643721 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 
0.00465713 |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 |\n", + "+-----------------------+----------+------------------------+--------------+-----------+---------------+-------------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [1/500, 0/38] loss: 0.68471241\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.745\n", + " date: 2022-08-17_15-59-30\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6337895806019123\n", + " mcc: 0.5626864118225471\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 0.5505173206329346\n", + " time_this_iter_s: 0.5505173206329346\n", + " time_total_s: 0.5505173206329346\n", + " timestamp: 1660748370\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.6337895806019123\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [2/500, 0/38] loss: 0.61727053\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.5529582683856671\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "== 
Status ==\n", + "Current time: 2022-08-17 15:59:30 (running for 00:00:05.24)\n", + "Memory usage on this node: 90.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=0\n", + "Bracket: Iter 256.000: None | Iter 128.000: None | Iter 64.000: None | Iter 32.000: None | Iter 16.000: None | Iter 8.000: None | Iter 4.000: None | Iter 2.000: -0.5529582683856671 | Iter 1.000: -0.6337895806019123\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (13 PENDING, 2 RUNNING)\n", + "+-----------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.552958 | 0.845 | 2 | 0.706333 |\n", + "| traindata_2f206_00001 | RUNNING | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | | | | |\n", + "| traindata_2f206_00002 | PENDING | | 8 | 0.331023 | 128 | 0.00030504 | | | | |\n", + "| traindata_2f206_00003 | PENDING | | 8 | 0.438603 | 128 | 0.000643721 | | | | |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | 
|\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "+-----------------------+----------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [3/500, 0/38] loss: 0.53485584\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.4841422415696658\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [4/500, 0/38] loss: 0.47030929\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.4332763438041394\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [5/500, 0/38] loss: 0.45691448\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.4068997800350189\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [6/500, 0/38] loss: 0.40804246\n", + "\u001b[2m\u001b[36m(func pid=3931883)\u001b[0m [1/500, 0/75] loss: 0.6869092\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3930397858986488\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [7/500, 0/38] loss: 0.38453594\n", + "Result for traindata_2f206_00001:\n", + " accuracy: 0.615\n", + " date: 2022-08-17_15-59-34\n", + " 
done: true\n", + " experiment_id: 5012f088373848cfbfd3d77c12988299\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6894357085227967\n", + " mcc: 0.2302575669827911\n", + " node_ip: 192.168.85.249\n", + " pid: 3931883\n", + " should_checkpoint: true\n", + " time_since_restore: 0.5592665672302246\n", + " time_this_iter_s: 0.5592665672302246\n", + " time_total_s: 0.5592665672302246\n", + " timestamp: 1660748374\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00001\n", + " warmup_time: 0.0037910938262939453\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931883)\u001b[0m The Current Loss: 0.6894357085227967\n", + "\u001b[2m\u001b[36m(func pid=3931883)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.37516661102955157\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [8/500, 0/38] loss: 0.34624705\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.36338866215485793\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [9/500, 0/38] loss: 0.35605714\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3542456879065587\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [10/500, 0/38] loss: 0.33700174\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.98\n", + " date: 2022-08-17_15-59-35\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 10\n", + " loss: 0.34957010929401106\n", + " mcc: 0.9600450104271285\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 5.596264839172363\n", + " time_this_iter_s: 0.10375142097473145\n", + " time_total_s: 
5.596264839172363\n", + " timestamp: 1660748375\n", + " timesteps_since_restore: 0\n", + " training_iteration: 10\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.34957010929401106\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [11/500, 0/38] loss: 0.36956471\n", + "== Status ==\n", + "Current time: 2022-08-17 15:59:35 (running for 00:00:10.29)\n", + "Memory usage on this node: 90.5/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=1\n", + "Bracket: Iter 256.000: None | Iter 128.000: None | Iter 64.000: None | Iter 32.000: None | Iter 16.000: None | Iter 8.000: -0.36338866215485793 | Iter 4.000: -0.4332763438041394 | Iter 2.000: -0.5529582683856671 | Iter 1.000: -0.6616126445623545\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (13 PENDING, 1 RUNNING, 1 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.343653 | 0.9775 | 11 | 0.954998 |\n", + "| traindata_2f206_00002 | PENDING | | 8 | 0.331023 | 128 | 0.00030504 | | | | |\n", + "| traindata_2f206_00003 | PENDING | | 8 | 0.438603 | 128 | 0.000643721 | | | | |\n", + "| traindata_2f206_00004 | PENDING 
| | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.34365270458734953\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [12/500, 0/38] loss: 0.34624851\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.35019751007740313\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [13/500, 0/38] loss: 0.32940936\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3396972371981694\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [14/500, 0/38] 
loss: 0.32418659\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.34356719943193287\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [15/500, 0/38] loss: 0.35129735\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.334697262598918\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [16/500, 0/38] loss: 0.32027173\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3352227004674765\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [17/500, 0/38] loss: 0.31902099\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3309404712456923\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [18/500, 0/38] loss: 0.32053539\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3354311126929063\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [19/500, 0/38] loss: 0.34166914\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.33056459060082066\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [20/500, 0/38] loss: 0.31996906\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32907588665301984\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [21/500, 0/38] loss: 0.31651026\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32884907722473145\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [22/500, 0/38] loss: 
0.3262758\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.33040321561006397\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [23/500, 0/38] loss: 0.31505269\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32653780625416684\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [24/500, 0/38] loss: 0.3141472\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3288710369513585\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [25/500, 0/38] loss: 0.31920126\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3282004136305589\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [26/500, 0/38] loss: 0.3148157\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32788946766119736\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [27/500, 0/38] loss: 0.31428939\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32619596215394825\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [28/500, 0/38] loss: 0.31512189\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3248769136575552\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [29/500, 0/38] loss: 0.31588718\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3254501177714421\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [30/500, 0/38] loss: 0.3149043\n", 
+ "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3256023617891165\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [31/500, 0/38] loss: 0.31447464\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3241488245817331\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [32/500, 0/38] loss: 0.31441537\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3240627921544589\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [33/500, 0/38] loss: 0.31363207\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32405426181279695\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [34/500, 0/38] loss: 0.31376481\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32439388449375445\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [35/500, 0/38] loss: 0.31444538\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32489726405877334\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [36/500, 0/38] loss: 0.31369683\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32446321845054626\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [37/500, 0/38] loss: 0.31425807\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32361015448203456\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [38/500, 0/38] loss: 0.31436536\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32299253115287196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [39/500, 0/38] loss: 0.31351131\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32355062319682193\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [40/500, 0/38] loss: 0.31397447\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3239628466276022\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [41/500, 0/38] loss: 0.31367519\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32274599029467654\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [42/500, 0/38] loss: 0.31348664\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.322858798962373\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [43/500, 0/38] loss: 0.31378105\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3245854698694669\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [44/500, 0/38] loss: 0.31334129\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.323161251269854\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [45/500, 0/38] loss: 0.31362095\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.322528580060372\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [46/500, 0/38] loss: 0.3134298\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.323676728285276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [47/500, 0/38] loss: 0.31376594\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3231870440336374\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [48/500, 0/38] loss: 0.31368157\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32418073828403765\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [49/500, 0/38] loss: 0.31369159\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3232341637978187\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [50/500, 0/38] loss: 0.31360012\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3223508344246791\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [51/500, 0/38] loss: 0.31390741\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32430509420541614\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [52/500, 0/38] loss: 0.3135877\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32226383227568406\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [53/500, 0/38] loss: 0.31342432\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32259276050787705\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [54/500, 0/38] loss: 0.31380558\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3223297962775597\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [55/500, 0/38] loss: 0.31353307\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3248140078324538\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [56/500, 0/38] loss: 0.31334278\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32243671784034145\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [57/500, 0/38] loss: 0.31359807\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_15-59-40\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 57\n", + " loss: 0.32191605751331037\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 10.663471221923828\n", + " time_this_iter_s: 0.10296845436096191\n", + " time_total_s: 10.663471221923828\n", + " timestamp: 1660748380\n", + " timesteps_since_restore: 0\n", + " training_iteration: 57\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32191605751331037\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [58/500, 0/38] loss: 0.31366253\n", + "== Status ==\n", + "Current time: 2022-08-17 15:59:40 (running for 00:00:15.35)\n", + "Memory usage on this node: 92.3/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=1\n", + "Bracket: Iter 256.000: None | Iter 128.000: None | Iter 64.000: None | Iter 32.000: -0.3240627921544589 | Iter 16.000: 
-0.3352227004674765 | Iter 8.000: -0.36338866215485793 | Iter 4.000: -0.4332763438041394 | Iter 2.000: -0.5529582683856671 | Iter 1.000: -0.6616126445623545\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (12 PENDING, 2 RUNNING, 1 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.321838 | 0.9925 | 58 | 0.985111 |\n", + "| traindata_2f206_00002 | RUNNING | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | | | | |\n", + "| traindata_2f206_00003 | PENDING | | 8 | 0.438603 | 128 | 0.000643721 | | | | |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | 
PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32183835827387297\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [59/500, 0/38] loss: 0.31349683\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32190031042465794\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [60/500, 0/38] loss: 0.31354427\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [1/500, 0/150] loss: 0.69674748\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32184273921526396\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [61/500, 0/38] loss: 0.31364542\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3215730281976553\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [62/500, 0/38] loss: 0.31359908\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [1/500, 100/150] loss: 0.61192364\n", + "Result for traindata_2f206_00002:\n", + " accuracy: 0.845\n", + " date: 2022-08-17_15-59-40\n", + " done: false\n", + " experiment_id: c1afbab8bcdc409197a5bdfbe1cbd27b\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 
0.5398776644468307\n", + " mcc: 0.6982270804501465\n", + " node_ip: 192.168.85.249\n", + " pid: 3931986\n", + " should_checkpoint: true\n", + " time_since_restore: 0.6986396312713623\n", + " time_this_iter_s: 0.6986396312713623\n", + " time_total_s: 0.6986396312713623\n", + " timestamp: 1660748380\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00002\n", + " warmup_time: 0.003938913345336914\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32278446509287906\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [63/500, 0/38] loss: 0.31342047\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.5398776644468307\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [2/500, 0/150] loss: 0.54552925\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32182275561185986\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [64/500, 0/38] loss: 0.31336609\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3216813619320209\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [65/500, 0/38] loss: 0.31340441\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [2/500, 100/150] loss: 0.40016565\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32175463208785426\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [66/500, 0/38] loss: 0.31346753\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.42899625301361083\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m 
[3/500, 0/150] loss: 0.4783369\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32192468643188477\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [67/500, 0/38] loss: 0.31349722\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3217080648128803\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [68/500, 0/38] loss: 0.31344309\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [3/500, 100/150] loss: 0.38185823\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32143961695524365\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [69/500, 0/38] loss: 0.31341946\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3897830992937088\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [4/500, 0/150] loss: 0.40531948\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.322191052711927\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [70/500, 0/38] loss: 0.31340149\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3224426553799556\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [71/500, 0/38] loss: 0.31336379\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [4/500, 100/150] loss: 0.31570104\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3219495484462151\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [72/500, 0/38] loss: 0.31350765\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m 
The Current Loss: 0.3658917301893234\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3214634633981265\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [73/500, 0/38] loss: 0.31335041\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [5/500, 0/150] loss: 0.35228217\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.321223829801266\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [74/500, 0/38] loss: 0.31345528\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [5/500, 100/150] loss: 0.32102674\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.322317694242184\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [75/500, 0/38] loss: 0.313301\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3215387165546417\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [76/500, 0/38] loss: 0.31337127\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32132713152812076\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3555767124891281\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [6/500, 0/150] loss: 0.32612544\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [77/500, 0/38] loss: 0.31336588\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32157655633412874\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [6/500, 
100/150] loss: 0.31639981\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [78/500, 0/38] loss: 0.31344497\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32122697738500744\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [79/500, 0/38] loss: 0.31335622\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3215579459300408\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3493291789293289\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [7/500, 0/150] loss: 0.31661457\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [80/500, 0/38] loss: 0.31341502\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32136396261361927\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [81/500, 0/38] loss: 0.31338811\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3213486327574803\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [82/500, 0/38] loss: 0.31335264\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [7/500, 100/150] loss: 0.3212603\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3216039767632118\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [83/500, 0/38] loss: 0.31329983\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3457822948694229\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [8/500, 0/150] loss: 0.31428185\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The 
Current Loss: 0.32172317229784453\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [84/500, 0/38] loss: 0.31335044\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3211519236748035\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [85/500, 0/38] loss: 0.31334615\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [8/500, 100/150] loss: 0.31620529\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32127029162186843\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [86/500, 0/38] loss: 0.31333989\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.34589595258235933\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [9/500, 0/150] loss: 0.32192099\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3213575344819289\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [87/500, 0/38] loss: 0.3133148\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32116830119719875\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [88/500, 0/38] loss: 0.31331602\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [9/500, 100/150] loss: 0.31905121\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32145764506780183\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [89/500, 0/38] loss: 0.3133162\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3401418077945709\n", + "\u001b[2m\u001b[36m(func 
pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [10/500, 0/150] loss: 0.31344643\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32145284230892474\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [90/500, 0/38] loss: 0.31330991\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3250531554222107\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [91/500, 0/38] loss: 0.31333381\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [10/500, 100/150] loss: 0.31860876\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32147852732585025\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [92/500, 0/38] loss: 0.3133671\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3213931711820456\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [93/500, 0/38] loss: 0.31329536\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3401214039325714\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [11/500, 0/150] loss: 0.31429848\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32103475011312044\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [94/500, 0/38] loss: 0.3133156\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [11/500, 100/150] loss: 0.31459427\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3209717640509972\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m [95/500, 0/38] loss: 0.31333199\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3232571092935709\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [96/500, 0/38] loss: 0.31329593\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3379452168941498\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [12/500, 0/150] loss: 0.31460968\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3228451449137468\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [97/500, 0/38] loss: 0.31333148\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [12/500, 100/150] loss: 0.31421658\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3209574933235462\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [98/500, 0/38] loss: 0.31331947\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207971453666687\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [99/500, 0/38] loss: 0.31328788\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3373181855678558\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [13/500, 0/150] loss: 0.31372902\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3208716076153975\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [100/500, 0/38] loss: 0.31331354\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32077049062802243\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [101/500, 0/38] loss: 0.31335488\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [13/500, 100/150] loss: 0.31451535\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32110464572906494\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [102/500, 0/38] loss: 0.31333521\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3369577604532242\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [14/500, 0/150] loss: 0.31349432\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210168366248791\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [103/500, 0/38] loss: 0.31330812\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32107059084452116\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [14/500, 100/150] loss: 0.31378967\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [104/500, 0/38] loss: 0.31327564\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.33616979539394376\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [15/500, 0/150] loss: 0.31337619\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_15-59-45\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 104\n", + " loss: 0.3213327618745657\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 
15.719033241271973\n", + " time_this_iter_s: 0.10544371604919434\n", + " time_total_s: 15.719033241271973\n", + " timestamp: 1660748385\n", + " timesteps_since_restore: 0\n", + " training_iteration: 104\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3213327618745657\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [105/500, 0/38] loss: 0.31327024\n", + "== Status ==\n", + "Current time: 2022-08-17 15:59:45 (running for 00:00:20.42)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=1\n", + "Bracket: Iter 256.000: None | Iter 128.000: None | Iter 64.000: -0.3216813619320209 | Iter 32.000: -0.3240627921544589 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.35464230736860863 | Iter 4.000: -0.3995840369967314 | Iter 2.000: -0.490977260699639 | Iter 1.000: -0.6337895806019123\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (12 PENDING, 2 RUNNING, 1 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.322351 | 0.9925 | 105 | 0.985111 |\n", + "| traindata_2f206_00002 | RUNNING | 192.168.85.249:3931986 | 8 | 0.331023 
| 128 | 0.00030504 | 0.33617 | 0.9775 | 14 | 0.955005 |\n", + "| traindata_2f206_00003 | PENDING | | 8 | 0.438603 | 128 | 0.000643721 | | | | |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.322350969681373\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [106/500, 0/38] loss: 0.31330881\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [15/500, 100/150] loss: 0.31381145\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210802972316742\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m [107/500, 0/38] loss: 0.31328917\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.3363254714012146\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [16/500, 0/150] loss: 0.31372795\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32218393912682164\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [108/500, 0/38] loss: 0.31329331\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m [16/500, 100/150] loss: 0.31385511\n", + "Result for traindata_2f206_00002:\n", + " accuracy: 0.9725\n", + " date: 2022-08-17_15-59-45\n", + " done: true\n", + " experiment_id: c1afbab8bcdc409197a5bdfbe1cbd27b\n", + " hostname: tesla\n", + " iterations_since_restore: 16\n", + " loss: 0.33708340585231783\n", + " mcc: 0.9453036136838198\n", + " node_ip: 192.168.85.249\n", + " pid: 3931986\n", + " should_checkpoint: true\n", + " time_since_restore: 5.705564737319946\n", + " time_this_iter_s: 0.32146191596984863\n", + " time_total_s: 5.705564737319946\n", + " timestamp: 1660748385\n", + " timesteps_since_restore: 0\n", + " training_iteration: 16\n", + " trial_id: 2f206_00002\n", + " warmup_time: 0.003938913345336914\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32068954981290376\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [109/500, 0/38] loss: 0.31330124\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m The Current Loss: 0.33708340585231783\n", + "\u001b[2m\u001b[36m(func pid=3931986)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32093139107410723\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [110/500, 0/38] loss: 
0.31330198\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32077996547405535\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [111/500, 0/38] loss: 0.3133193\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3232162594795227\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [112/500, 0/38] loss: 0.31331071\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207982824398921\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [113/500, 0/38] loss: 0.31330392\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206609327059526\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [114/500, 0/38] loss: 0.31330445\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32144373655319214\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [115/500, 0/38] loss: 0.31330577\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32080435294371384\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [116/500, 0/38] loss: 0.31327108\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32113567223915684\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [117/500, 0/38] loss: 0.31328455\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32282856794504017\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [118/500, 0/38] loss: 
0.31330138\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3208004029897543\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [119/500, 0/38] loss: 0.31329858\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32056394677895766\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [120/500, 0/38] loss: 0.31331137\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32094139089951146\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [121/500, 0/38] loss: 0.31328225\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3211564811376425\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [122/500, 0/38] loss: 0.31327501\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205337616113516\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [123/500, 0/38] loss: 0.31328827\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320929779456212\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [124/500, 0/38] loss: 0.31329119\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3208141051805936\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [125/500, 0/38] loss: 0.31328213\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32035335439902085\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [126/500, 0/38] loss: 
0.31328499\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3208434283733368\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [127/500, 0/38] loss: 0.31331643\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3204712202915779\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [128/500, 0/38] loss: 0.31328472\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3208131813085996\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [129/500, 0/38] loss: 0.31330442\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32244155957148624\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [130/500, 0/38] loss: 0.31328735\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32057108787389904\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [131/500, 0/38] loss: 0.31327078\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32039002501047575\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [132/500, 0/38] loss: 0.31327069\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205965826144585\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [133/500, 0/38] loss: 0.31328487\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32237353233190685\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [134/500, 0/38] loss: 
0.31327483\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205718627342811\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [135/500, 0/38] loss: 0.31328058\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32086783189039964\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [136/500, 0/38] loss: 0.31326813\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3204577909066127\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [137/500, 0/38] loss: 0.31329235\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3204393134667323\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [138/500, 0/38] loss: 0.31328598\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.321167909182035\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [139/500, 0/38] loss: 0.31327584\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205019258535825\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [140/500, 0/38] loss: 0.31329179\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3204696338910323\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [141/500, 0/38] loss: 0.31326619\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205031890135545\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [142/500, 0/38] loss: 
0.31327933\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054672791407657\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [143/500, 0/38] loss: 0.31329256\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32050195794839126\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [144/500, 0/38] loss: 0.3132872\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32282532636935896\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [145/500, 0/38] loss: 0.31327382\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205051376269414\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [146/500, 0/38] loss: 0.31327203\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205307859640855\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [147/500, 0/38] loss: 0.31327847\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205419389101175\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [148/500, 0/38] loss: 0.3132765\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320535646035121\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [149/500, 0/38] loss: 0.3132847\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205342499109415\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [150/500, 0/38] loss: 
0.31328207\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206842564619504\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [151/500, 0/38] loss: 0.31328771\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_15-59-50\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 151\n", + " loss: 0.3210909893879524\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 20.80236554145813\n", + " time_this_iter_s: 0.10587286949157715\n", + " time_total_s: 20.80236554145813\n", + " timestamp: 1660748390\n", + " timesteps_since_restore: 0\n", + " training_iteration: 151\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210909893879524\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [152/500, 0/38] loss: 0.31327736\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206224533227774\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 15:59:50 (running for 00:00:25.49)\n", + "Memory usage on this node: 91.9/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: None | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216813619320209 | Iter 32.000: -0.3240627921544589 | Iter 16.000: -0.3361530531598972 | Iter 8.000: -0.35464230736860863 | Iter 4.000: -0.3995840369967314 | Iter 2.000: -0.490977260699639 | Iter 1.000: -0.6337895806019123\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result 
logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320622 | 0.9925 | 152 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | | | | |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| 
traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [153/500, 0/38] loss: 0.31326872\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054242262473476\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [154/500, 0/38] loss: 0.3132759\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205370261118962\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [155/500, 0/38] loss: 0.31328773\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054035250957197\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [156/500, 0/38] loss: 0.31330228\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [1/500, 0/150] loss: 0.68959266\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205394928271954\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [157/500, 0/38] loss: 0.31328902\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32088508285008943\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [158/500, 0/38] loss: 0.3132886\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [1/500, 100/150] loss: 0.70385969\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205302586922279\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger 
times: 0\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.93\n", + " date: 2022-08-17_15-59-51\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.4154818135499954\n", + " mcc: 0.8600325083642456\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 0.7293224334716797\n", + " time_this_iter_s: 0.7293224334716797\n", + " time_total_s: 0.7293224334716797\n", + " timestamp: 1660748391\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [159/500, 0/38] loss: 0.31327236\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205290161646329\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.4154818135499954\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [2/500, 0/150] loss: 0.40602371\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [160/500, 0/38] loss: 0.31327248\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054317456025344\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [161/500, 0/38] loss: 0.31327081\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206066351670485\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [2/500, 100/150] loss: 0.40076062\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [162/500, 0/38] loss: 0.31329232\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3223023804334494\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [163/500, 0/38] loss: 0.31328756\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053305552555966\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.354632078409195\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [3/500, 0/150] loss: 0.35020319\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [164/500, 0/38] loss: 0.31328529\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32071579648898196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [3/500, 100/150] loss: 0.33283272\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [165/500, 0/38] loss: 0.31326842\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205291491288405\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [166/500, 0/38] loss: 0.31328779\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3576673400402069\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [4/500, 0/150] loss: 0.31854677\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32104046986653256\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [167/500, 0/38] loss: 0.31328809\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320680891092007\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [168/500, 0/38] loss: 0.31330255\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m The Current Loss: 0.32060643342825085\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [4/500, 100/150] loss: 0.31796229\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [169/500, 0/38] loss: 0.3132692\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205286103945512\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3557737797498703\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [5/500, 0/150] loss: 0.31513011\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [170/500, 0/38] loss: 0.31328735\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206255000371199\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [171/500, 0/38] loss: 0.31328681\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205340321247394\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [5/500, 100/150] loss: 0.31493998\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [172/500, 0/38] loss: 0.31328347\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205333145765158\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.33005059123039243\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [6/500, 0/150] loss: 0.31362203\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [173/500, 0/38] loss: 0.31326985\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3224816643274747\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [174/500, 0/38] loss: 0.31327888\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054145061052763\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [6/500, 100/150] loss: 0.31569719\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [175/500, 0/38] loss: 0.31327993\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053001339618975\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3285260093212128\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [7/500, 0/150] loss: 0.3139739\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [176/500, 0/38] loss: 0.31327349\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205287433587588\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [177/500, 0/38] loss: 0.31328019\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32057867371118987\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [7/500, 100/150] loss: 0.31526044\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [178/500, 0/38] loss: 0.31328034\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210830115354978\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32673370480537417\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [8/500, 0/150] loss: 0.31343797\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [179/500, 0/38] loss: 0.31328356\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205464780330658\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [180/500, 0/38] loss: 0.31327415\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053804856080276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [8/500, 100/150] loss: 0.3158873\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [181/500, 0/38] loss: 0.31328982\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205297268353976\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.34472739458084106\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [9/500, 0/150] loss: 0.3163439\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [182/500, 0/38] loss: 0.31327572\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053082952132594\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [183/500, 0/38] loss: 0.31328601\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205383809713217\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [9/500, 100/150] loss: 0.31371918\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [184/500, 0/38] loss: 0.31328157\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3218996547735654\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [185/500, 0/38] loss: 
0.3132681\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32405663549900054\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [10/500, 0/150] loss: 0.31412756\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205316731562981\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [186/500, 0/38] loss: 0.31326768\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053844516093916\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [187/500, 0/38] loss: 0.3132776\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [10/500, 100/150] loss: 0.31350356\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205376106959123\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [188/500, 0/38] loss: 0.31329662\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3246558928489685\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32075390678185683\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [189/500, 0/38] loss: 0.31328595\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [11/500, 0/150] loss: 0.31399488\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205680847167969\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [190/500, 0/38] loss: 0.31328362\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [11/500, 100/150] loss: 0.31436908\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current 
Loss: 0.3206828167805305\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [191/500, 0/38] loss: 0.31326979\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205334475407234\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [192/500, 0/38] loss: 0.31328797\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32626008331775663\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [12/500, 0/150] loss: 0.31355345\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205301601153154\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [12/500, 100/150] loss: 0.31336752\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [193/500, 0/38] loss: 0.31328204\n", + "== Status ==\n", + "Current time: 2022-08-17 15:59:55 (running for 00:00:30.53)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: None | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216813619320209 | Iter 32.000: -0.3240627921544589 | Iter 16.000: -0.3361530531598972 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status 
| loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.32053 | 0.9925 | 192 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.323909 | 0.9925 | 12 | 0.98501 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + 
"\n", + "\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_15-59-55\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 193\n", + " loss: 0.3205285164026114\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 25.95276951789856\n", + " time_this_iter_s: 0.1487140655517578\n", + " time_total_s: 25.95276951789856\n", + " timestamp: 1660748395\n", + " timesteps_since_restore: 0\n", + " training_iteration: 193\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205285164026114\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [194/500, 0/38] loss: 0.31327394\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3239086389541626\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [13/500, 0/150] loss: 0.31328285\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32284100697590756\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [195/500, 0/38] loss: 0.31329378\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32233110757974476\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [196/500, 0/38] loss: 0.31326801\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [13/500, 100/150] loss: 0.31380942\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053286754168\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m [197/500, 0/38] loss: 0.31327742\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32320993959903715\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [14/500, 0/150] loss: 0.31347042\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32060025288508487\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [198/500, 0/38] loss: 0.31329441\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205386514847095\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [199/500, 0/38] loss: 0.31327817\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [14/500, 100/150] loss: 0.31334424\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3229373831015367\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [200/500, 0/38] loss: 0.31328762\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3248025393486023\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [15/500, 0/150] loss: 0.31335697\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206212314275595\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [201/500, 0/38] loss: 0.3132892\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053223023047817\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [202/500, 0/38] loss: 0.31326738\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [15/500, 100/150] loss: 0.31345674\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205381448452289\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [203/500, 0/38] loss: 0.31328127\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32527772068977356\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [16/500, 0/150] loss: 0.31328174\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205291055716001\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [204/500, 0/38] loss: 0.31329072\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205318863575275\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [205/500, 0/38] loss: 0.3132728\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [16/500, 100/150] loss: 0.31329432\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320529791025015\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [206/500, 0/38] loss: 0.31327593\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_15-59-57\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 16\n", + " loss: 0.3233336466550827\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 5.792171239852905\n", + " time_this_iter_s: 0.3858773708343506\n", + " time_total_s: 5.792171239852905\n", + " timestamp: 1660748397\n", + " timesteps_since_restore: 0\n", + " training_iteration: 16\n", + " trial_id: 2f206_00003\n", + " warmup_time: 
0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205348253250122\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [207/500, 0/38] loss: 0.31328613\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3233336466550827\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [17/500, 0/150] loss: 0.31328315\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207237491240868\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [208/500, 0/38] loss: 0.31328049\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205368152031532\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [209/500, 0/38] loss: 0.31327903\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [17/500, 100/150] loss: 0.31327343\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205292018560263\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [210/500, 0/38] loss: 0.31327352\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206170407625345\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [211/500, 0/38] loss: 0.31327519\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.324750235080719\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [18/500, 0/150] loss: 0.31332907\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052836739099944\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [212/500, 0/38] loss: 0.31327784\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206062385669121\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [213/500, 0/38] loss: 0.31327912\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [18/500, 100/150] loss: 0.31357834\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205457811172192\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [214/500, 0/38] loss: 0.3132664\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205293875474196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [215/500, 0/38] loss: 0.3132768\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206026003910945\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32302505671978\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [19/500, 0/150] loss: 0.31326979\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [216/500, 0/38] loss: 0.31328201\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205309258057521\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [217/500, 0/38] loss: 0.31328699\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053704215930057\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [19/500, 100/150] loss: 0.31334504\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m [218/500, 0/38] loss: 0.31328571\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32077356714468735\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [219/500, 0/38] loss: 0.31327671\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052979331750137\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32300570368766784\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [20/500, 0/150] loss: 0.31331882\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [220/500, 0/38] loss: 0.3132771\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205362374965961\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [221/500, 0/38] loss: 0.31328368\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205435092632587\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [20/500, 100/150] loss: 0.31326818\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [222/500, 0/38] loss: 0.31327134\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207635214695564\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [223/500, 0/38] loss: 0.31327632\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205408797814296\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [224/500, 0/38] loss: 0.31327778\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.323721809387207\n", + 
"\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [21/500, 0/150] loss: 0.3133963\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052993315916795\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [225/500, 0/38] loss: 0.31329176\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053109315725475\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [226/500, 0/38] loss: 0.31327152\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [21/500, 100/150] loss: 0.31332725\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205314553700961\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [227/500, 0/38] loss: 0.31327865\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3237955886125565\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205537933569688\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [228/500, 0/38] loss: 0.31327078\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [22/500, 0/150] loss: 0.31330639\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205433121094337\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [229/500, 0/38] loss: 0.31327847\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205537154124333\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [230/500, 0/38] loss: 0.31328425\n", + 
"\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [22/500, 100/150] loss: 0.31331563\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205445110797882\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [231/500, 0/38] loss: 0.3132782\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32316301226615907\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053130177351147\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [232/500, 0/38] loss: 0.31327644\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [23/500, 0/150] loss: 0.31326741\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320529254583212\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [233/500, 0/38] loss: 0.31327337\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [23/500, 100/150] loss: 0.31327006\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207575266177838\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [234/500, 0/38] loss: 0.31328911\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205432456273299\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [235/500, 0/38] loss: 0.31329605\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32297717332839965\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [24/500, 0/150] loss: 0.31329459\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 
0.3205421337714562\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [236/500, 0/38] loss: 0.31328163\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053414904154265\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [237/500, 0/38] loss: 0.31327882\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [24/500, 100/150] loss: 0.31329992\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32284154341771054\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [238/500, 0/38] loss: 0.3132841\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32315298736095427\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [25/500, 0/150] loss: 0.3133727\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054459131681\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [239/500, 0/38] loss: 0.31327286\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206387758255005\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [240/500, 0/38] loss: 0.31327653\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [25/500, 100/150] loss: 0.31339851\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32083706901623654\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [241/500, 0/38] loss: 0.31327409\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3236787527799606\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m 
trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [26/500, 0/150] loss: 0.31326991\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:00 (running for 00:00:35.53)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: None | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216813619320209 | Iter 32.000: -0.3240627921544589 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320761 | 0.9925 | 241 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.323679 | 0.9925 | 25 | 0.98501 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + 
"| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207607360986563\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [242/500, 0/38] loss: 0.31326687\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-00\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 242\n", + " loss: 0.3205289657299335\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 31.066117525100708\n", + " time_this_iter_s: 0.11099815368652344\n", + " time_total_s: 31.066117525100708\n", + " timestamp: 1660748400\n", + " timesteps_since_restore: 0\n", + " training_iteration: 242\n", + " trial_id: 2f206_00000\n", + 
" warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205289657299335\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [243/500, 0/38] loss: 0.31327581\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [26/500, 100/150] loss: 0.31328699\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205380898255568\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [244/500, 0/38] loss: 0.31328565\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3234120798110962\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [27/500, 0/150] loss: 0.31332886\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205777956889226\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [245/500, 0/38] loss: 0.31329444\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320529220195917\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [246/500, 0/38] loss: 0.31329393\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [27/500, 100/150] loss: 0.31331366\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205515444278717\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [247/500, 0/38] loss: 0.31327912\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3221659272909164\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [28/500, 0/150] loss: 0.31326666\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m The Current Loss: 0.32076892256736755\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [248/500, 0/38] loss: 0.31328645\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205304489685939\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [249/500, 0/38] loss: 0.31328371\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [28/500, 100/150] loss: 0.31327066\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205385414453653\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [250/500, 0/38] loss: 0.31326544\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32283606350421906\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [29/500, 0/150] loss: 0.31332859\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205486031679007\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [251/500, 0/38] loss: 0.31326896\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205401072135338\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [252/500, 0/38] loss: 0.31330559\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [29/500, 100/150] loss: 0.31328648\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053905725479126\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [253/500, 0/38] loss: 0.31329077\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.323403337597847\n", + 
"\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [30/500, 0/150] loss: 0.31328788\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3222986161708832\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [254/500, 0/38] loss: 0.31329313\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32230252027511597\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [255/500, 0/38] loss: 0.313274\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [30/500, 100/150] loss: 0.3132658\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-02\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 30\n", + " loss: 0.3225029271841049\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 10.916210412979126\n", + " time_this_iter_s: 0.3142111301422119\n", + " time_total_s: 10.916210412979126\n", + " timestamp: 1660748402\n", + " timesteps_since_restore: 0\n", + " training_iteration: 30\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052876399113583\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [256/500, 0/38] loss: 0.31326723\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3225029271841049\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [31/500, 0/150] loss: 0.31328076\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current 
Loss: 0.32053314264004046\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [257/500, 0/38] loss: 0.31328484\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205382823944092\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [258/500, 0/38] loss: 0.31328011\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [31/500, 100/150] loss: 0.31327516\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053558184550357\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [259/500, 0/38] loss: 0.31328422\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3229963207244873\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [32/500, 0/150] loss: 0.31327525\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205293187728295\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [260/500, 0/38] loss: 0.31327677\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205294425670917\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [261/500, 0/38] loss: 0.31329271\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32073064950796276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [32/500, 100/150] loss: 0.31326687\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [262/500, 0/38] loss: 0.31327903\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32076881940548235\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32196231007575987\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [33/500, 0/150] loss: 0.31327814\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [263/500, 0/38] loss: 0.31328154\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205283811459175\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [264/500, 0/38] loss: 0.31328231\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3228426300562345\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [33/500, 100/150] loss: 0.31327367\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [265/500, 0/38] loss: 0.31328589\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053531133211577\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [266/500, 0/38] loss: 0.31327471\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3218207061290741\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [34/500, 0/150] loss: 0.31327879\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205311642243312\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [267/500, 0/38] loss: 0.3132863\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32056512511693513\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [268/500, 0/38] loss: 0.31327423\n", + "\u001b[2m\u001b[36m(func 
pid=3932223)\u001b[0m [34/500, 100/150] loss: 0.31328773\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205431562203627\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [269/500, 0/38] loss: 0.3132861\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3222669351100922\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [35/500, 0/150] loss: 0.31327248\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207542621172391\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [270/500, 0/38] loss: 0.31329021\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3229258931600131\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [271/500, 0/38] loss: 0.31329548\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [35/500, 100/150] loss: 0.31326702\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32229400827334476\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [272/500, 0/38] loss: 0.31328726\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32219249904155733\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [36/500, 0/150] loss: 0.31327826\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205288350582123\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [273/500, 0/38] loss: 0.31328133\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205978045096764\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [274/500, 0/38] loss: 0.31326878\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [36/500, 100/150] loss: 0.31327307\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205305544229654\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [275/500, 0/38] loss: 0.31327873\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32245600640773775\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [37/500, 0/150] loss: 0.3132647\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205309693629925\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [276/500, 0/38] loss: 0.31327432\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205295571914086\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [277/500, 0/38] loss: 0.3132861\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [37/500, 100/150] loss: 0.31328681\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205284613829393\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [278/500, 0/38] loss: 0.31327397\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32254319965839384\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [38/500, 0/150] loss: 0.31328097\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205305016957797\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [279/500, 0/38] loss: 0.31328255\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206946414250594\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [280/500, 0/38] loss: 0.31329009\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [38/500, 100/150] loss: 0.31327221\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32057291498551\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [281/500, 0/38] loss: 0.31327596\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32213226079940793\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [39/500, 0/150] loss: 0.31327254\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32116944973285383\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [282/500, 0/38] loss: 0.31327441\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205367647684537\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [283/500, 0/38] loss: 0.31326863\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [39/500, 100/150] loss: 0.3132782\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205297314203702\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [284/500, 0/38] loss: 0.31327167\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32182838320732116\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [40/500, 0/150] loss: 
0.31326276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205293141878568\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [285/500, 0/38] loss: 0.31328732\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32068154215812683\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [286/500, 0/38] loss: 0.31328028\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [40/500, 100/150] loss: 0.31326336\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32065272560486424\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [287/500, 0/38] loss: 0.31328481\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32194960355758667\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [41/500, 0/150] loss: 0.31327319\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206457541539119\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [288/500, 0/38] loss: 0.31328529\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32055703493265003\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [289/500, 0/38] loss: 0.31328028\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [41/500, 100/150] loss: 0.31326556\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:05 (running for 00:00:40.56)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216813619320209 | Iter 
32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320539 | 0.9925 | 289 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321631 | 0.9925 | 41 | 0.98501 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 
0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205389449229607\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [290/500, 0/38] loss: 0.313288\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3216306924819946\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [42/500, 0/150] loss: 0.31326339\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053545117378235\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [291/500, 0/38] loss: 0.31328079\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-05\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 291\n", + " loss: 0.32053000651873076\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 36.150763750076294\n", + " time_this_iter_s: 0.10370254516601562\n", + " time_total_s: 36.150763750076294\n", + " timestamp: 
1660748405\n", + " timesteps_since_restore: 0\n", + " training_iteration: 291\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053000651873076\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [292/500, 0/38] loss: 0.3132664\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [42/500, 100/150] loss: 0.31328672\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052871584892273\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [293/500, 0/38] loss: 0.31328094\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.321844961643219\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [43/500, 0/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052996296149033\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [294/500, 0/38] loss: 0.31327552\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205640040911161\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [295/500, 0/38] loss: 0.31327686\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [43/500, 100/150] loss: 0.31326893\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205285370349884\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [296/500, 0/38] loss: 0.31328312\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3214355802536011\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [44/500, 0/150] loss: 0.31326386\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32060526884519136\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [297/500, 0/38] loss: 0.31327099\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3230903698847844\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [298/500, 0/38] loss: 0.31328389\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [44/500, 100/150] loss: 0.31326666\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320536182476924\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [299/500, 0/38] loss: 0.31328192\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3215671133995056\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [45/500, 0/150] loss: 0.31326696\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052863102692825\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [300/500, 0/38] loss: 0.31328723\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320530137190452\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [301/500, 0/38] loss: 0.31327561\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [45/500, 100/150] loss: 0.31326759\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054519194823045\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [302/500, 0/38] loss: 
0.31328422\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32216841220855713\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [46/500, 0/150] loss: 0.31329891\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320528323833759\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [303/500, 0/38] loss: 0.31329587\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32072290778160095\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [304/500, 0/38] loss: 0.31327522\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [46/500, 100/150] loss: 0.31326854\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-07\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 46\n", + " loss: 0.32153730511665346\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 16.01649785041809\n", + " time_this_iter_s: 0.3200042247772217\n", + " time_total_s: 16.01649785041809\n", + " timestamp: 1660748407\n", + " timesteps_since_restore: 0\n", + " training_iteration: 46\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32061453736745393\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [305/500, 0/38] loss: 0.31327319\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32153730511665346\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3932223)\u001b[0m [47/500, 0/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052849577023435\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [306/500, 0/38] loss: 0.31327257\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32081358249370867\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [307/500, 0/38] loss: 0.31328052\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053303489318263\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [47/500, 100/150] loss: 0.31326276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [308/500, 0/38] loss: 0.31326821\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210828464764815\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32198085069656374\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [48/500, 0/150] loss: 0.31327048\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [309/500, 0/38] loss: 0.31328508\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3218989991224729\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [310/500, 0/38] loss: 0.31326905\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052934628266555\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [48/500, 100/150] loss: 0.313263\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [311/500, 0/38] loss: 0.31328157\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207538907344525\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32182998418807984\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [49/500, 0/150] loss: 0.3132624\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [312/500, 0/38] loss: 0.31328207\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052897260739255\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [313/500, 0/38] loss: 0.31327927\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205283421736497\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [49/500, 100/150] loss: 0.31326261\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [314/500, 0/38] loss: 0.3132875\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3209566130087926\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.321798740029335\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [50/500, 0/150] loss: 0.31326777\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [315/500, 0/38] loss: 0.31327745\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205471749489124\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [316/500, 0/38] loss: 0.31327027\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054534554481506\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [317/500, 0/38] loss: 0.31328753\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [50/500, 100/150] loss: 0.31326509\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054372704946077\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [318/500, 0/38] loss: 0.31326744\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3215301024913788\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [51/500, 0/150] loss: 0.31326747\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205289703149062\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [319/500, 0/38] loss: 0.31328046\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207618502470163\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [320/500, 0/38] loss: 0.31328049\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [51/500, 100/150] loss: 0.31326467\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205324067519261\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [321/500, 0/38] loss: 0.31327567\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32191194117069244\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205283834384038\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [322/500, 0/38] loss: 0.31327716\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [52/500, 0/150] loss: 
0.31326556\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32212144365677464\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [323/500, 0/38] loss: 0.31330565\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [52/500, 100/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205297222504249\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [324/500, 0/38] loss: 0.31327015\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320538546030338\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [325/500, 0/38] loss: 0.31328115\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213387852907181\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [53/500, 0/150] loss: 0.31326327\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207611693785741\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [326/500, 0/38] loss: 0.31328171\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [53/500, 100/150] loss: 0.31326249\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205288304732396\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [327/500, 0/38] loss: 0.31329229\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32062541292263913\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [328/500, 0/38] loss: 0.31329229\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current 
Loss: 0.32175444066524506\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [54/500, 0/150] loss: 0.31326395\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205611270207625\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [329/500, 0/38] loss: 0.3132796\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [54/500, 100/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205297360053429\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [330/500, 0/38] loss: 0.31327325\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052885110561663\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [331/500, 0/38] loss: 0.31330308\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32210037171840666\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [55/500, 0/150] loss: 0.31327417\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205364805001479\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [332/500, 0/38] loss: 0.3132787\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053051315821135\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [333/500, 0/38] loss: 0.31327483\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [55/500, 100/150] loss: 0.3132627\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205299973487854\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [334/500, 0/38] loss: 0.31327903\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32191411435604095\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [56/500, 0/150] loss: 0.3132638\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32076196716381955\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [335/500, 0/38] loss: 0.31329238\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054357803784883\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [336/500, 0/38] loss: 0.31328717\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [56/500, 100/150] loss: 0.31326255\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207645255785722\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [337/500, 0/38] loss: 0.31328091\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32150392651557924\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [57/500, 0/150] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205332182920896\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [338/500, 0/38] loss: 0.31328776\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:10 (running for 00:00:45.65)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: 
-0.3216813619320209 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.322833 | 0.9925 | 338 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321504 | 0.9925 | 56 | 0.98501 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| 
traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32283294888643116\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [339/500, 0/38] loss: 0.31328619\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [57/500, 100/150] loss: 0.31326386\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-10\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 339\n", + " loss: 0.32283877638670117\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 41.18391251564026\n", + " time_this_iter_s: 0.11077523231506348\n", + " time_total_s: 41.18391251564026\n", + " timestamp: 1660748410\n", + " timesteps_since_restore: 0\n", + " training_iteration: 339\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32283877638670117\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m 
[340/500, 0/38] loss: 0.31328136\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3214773070812225\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [58/500, 0/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207646837601295\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [341/500, 0/38] loss: 0.31327394\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053423615602344\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [342/500, 0/38] loss: 0.31327692\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [58/500, 100/150] loss: 0.31326285\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054394712814915\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [343/500, 0/38] loss: 0.31327704\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053102667515093\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [344/500, 0/38] loss: 0.31328064\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32146471679210664\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [59/500, 0/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207598351515256\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [345/500, 0/38] loss: 0.31328285\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [59/500, 100/150] loss: 0.31326506\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m The Current Loss: 0.32076494510357195\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [346/500, 0/38] loss: 0.31329849\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32063741179612965\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [347/500, 0/38] loss: 0.31329232\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32137439012527463\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [60/500, 0/150] loss: 0.31326494\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205297222504249\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [348/500, 0/38] loss: 0.31327835\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205297589302063\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [349/500, 0/38] loss: 0.31328699\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [60/500, 100/150] loss: 0.31326395\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32061490645775426\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [350/500, 0/38] loss: 0.31328145\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3215734165906906\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [61/500, 0/150] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052911244905913\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m [351/500, 0/38] loss: 0.31327286\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205539630009578\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [352/500, 0/38] loss: 0.31326616\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [61/500, 100/150] loss: 0.313263\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3219124193374927\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [353/500, 0/38] loss: 0.31326464\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213862085342407\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [62/500, 0/150] loss: 0.31326276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32128153626735395\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [354/500, 0/38] loss: 0.31329834\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205285622523381\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [355/500, 0/38] loss: 0.31328437\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [62/500, 100/150] loss: 0.31326342\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-12\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 62\n", + " loss: 0.3214064699411392\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 21.310381650924683\n", + " time_this_iter_s: 0.33643341064453125\n", + " time_total_s: 
21.310381650924683\n", + " timestamp: 1660748412\n", + " timesteps_since_restore: 0\n", + " training_iteration: 62\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205281679446881\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [356/500, 0/38] loss: 0.31328654\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3214064699411392\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [63/500, 0/150] loss: 0.31326464\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32058279330913836\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [357/500, 0/38] loss: 0.31327942\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320540393774326\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [358/500, 0/38] loss: 0.31328982\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [63/500, 100/150] loss: 0.31326255\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052987813949585\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [359/500, 0/38] loss: 0.31327817\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3210997647047043\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053645757528454\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [360/500, 0/38] loss: 0.31328496\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m 
[64/500, 0/150] loss: 0.31326321\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205306025651785\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [361/500, 0/38] loss: 0.31328493\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [64/500, 100/150] loss: 0.31326261\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205355520431812\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [362/500, 0/38] loss: 0.31327447\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205405588333423\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [363/500, 0/38] loss: 0.31327686\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3215651273727417\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [65/500, 0/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205529772318326\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [364/500, 0/38] loss: 0.31328902\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [65/500, 100/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053455251913804\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [365/500, 0/38] loss: 0.31327364\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053004319851214\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [366/500, 0/38] loss: 0.31327826\n", + "\u001b[2m\u001b[36m(func 
pid=3932223)\u001b[0m The Current Loss: 0.3212441468238831\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [66/500, 0/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207553510482495\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [367/500, 0/38] loss: 0.31328255\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [66/500, 100/150] loss: 0.31326267\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053060027269215\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [368/500, 0/38] loss: 0.31328216\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32140096843242644\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [67/500, 0/150] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32119107017150295\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [369/500, 0/38] loss: 0.31329253\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [67/500, 100/150] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205546851341541\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [370/500, 0/38] loss: 0.31328368\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053738832473755\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [371/500, 0/38] loss: 0.31328946\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32218522250652315\n", + 
"\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [68/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205284017782945\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [372/500, 0/38] loss: 0.3132925\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [68/500, 100/150] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205288235957806\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [373/500, 0/38] loss: 0.31328684\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3209487016384418\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [374/500, 0/38] loss: 0.31328377\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3215513825416565\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [69/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053199181189906\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [375/500, 0/38] loss: 0.3132892\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [69/500, 100/150] loss: 0.31326315\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210810858469743\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [376/500, 0/38] loss: 0.31327844\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054529052514297\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [377/500, 0/38] loss: 0.31329459\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3212739443778992\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [70/500, 0/150] loss: 0.31326279\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205389449229607\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [378/500, 0/38] loss: 0.31328127\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205292889705071\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [379/500, 0/38] loss: 0.31326759\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [70/500, 100/150] loss: 0.31326273\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32075300124975353\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [380/500, 0/38] loss: 0.31329161\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3214095836877823\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [71/500, 0/150] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052929126299345\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [381/500, 0/38] loss: 0.31327304\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205640476483565\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [382/500, 0/38] loss: 0.31329277\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [71/500, 100/150] loss: 
0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053162501408505\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [383/500, 0/38] loss: 0.31328356\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32075350559674776\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32158623039722445\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [72/500, 0/150] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [384/500, 0/38] loss: 0.31327957\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205321797957787\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [385/500, 0/38] loss: 0.31326988\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [72/500, 100/150] loss: 0.31326258\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:15 (running for 00:00:50.69)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320532 | 0.9925 | 384 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321207 | 0.9925 | 72 | 0.98501 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 
16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205374272970053\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [386/500, 0/38] loss: 0.3132723\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.321207377910614\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [73/500, 0/150] loss: 0.31326222\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-15\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 386\n", + " loss: 0.3205290986941411\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 46.21915817260742\n", + " time_this_iter_s: 0.10437560081481934\n", + " time_total_s: 46.21915817260742\n", + " timestamp: 1660748415\n", + " timesteps_since_restore: 0\n", + " training_iteration: 386\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205290986941411\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [387/500, 0/38] loss: 0.31327951\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205367143337543\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [388/500, 0/38] loss: 0.31328019\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [73/500, 100/150] loss: 
0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3228334830357478\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [389/500, 0/38] loss: 0.31327406\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32142930626869204\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [74/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205298391672281\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [390/500, 0/38] loss: 0.31328344\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205319941043854\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [391/500, 0/38] loss: 0.31329221\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [74/500, 100/150] loss: 0.31326273\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205683964949388\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [392/500, 0/38] loss: 0.31327236\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3211813348531723\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [75/500, 0/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320528119802475\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [393/500, 0/38] loss: 0.31327897\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205769337140597\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger 
Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [394/500, 0/38] loss: 0.3132807\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [75/500, 100/150] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32072841662627\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [395/500, 0/38] loss: 0.31326783\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3212603324651718\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [76/500, 0/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3207883101243239\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [396/500, 0/38] loss: 0.31329876\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052833758867705\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [397/500, 0/38] loss: 0.31329814\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [76/500, 100/150] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205281037550706\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [398/500, 0/38] loss: 0.31328791\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213132464885712\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [77/500, 0/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205281908695514\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [399/500, 0/38] 
loss: 0.31329238\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053056130042445\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [400/500, 0/38] loss: 0.31327796\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [77/500, 100/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32073078476465666\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [401/500, 0/38] loss: 0.31327915\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3212735390663147\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205289290501521\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [402/500, 0/38] loss: 0.31328559\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [78/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205446211191324\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [403/500, 0/38] loss: 0.31327936\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [78/500, 100/150] loss: 0.31326243\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205723166465759\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [404/500, 0/38] loss: 0.31327754\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32131375968456266Result for traindata_2f206_00003:\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-17\n", + " done: false\n", + " experiment_id: 
e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 78\n", + " loss: 0.32131375968456266\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 26.514336585998535\n", + " time_this_iter_s: 0.32125115394592285\n", + " time_total_s: 26.514336585998535\n", + " timestamp: 1660748417\n", + " timesteps_since_restore: 0\n", + " training_iteration: 78\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205315080972818\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [405/500, 0/38] loss: 0.31327516\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [79/500, 0/150] loss: 0.31326237\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205276681826665\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [406/500, 0/38] loss: 0.3132849\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [79/500, 100/150] loss: 0.31326237\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205275329259726\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [407/500, 0/38] loss: 0.31327388\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213146185874939\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32058414358359116\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [408/500, 0/38] loss: 0.3132807\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [80/500, 0/150] loss: 0.31326246\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205282435967372\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [409/500, 0/38] loss: 0.31329179\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205637037754059\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [80/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [410/500, 0/38] loss: 0.31327108\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32056503800245434\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32133733689785005\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [411/500, 0/38] loss: 0.31327233\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052810604755694\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [81/500, 0/150] loss: 0.31326345\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [412/500, 0/38] loss: 0.31327572\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205309877028832\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [81/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [413/500, 0/38] loss: 0.3132813\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205365790770604\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [414/500, 0/38] loss: 0.31327939\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 
0.32283814824544466\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3212586134672165\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [415/500, 0/38] loss: 0.31327111\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32056356164125294\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [416/500, 0/38] loss: 0.31328276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205274503964644\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [417/500, 0/38] loss: 0.3132689\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32190306599323565\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [418/500, 0/38] loss: 0.31329191\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3221202836586879\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [419/500, 0/38] loss: 0.31327277\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052902304209197\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [82/500, 0/150] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [420/500, 0/38] loss: 0.31328723\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32080135207909805\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [82/500, 100/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [421/500, 0/38] 
loss: 0.31327274\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053443560233486\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [422/500, 0/38] loss: 0.31328332\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205346694359413\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [423/500, 0/38] loss: 0.31328389\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213057714700699\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [83/500, 0/150] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053688856271595\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [424/500, 0/38] loss: 0.31328458\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205359371808859\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [425/500, 0/38] loss: 0.31327486\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [83/500, 100/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052796391340405\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [426/500, 0/38] loss: 0.31327152\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32126233041286467\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [84/500, 0/150] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32189709635881275\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m 
trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [427/500, 0/38] loss: 0.3132714\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32189698631946856\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [428/500, 0/38] loss: 0.31326824\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [84/500, 100/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32075267800917995\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [429/500, 0/38] loss: 0.31327888\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213347953557968\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [85/500, 0/150] loss: 0.31326249\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205278699214642\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [430/500, 0/38] loss: 0.31328934\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206466000813704\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [431/500, 0/38] loss: 0.3132861\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [85/500, 100/150] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052718905302197\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [432/500, 0/38] loss: 0.31328321\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213509702682495\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m 
[86/500, 0/150] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210868308177361\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [433/500, 0/38] loss: 0.31327844\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:20 (running for 00:00:55.70)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320536 | 0.9925 | 433 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321351 | 0.9925 | 85 | 0.98501 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 
0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320536154967088\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [434/500, 0/38] loss: 0.31327546\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [86/500, 100/150] loss: 0.31326216\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-20\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 434\n", + " loss: 0.3205330967903137\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " 
should_checkpoint: true\n", + " time_since_restore: 51.22985076904297\n", + " time_this_iter_s: 0.111328125\n", + " time_total_s: 51.22985076904297\n", + " timestamp: 1660748420\n", + " timesteps_since_restore: 0\n", + " training_iteration: 434\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205330967903137\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [435/500, 0/38] loss: 0.31327882\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3225040848438556\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [436/500, 0/38] loss: 0.31327561\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32134968519210816\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [87/500, 0/150] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32090463088108945\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [437/500, 0/38] loss: 0.31327164\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205312146590306\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [438/500, 0/38] loss: 0.31329143\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [87/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32076284289360046\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [439/500, 0/38] loss: 0.31330398\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 
0.32133745968341826\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3222940747554486\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [440/500, 0/38] loss: 0.31329694\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [88/500, 0/150] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32315373649963963\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [441/500, 0/38] loss: 0.31327659\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205357767068423\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [442/500, 0/38] loss: 0.31327438\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [88/500, 100/150] loss: 0.31326228\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3210838666329017\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [443/500, 0/38] loss: 0.31327921\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213272303342819\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [89/500, 0/150] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205339771050673\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [444/500, 0/38] loss: 0.31329781\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205354672211867\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [445/500, 0/38] loss: 
0.31327975\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [89/500, 100/150] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205295961636763\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [446/500, 0/38] loss: 0.31328389\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205418311632596\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [447/500, 0/38] loss: 0.31327501\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213229846954346\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [90/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205335827974173\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [448/500, 0/38] loss: 0.31327441\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205638046448047\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [449/500, 0/38] loss: 0.31328222\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [90/500, 100/150] loss: 0.3132624\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052942422720104\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [450/500, 0/38] loss: 0.31327689\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052746644386876\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [451/500, 0/38] loss: 0.31327438\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current 
Loss: 0.32132733225822446\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [91/500, 0/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052799142324007\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [452/500, 0/38] loss: 0.31326786\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052835592856777\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [453/500, 0/38] loss: 0.31327361\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [91/500, 100/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053523568006664\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [454/500, 0/38] loss: 0.31327584\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32131448209285735\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-22\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 91\n", + " loss: 0.32131448209285735\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 31.704716444015503\n", + " time_this_iter_s: 0.40067577362060547\n", + " time_total_s: 31.704716444015503\n", + " timestamp: 1660748422\n", + " timesteps_since_restore: 0\n", + " training_iteration: 91\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052945173703706\n", + 
"\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [455/500, 0/38] loss: 0.31327915\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [92/500, 0/150] loss: 0.31326285\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052911015657276\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [456/500, 0/38] loss: 0.31329054\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32060797627155596\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [457/500, 0/38] loss: 0.31327677\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [92/500, 100/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053911915192235\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [458/500, 0/38] loss: 0.3132818\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213169056177139\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [93/500, 0/150] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320528669999196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [459/500, 0/38] loss: 0.31328085\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205359876155853\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [460/500, 0/38] loss: 0.31327274\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [93/500, 100/150] loss: 0.313263\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 
0.32054140246831453\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [461/500, 0/38] loss: 0.31328124\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205298070724194\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [462/500, 0/38] loss: 0.3132908\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130702257156374\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [94/500, 0/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205272945073935\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [463/500, 0/38] loss: 0.31328052\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205351371031541\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [464/500, 0/38] loss: 0.31327593\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [94/500, 100/150] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053616872200597\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [465/500, 0/38] loss: 0.31326953\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205276865225572\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [466/500, 0/38] loss: 0.31329855\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213072991371155\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [95/500, 0/150] loss: 
0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3236729410978464\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [467/500, 0/38] loss: 0.31328869\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205284269956442\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [468/500, 0/38] loss: 0.3132821\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [95/500, 100/150] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205948999294868\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [469/500, 0/38] loss: 0.31328481\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205280235180488\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [470/500, 0/38] loss: 0.31326896\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3212909400463104\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [96/500, 0/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3213354211587172\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [471/500, 0/38] loss: 0.31327808\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205273953767923\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [472/500, 0/38] loss: 0.31329086\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [96/500, 100/150] loss: 0.31326261\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current 
Loss: 0.32072672706383926\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [473/500, 0/38] loss: 0.3132773\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213068962097168\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [97/500, 0/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32057860035162705\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [474/500, 0/38] loss: 0.31329364\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053166169386643\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [475/500, 0/38] loss: 0.31328332\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [97/500, 100/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205807896760794\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [476/500, 0/38] loss: 0.31327882\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206131343658154\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [477/500, 0/38] loss: 0.31329742\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213062536716461\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [98/500, 0/150] loss: 0.31326234\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32066285380950343\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [478/500, 
0/38] loss: 0.31330571\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205265723741971\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [479/500, 0/38] loss: 0.31327718\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [98/500, 100/150] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205309464381291\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [480/500, 0/38] loss: 0.31327683\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130445301532745\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32054537305465114\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [481/500, 0/38] loss: 0.31327274\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [99/500, 0/150] loss: 0.31326228\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:25 (running for 00:01:00.76)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=2\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (11 PENDING, 2 RUNNING, 2 TERMINATED)\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | RUNNING | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320534 | 0.9925 | 481 | 0.985111 |\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321304 | 0.9925 | 98 | 0.98501 |\n", + "| traindata_2f206_00004 | PENDING | | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 
16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053359425984895\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [482/500, 0/38] loss: 0.31327832\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-25\n", + " done: false\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 482\n", + " loss: 0.3205342407409961\n", + " mcc: 0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 56.29356122016907\n", + " time_this_iter_s: 0.11139273643493652\n", + " time_total_s: 56.29356122016907\n", + " timestamp: 1660748425\n", + " timesteps_since_restore: 0\n", + " training_iteration: 482\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205342407409961\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [483/500, 0/38] loss: 0.31327489\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [99/500, 100/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32230132588973415\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [484/500, 0/38] loss: 0.31327331\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130571603775027\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [100/500, 0/150] loss: 
0.31326267\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052634541804975\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [485/500, 0/38] loss: 0.31327987\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320541833455746\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [486/500, 0/38] loss: 0.31327125\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [100/500, 100/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3224005401134491\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [487/500, 0/38] loss: 0.3132844\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130429208278655\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [101/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205262285012465\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [488/500, 0/38] loss: 0.31327808\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.320534898684575\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [489/500, 0/38] loss: 0.31328845\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [101/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206511162794553\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [490/500, 0/38] loss: 0.31329036\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current 
Loss: 0.3213041305541992\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3228459381140195\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [491/500, 0/38] loss: 0.31327131\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [102/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32053115276189953\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [492/500, 0/38] loss: 0.31328738\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [102/500, 100/150] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32052616889660174\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [493/500, 0/38] loss: 0.31327686\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32189869422179\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [494/500, 0/38] loss: 0.31329542\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130364418029783\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [103/500, 0/150] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3206800612119528\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [495/500, 0/38] loss: 0.31327155\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [103/500, 100/150] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32103729706544143\n", + "\u001b[2m\u001b[36m(func 
pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [496/500, 0/38] loss: 0.31329072\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205689856639275\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [497/500, 0/38] loss: 0.31327349\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213034933805466\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [104/500, 0/150] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205384566233708\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [498/500, 0/38] loss: 0.31329718\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32068525827847993\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [499/500, 0/38] loss: 0.31327471\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [104/500, 100/150] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.32190743547219497\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m [500/500, 0/38] loss: 0.31328577\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213029891252518\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [105/500, 0/150] loss: 0.31326193\n", + "Result for traindata_2f206_00000:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-27\n", + " done: true\n", + " experiment_id: eb87ed09742440a1af4158ebc534f380\n", + " hostname: tesla\n", + " iterations_since_restore: 500\n", + " loss: 0.3205354053240556\n", + " mcc: 
0.9851108312031162\n", + " node_ip: 192.168.85.249\n", + " pid: 3931827\n", + " should_checkpoint: true\n", + " time_since_restore: 58.20078086853027\n", + " time_this_iter_s: 0.10422086715698242\n", + " time_total_s: 58.20078086853027\n", + " timestamp: 1660748427\n", + " timesteps_since_restore: 0\n", + " training_iteration: 500\n", + " trial_id: 2f206_00000\n", + " warmup_time: 0.003675699234008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m The Current Loss: 0.3205354053240556\n", + "\u001b[2m\u001b[36m(func pid=3931827)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [105/500, 100/150] loss: 0.31326199\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-28\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 105\n", + " loss: 0.32130116581916807\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 36.844499826431274\n", + " time_this_iter_s: 0.3309593200683594\n", + " time_total_s: 36.844499826431274\n", + " timestamp: 1660748428\n", + " timesteps_since_restore: 0\n", + " training_iteration: 105\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130116581916807\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [106/500, 0/150] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [106/500, 100/150] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3212984263896942\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [107/500, 0/150] loss: 0.31326288\n", + "\u001b[2m\u001b[36m(func 
pid=3932223)\u001b[0m [107/500, 100/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130235612392427\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [108/500, 0/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [108/500, 100/150] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213021504878998\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [109/500, 0/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [109/500, 100/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130200624465943\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [110/500, 0/150] loss: 0.31326249\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [110/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.321301965713501\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [111/500, 0/150] loss: 0.3132624\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [111/500, 100/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130134999752047\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [112/500, 0/150] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [112/500, 100/150] loss: 0.31326267\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130186319351195\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [113/500, 0/150] loss: 
0.31326276\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [113/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130130887031555\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [114/500, 0/150] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [114/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213012856245041\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:31 (running for 00:01:05.97)\n", + "Memory usage on this node: 90.7/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=3\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.5868336225243715\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (10 PENDING, 2 RUNNING, 3 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 
0.9925 | 114 | 0.98501 |\n", + "| traindata_2f206_00004 | RUNNING | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | | | | |\n", + "| traindata_2f206_00005 | PENDING | | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [115/500, 0/150] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [115/500, 100/150] loss: 0.3132619\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-33\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + 
" hostname: tesla\n", + " iterations_since_restore: 115\n", + " loss: 0.32130114555358885\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 42.43229818344116\n", + " time_this_iter_s: 2.597268581390381\n", + " time_total_s: 42.43229818344116\n", + " timestamp: 1660748433\n", + " timesteps_since_restore: 0\n", + " training_iteration: 115\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130114555358885\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [116/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [116/500, 100/150] loss: 0.31326243\n", + "\u001b[2m\u001b[36m(func pid=3933151)\u001b[0m [1/500, 0/75] loss: 0.67366892\n", + "Result for traindata_2f206_00004:\n", + " accuracy: 0.7975\n", + " date: 2022-08-17_16-00-33\n", + " done: true\n", + " experiment_id: 6b2143a1ec6c436483d748346bcb684a\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6708784818649292\n", + " mcc: 0.6018897602811472\n", + " node_ip: 192.168.85.249\n", + " pid: 3933151\n", + " should_checkpoint: true\n", + " time_since_restore: 0.6307995319366455\n", + " time_this_iter_s: 0.6307995319366455\n", + " time_total_s: 0.6307995319366455\n", + " timestamp: 1660748433\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00004\n", + " warmup_time: 0.003902435302734375\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213005667924881\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [117/500, 0/150] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3933151)\u001b[0m The Current Loss: 0.6708784818649292\n", + 
"\u001b[2m\u001b[36m(func pid=3933151)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [117/500, 100/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213006180524826\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [118/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [118/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213010269403458\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [119/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [119/500, 100/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213010162115097\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [120/500, 0/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [120/500, 100/150] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213010323047638\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [121/500, 0/150] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [121/500, 100/150] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213010185956955\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [122/500, 0/150] loss: 0.31326234\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [122/500, 100/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130097508430483\n", + "\u001b[2m\u001b[36m(func 
pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [123/500, 0/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [123/500, 100/150] loss: 0.31326249\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213009661436081\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:36 (running for 00:01:11.30)\n", + "Memory usage on this node: 90.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=4\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.6337895806019123\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (9 PENDING, 2 RUNNING, 4 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00003 | RUNNING | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 123 | 0.98501 |\n", + "| traindata_2f206_00005 | RUNNING | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | | | | |\n", + "| traindata_2f206_00006 | PENDING | | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| 
traindata_2f206_00007 | PENDING | | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [124/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [124/500, 100/150] loss: 0.31326196\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-39\n", + " done: false\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 124\n", + " loss: 0.32130094289779665\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 
3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 48.42118430137634\n", + " time_this_iter_s: 3.2621710300445557\n", + " time_total_s: 48.42118430137634\n", + " timestamp: 1660748439\n", + " timesteps_since_restore: 0\n", + " training_iteration: 124\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130094289779665\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [125/500, 0/150] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933287)\u001b[0m [1/500, 0/150] loss: 0.740282\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [125/500, 100/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933287)\u001b[0m [1/500, 100/150] loss: 0.68908316\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.3213009560108185\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [126/500, 0/150] loss: 0.31326225\n", + "Result for traindata_2f206_00005:\n", + " accuracy: 0.515\n", + " date: 2022-08-17_16-00-40\n", + " done: true\n", + " experiment_id: ac06fe1c47934fe1afc253b980715bee\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6849408125877381\n", + " mcc: 0.08824300313883983\n", + " node_ip: 192.168.85.249\n", + " pid: 3933287\n", + " should_checkpoint: true\n", + " time_since_restore: 0.6857128143310547\n", + " time_this_iter_s: 0.6857128143310547\n", + " time_total_s: 0.6857128143310547\n", + " timestamp: 1660748440\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00005\n", + " warmup_time: 0.0038983821868896484\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933287)\u001b[0m The Current Loss: 0.6849408125877381\n", + "\u001b[2m\u001b[36m(func pid=3933287)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [126/500, 100/150] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130090177059173\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [127/500, 0/150] loss: 0.31326234\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [127/500, 100/150] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130091965198515\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [128/500, 0/150] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m [128/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00003:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-00-40\n", + " done: true\n", + " experiment_id: e06db0d8c6434640a8c6e8e79ec600b5\n", + " hostname: tesla\n", + " iterations_since_restore: 128\n", + " loss: 0.32130091965198515\n", + " mcc: 0.9850100660598952\n", + " node_ip: 192.168.85.249\n", + " pid: 3932223\n", + " should_checkpoint: true\n", + " time_since_restore: 49.721484899520874\n", + " time_this_iter_s: 0.32651257514953613\n", + " time_total_s: 49.721484899520874\n", + " timestamp: 1660748440\n", + " timesteps_since_restore: 0\n", + " training_iteration: 128\n", + " trial_id: 2f206_00003\n", + " warmup_time: 0.0038690567016601562\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m The Current Loss: 0.32130091965198515\n", + "\u001b[2m\u001b[36m(func pid=3932223)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [1/500, 0/75] loss: 0.67838752\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.38644643425941466\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:42 (running for 00:01:17.70)\n", + "Memory usage on this node: 
88.8/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=6\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.32105705048029237 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.42899625301361083 | Iter 1.000: -0.6523340312334207\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (7 PENDING, 2 RUNNING, 6 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | | | | |\n", + "| traindata_2f206_00007 | RUNNING | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | | | | |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", 
+ "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.925\n", + " date: 2022-08-17_16-00-45\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.38644643425941466\n", + " mcc: 0.8564048343012224\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 0.5281059741973877\n", + " time_this_iter_s: 0.5281059741973877\n", + " time_total_s: 0.5281059741973877\n", + " timestamp: 1660748445\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [2/500, 0/75] loss: 0.42802602\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.341545889377594\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [3/500, 0/75] loss: 0.37690109\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.33834433794021607\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [4/500, 0/75] loss: 0.31337431\n", + "\u001b[2m\u001b[36m(func pid=3933423)\u001b[0m [1/500, 0/38] loss: 0.68934417\n", + "Result for traindata_2f206_00007:\n", + " accuracy: 0.4925\n", + " date: 2022-08-17_16-00-47\n", + " done: true\n", + " experiment_id: 1079dac156f941f6b8b28a8e83127dc8\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.693847853403825\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.249\n", + " pid: 3933423\n", + " should_checkpoint: true\n", + " time_since_restore: 0.4767336845397949\n", + " time_this_iter_s: 0.4767336845397949\n", + " time_total_s: 0.4767336845397949\n", + " timestamp: 1660748447\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00007\n", + " warmup_time: 0.0038635730743408203\n", + " \n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:47 (running for 00:01:22.73)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=7\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.32105705048029237 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.3230125511151094 | Iter 16.000: -0.3352227004674765 | Iter 8.000: -0.34589595258235933 | Iter 4.000: -0.3658917301893234 | Iter 2.000: -0.39181416571140293 | Iter 1.000: -0.6523340312334207\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (7 PENDING, 2 RUNNING, 6 TERMINATED)\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.338344 | 0.97 | 3 | 0.941207 |\n", + "| traindata_2f206_00007 | RUNNING | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | PENDING | | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 
192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933423)\u001b[0m The Current Loss: 0.693847853403825\n", + "\u001b[2m\u001b[36m(func pid=3933423)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3196658670902252\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [5/500, 0/75] loss: 0.31351417\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3231245684623718\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [6/500, 0/75] loss: 0.31336054\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.319616014957428\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [7/500, 0/75] loss: 0.31330553\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31947951555252074\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [8/500, 0/75] loss: 0.31335053\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31941957712173463\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [9/500, 0/75] loss: 0.31326473\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.319235657453537\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [10/500, 0/75] loss: 0.31329378\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31918301463127136\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [11/500, 0/75] loss: 0.31327569\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31913188576698304\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [12/500, 0/75] loss: 0.3132633\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31914293885231015\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [13/500, 0/75] loss: 0.31326556\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31908098340034485\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [14/500, 0/75] loss: 0.31327471\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3190838003158569\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [15/500, 0/75] loss: 0.3132695\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31905520796775816\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [16/500, 0/75] loss: 0.31327099\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3190491700172424\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [17/500, 0/75] loss: 0.31327426\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3190009415149689\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [18/500, 0/75] loss: 0.31326362\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3189455533027649\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [19/500, 0/75] loss: 0.31326661\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31893757462501526\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [20/500, 0/75] loss: 0.31327114\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3189601445198059\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [21/500, 0/75] loss: 0.31327647\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3189193844795227\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [22/500, 0/75] loss: 0.31326377\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3189055395126343\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [23/500, 0/75] loss: 0.3132664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31889763593673703\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [24/500, 0/75] loss: 0.31326276\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3188804948329926\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [25/500, 0/75] loss: 0.31326443\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3188776683807373\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [26/500, 0/75] loss: 0.31326321\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3188796055316925\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [27/500, 0/75] loss: 0.31326821\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31886823415756227\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [28/500, 0/75] loss: 0.31326786\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31885043263435364\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [29/500, 0/75] loss: 0.31326243\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3188387441635132\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [30/500, 0/75] loss: 0.31326416\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-00-52\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 30\n", + " loss: 0.3188227140903473\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 7.213489532470703\n", + " time_this_iter_s: 0.1904916763305664\n", + " time_total_s: 7.213489532470703\n", + " timestamp: 1660748452\n", + " timesteps_since_restore: 0\n", + " training_iteration: 30\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3188227140903473\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [31/500, 0/75] 
loss: 0.31326675\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3188177835941315\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [32/500, 0/75] loss: 0.3757672\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187961435317993\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:52 (running for 00:01:27.81)\n", + "Memory usage on this node: 92.1/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=7\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.32105705048029237 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3292781735612796 | Iter 8.000: -0.3453116735816002 | Iter 4.000: -0.3608327549695969 | Iter 2.000: -0.39181416571140293 | Iter 1.000: -0.6523340312334207\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (6 PENDING, 2 RUNNING, 7 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318796 | 0.995 | 32 | 0.989998 |\n", + "| traindata_2f206_00008 | RUNNING | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | | | | |\n", + "| traindata_2f206_00009 | PENDING | | 8 
| 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [33/500, 0/75] loss: 0.31326485\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3188030767440796\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [34/500, 0/75] loss: 0.3132619\n", 
+ "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31879557967185973\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [35/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933523)\u001b[0m [1/500, 0/75] loss: 0.69649231\n", + "Result for traindata_2f206_00008:\n", + " accuracy: 0.54\n", + " date: 2022-08-17_16-00-53\n", + " done: true\n", + " experiment_id: 9bdbcd15dcb64ca48c510bc756141f7f\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6770060205459595\n", + " mcc: 0.18696707506424096\n", + " node_ip: 192.168.85.249\n", + " pid: 3933523\n", + " should_checkpoint: true\n", + " time_since_restore: 0.5599002838134766\n", + " time_this_iter_s: 0.5599002838134766\n", + " time_total_s: 0.5599002838134766\n", + " timestamp: 1660748453\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00008\n", + " warmup_time: 0.0037078857421875\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187883949279785\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [36/500, 0/75] loss: 0.31326526\n", + "\u001b[2m\u001b[36m(func pid=3933523)\u001b[0m The Current Loss: 0.6770060205459595\n", + "\u001b[2m\u001b[36m(func pid=3933523)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187793564796448\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [37/500, 0/75] loss: 0.31326339\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187821578979492\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [38/500, 0/75] loss: 0.31326318\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 
0.31877565264701846\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [39/500, 0/75] loss: 0.31326386\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31875607252120974\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [40/500, 0/75] loss: 0.31326398\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31875781655311586\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [41/500, 0/75] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31876572728157043\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [42/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187565267086029\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [43/500, 0/75] loss: 0.31326398\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187372040748596\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [44/500, 0/75] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31873035430908203\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [45/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187300741672516\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [46/500, 0/75] loss: 0.31326622\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 
0.3187334203720093\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [47/500, 0/75] loss: 0.31326446\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187235057353973\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [48/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31871426701545713\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [49/500, 0/75] loss: 0.31326377\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3187248861789703\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [50/500, 0/75] loss: 0.31326237\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31871310114860535\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [51/500, 0/75] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31871828317642215\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [52/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186993336677551\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [53/500, 0/75] loss: 0.31326473\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31870236992836\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [54/500, 0/75] loss: 0.31326306\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 
0.3186980545520782\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [55/500, 0/75] loss: 0.31326315\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186880445480347\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [56/500, 0/75] loss: 0.31326395\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186809742450714\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [57/500, 0/75] loss: 0.31326276\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-00-57\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 57\n", + " loss: 0.3186769211292267\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 12.398419380187988\n", + " time_this_iter_s: 0.18949055671691895\n", + " time_total_s: 12.398419380187988\n", + " timestamp: 1660748457\n", + " timesteps_since_restore: 0\n", + " training_iteration: 57\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186769211292267\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [58/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186794853210449\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [59/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186738777160645\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:00:58 (running for 00:01:32.98)\n", + "Memory usage on this node: 91.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=8\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.32105705048029237 | Iter 64.000: -0.3216232446523813 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3292781735612796 | Iter 8.000: -0.3453116735816002 | Iter 4.000: -0.3608327549695969 | Iter 2.000: -0.39181416571140293 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (5 PENDING, 2 RUNNING, 8 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318674 | 0.995 | 59 | 0.989998 |\n", + "| traindata_2f206_00009 | RUNNING | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | | | | |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | 
| | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [60/500, 0/75] loss: 0.31326243\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31867490530014037\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [61/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31867299795150755\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [62/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [1/500, 0/150] loss: 0.68898034\n", 
+ "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186690926551819\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [63/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [1/500, 100/150] loss: 0.52779633\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31867385387420655\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.4979471242427826\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "Result for traindata_2f206_00009:\n", + " accuracy: 0.8275\n", + " date: 2022-08-17_16-01-00\n", + " done: false\n", + " experiment_id: ee502829a8ba43c3b78c90e667ef1d8b\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.4979471242427826\n", + " mcc: 0.6824536680489807\n", + " node_ip: 192.168.85.249\n", + " pid: 3933632\n", + " should_checkpoint: true\n", + " time_since_restore: 0.7098147869110107\n", + " time_this_iter_s: 0.7098147869110107\n", + " time_total_s: 0.7098147869110107\n", + " timestamp: 1660748460\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00009\n", + " warmup_time: 0.0035848617553710938\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [64/500, 0/75] loss: 0.31326237\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [2/500, 0/150] loss: 0.54928958\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31865538835525514\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [65/500, 0/75] loss: 0.31326234\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [2/500, 100/150] loss: 0.56930006\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186586534976959\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [66/500, 0/75] loss: 0.3132627\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3880224198102951\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [3/500, 0/150] loss: 0.35905948\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31865242004394534\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [3/500, 100/150] loss: 0.38402584\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [67/500, 0/75] loss: 0.31326252\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31865452527999877\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [68/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3550905805826187\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [4/500, 0/150] loss: 0.32053831\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [4/500, 100/150] loss: 0.35488534\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31864811658859254\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [69/500, 0/75] loss: 0.31326261\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186446499824524\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [70/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.34480741202831267\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933632)\u001b[0m [5/500, 0/150] loss: 0.31565803\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [5/500, 100/150] loss: 0.32043114\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31863592982292177\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [71/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.33939932644367216\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [6/500, 0/150] loss: 0.31778467\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31864399433135987\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [72/500, 0/75] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [6/500, 100/150] loss: 0.3146638\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186356317996979\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [73/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.33591542541980746\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [7/500, 0/150] loss: 0.32379115\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186275744438171\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [74/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [7/500, 100/150] loss: 0.31354704\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186317765712738\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [75/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.35365145742893217\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [8/500, 0/150] loss: 0.32406104\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186245906352997\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [76/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [8/500, 100/150] loss: 0.31536555\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862412095069886\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [77/500, 0/75] loss: 0.31326228\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.33084432542324066\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [9/500, 0/150] loss: 0.31345934\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862515211105347\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [78/500, 0/75] loss: 0.31326234\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [9/500, 100/150] loss: 0.31353688\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624974489212\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [79/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.33006534576416013\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [10/500, 0/150] loss: 0.31497702\n", 
+ "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-02\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 79\n", + " loss: 0.31862558245658873\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 17.494374990463257\n", + " time_this_iter_s: 0.17295145988464355\n", + " time_total_s: 17.494374990463257\n", + " timestamp: 1660748462\n", + " timesteps_since_restore: 0\n", + " training_iteration: 79\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862558245658873\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [80/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [10/500, 100/150] loss: 0.31365314\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3302014225721359\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [11/500, 0/150] loss: 0.31389642\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862573623657225\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [81/500, 0/75] loss: 0.31326202\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:03 (running for 00:01:38.07)\n", + "Memory usage on this node: 92.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=8\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.32105705048029237 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3292781735612796 | Iter 8.000: -0.34472739458084106 | Iter 4.000: -0.3557737797498703 | Iter 2.000: 
-0.3880224198102951 | Iter 1.000: -0.6523340312334207\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (5 PENDING, 2 RUNNING, 8 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318625 | 0.995 | 81 | 0.989998 |\n", + "| traindata_2f206_00009 | RUNNING | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.330201 | 0.9825 | 10 | 0.96501 |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | 
TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862472534179687\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [82/500, 0/75] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [11/500, 100/150] loss: 0.31360418\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32866484224796294\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [12/500, 0/150] loss: 0.31529146\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862597465515136\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [83/500, 0/75] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [12/500, 100/150] loss: 0.31356937\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186254560947418\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [84/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32896833956241606\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [13/500, 0/150] loss: 0.31343555\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244475841522\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [85/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [13/500, 100/150] loss: 0.31373921\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862581968307496\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [86/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3279634094238281\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [14/500, 0/150] loss: 0.31387889\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862414002418515\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [87/500, 0/75] loss: 0.3132624\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [14/500, 100/150] loss: 0.3135893\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862423300743103\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [88/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.327578729391098\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [15/500, 0/150] loss: 0.31331557\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186241662502289\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [89/500, 0/75] loss: 0.31326234\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [15/500, 100/150] loss: 0.31351694\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624165058136\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [90/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32728382825851443\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [16/500, 0/150] loss: 0.31330884\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186241614818573\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [91/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [16/500, 100/150] loss: 0.31372726\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3269789707660675\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [17/500, 0/150] loss: 0.31399038\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862411022186277\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [92/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862406849861147\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [93/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [17/500, 100/150] loss: 
0.31345332\n", + "Result for traindata_2f206_00009:\n", + " accuracy: 0.99\n", + " date: 2022-08-17_16-01-05\n", + " done: false\n", + " experiment_id: ee502829a8ba43c3b78c90e667ef1d8b\n", + " hostname: tesla\n", + " iterations_since_restore: 17\n", + " loss: 0.32678875386714934\n", + " mcc: 0.9800490036753062\n", + " node_ip: 192.168.85.249\n", + " pid: 3933632\n", + " should_checkpoint: true\n", + " time_since_restore: 5.889561653137207\n", + " time_this_iter_s: 0.32332420349121094\n", + " time_total_s: 5.889561653137207\n", + " timestamp: 1660748465\n", + " timesteps_since_restore: 0\n", + " training_iteration: 17\n", + " trial_id: 2f206_00009\n", + " warmup_time: 0.0035848617553710938\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32678875386714934\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186241018772125\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [94/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [18/500, 0/150] loss: 0.31360865\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186240160465241\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [18/500, 100/150] loss: 0.31342942\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [95/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32620088577270506\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862412810325624\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [96/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func 
pid=3933632)\u001b[0m [19/500, 0/150] loss: 0.3135516\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [19/500, 100/150] loss: 0.3135539\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862414002418515\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [97/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3259702253341675\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [20/500, 0/150] loss: 0.31348619\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [98/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [20/500, 100/150] loss: 0.31347066\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243748664856\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [99/500, 0/75] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32576411187648774\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [21/500, 0/150] loss: 0.31347036\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243772506714\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [100/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [21/500, 100/150] loss: 0.31365526\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243677139282\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [101/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.33131646633148193\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [22/500, 0/150] loss: 0.3134293\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243677139282\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [102/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [22/500, 100/150] loss: 0.31342953\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862436413764955\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [103/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3384240275621414\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [23/500, 0/150] loss: 0.3139742\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243760585785\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [104/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [23/500, 100/150] loss: 0.31339586\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438082695005\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [105/500, 0/75] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3249796724319458\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [24/500, 0/150] loss: 
0.31326705\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438082695005\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [106/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [24/500, 100/150] loss: 0.31327307\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243748664856\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [107/500, 0/75] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32631585955619813\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [25/500, 0/150] loss: 0.31327906\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243629455566\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [108/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [25/500, 100/150] loss: 0.31329706\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3246171146631241\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [26/500, 0/150] loss: 0.31339484\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-07\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 108\n", + " loss: 0.31862435817718504\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 22.569194793701172\n", + " time_this_iter_s: 0.1983795166015625\n", + " time_total_s: 22.569194793701172\n", + " timestamp: 1660748467\n", + " 
timesteps_since_restore: 0\n", + " training_iteration: 108\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862435817718504\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [109/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862436056137083\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [110/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [26/500, 100/150] loss: 0.31340328\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3241936057806015\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:08 (running for 00:01:43.08)\n", + "Memory usage on this node: 92.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=8\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.32105705048029237 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.34472739458084106 | Iter 4.000: -0.3557737797498703 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6523340312334207\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (5 PENDING, 2 RUNNING, 8 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + 
"|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 109 | 0.989998 |\n", + "| traindata_2f206_00009 | RUNNING | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.324194 | 0.9925 | 26 | 0.985112 |\n", + "| traindata_2f206_00010 | PENDING | | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 
| 1 | 0.186967 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862436890602114\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [111/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [27/500, 0/150] loss: 0.3132745\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [27/500, 100/150] loss: 0.3133609\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862436890602114\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [112/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243772506714\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [113/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.3241724693775177\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [28/500, 0/150] loss: 0.31348535\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [28/500, 100/150] loss: 0.31326613\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243724822998\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [114/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.323753827214241\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [29/500, 0/150] loss: 0.31335801\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624370098114\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [115/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [29/500, 100/150] loss: 0.31340244\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243760585785\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [116/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32345715939998626\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [30/500, 0/150] loss: 0.31331125\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438082695005\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [117/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624382019043\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [30/500, 100/150] loss: 0.3132689\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [118/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32346185863018034\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [31/500, 0/150] loss: 0.3133336\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [119/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [31/500, 100/150] loss: 
0.31335777\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 5\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [120/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32344073712825777\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [32/500, 0/150] loss: 0.31339115\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624382019043\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [121/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m [32/500, 100/150] loss: 0.31326455\n", + "Result for traindata_2f206_00009:\n", + " accuracy: 0.9925\n", + " date: 2022-08-17_16-01-10\n", + " done: true\n", + " experiment_id: ee502829a8ba43c3b78c90e667ef1d8b\n", + " hostname: tesla\n", + " iterations_since_restore: 32\n", + " loss: 0.32321406781673434\n", + " mcc: 0.9851115701658352\n", + " node_ip: 192.168.85.249\n", + " pid: 3933632\n", + " should_checkpoint: true\n", + " time_since_restore: 10.841515064239502\n", + " time_this_iter_s: 0.33420538902282715\n", + " time_total_s: 10.841515064239502\n", + " timestamp: 1660748470\n", + " timesteps_since_restore: 0\n", + " training_iteration: 32\n", + " trial_id: 2f206_00009\n", + " warmup_time: 0.0035848617553710938\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [122/500, 0/75] loss: 0.31326228\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m The Current Loss: 0.32321406781673434\n", + "\u001b[2m\u001b[36m(func pid=3933632)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [123/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [124/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [125/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [126/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [127/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438440322877\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [128/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439393997194\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [129/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [130/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m The Current Loss: 0.31862438559532164\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [131/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243796348572\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [132/500, 0/75] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [133/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [134/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624382019043\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [135/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [136/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [137/500, 0/75] loss: 0.31326169\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-12\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 137\n", + " loss: 0.31862438797950743\n", + " mcc: 
0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 27.58850932121277\n", + " time_this_iter_s: 0.1733231544494629\n", + " time_total_s: 27.58850932121277\n", + " timestamp: 1660748472\n", + " timesteps_since_restore: 0\n", + " training_iteration: 137\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [138/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [139/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:13 (running for 00:01:48.16)\n", + "Memory usage on this node: 91.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=9\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.34472739458084106 | Iter 4.000: -0.3557737797498703 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6523340312334207\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (4 PENDING, 2 RUNNING, 9 TERMINATED)\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 139 | 0.989998 |\n", + "| traindata_2f206_00010 | RUNNING | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | | | | |\n", + "| traindata_2f206_00011 | PENDING | | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 
192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [140/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438440322877\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [141/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438559532164\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [142/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [143/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933939)\u001b[0m [1/500, 0/75] loss: 0.69144946\n", + "Result for traindata_2f206_00010:\n", + " accuracy: 0.665\n", + " date: 2022-08-17_16-01-15\n", + " done: true\n", + " experiment_id: e1d39c7d21af4ca7a307e706666a8950\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6823769402503967\n", + " mcc: 0.36088818178897286\n", + " node_ip: 192.168.85.249\n", + " pid: 3933939\n", + " should_checkpoint: true\n", + " time_since_restore: 0.6237921714782715\n", + " time_this_iter_s: 0.6237921714782715\n", + " time_total_s: 
0.6237921714782715\n", + " timestamp: 1660748475\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00010\n", + " warmup_time: 0.0044193267822265625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933939)\u001b[0m The Current Loss: 0.6823769402503967\n", + "\u001b[2m\u001b[36m(func pid=3933939)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439393997194\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [144/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [145/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [146/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [147/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [148/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [149/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m 
trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [150/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [151/500, 0/75] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [152/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [153/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [154/500, 0/75] loss: 0.31326261\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [155/500, 0/75] loss: 0.37576166\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-18\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 155\n", + " loss: 0.31862439393997194\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 32.74511361122131\n", + " time_this_iter_s: 0.19373774528503418\n", + " time_total_s: 32.74511361122131\n", + " timestamp: 1660748478\n", + " timesteps_since_restore: 0\n", + " training_iteration: 155\n", + " trial_id: 
2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439393997194\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [156/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [157/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:18 (running for 00:01:53.30)\n", + "Memory usage on this node: 90.8/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=10\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.34472739458084106 | Iter 4.000: -0.3557737797498703 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (3 PENDING, 2 RUNNING, 10 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + 
"|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 157 | 0.989998 |\n", + "| traindata_2f206_00011 | RUNNING | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | | | | |\n", + "| traindata_2f206_00012 | PENDING | | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 
192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [158/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [159/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [160/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934072)\u001b[0m [1/500, 0/75] loss: 0.69089395\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "Result for traindata_2f206_00011:\n", + " accuracy: 0.495\n", + " date: 2022-08-17_16-01-21\n", + " done: true\n", + " experiment_id: 8dae70a7ec2f4efa94faccd577452da5\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.8182617497444152\n", + " mcc: 0.0\n", + " node_ip: 192.168.85.249\n", + " pid: 3934072\n", + " should_checkpoint: true\n", + " time_since_restore: 0.5485138893127441\n", + " time_this_iter_s: 0.5485138893127441\n", + " time_total_s: 0.5485138893127441\n", + " timestamp: 1660748481\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00011\n", + " warmup_time: 0.0033195018768310547\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [161/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934072)\u001b[0m The Current Loss: 0.8182617497444152\n", + 
"\u001b[2m\u001b[36m(func pid=3934072)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438321113584\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [162/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [163/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438559532164\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [164/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [165/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [166/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438440322877\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [167/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438559532164\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [168/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [169/500, 0/75] loss: 0.31326213\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-23\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 169\n", + " loss: 0.31862438678741456\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 37.89649963378906\n", + " time_this_iter_s: 0.170393705368042\n", + " time_total_s: 37.89649963378906\n", + " timestamp: 1660748483\n", + " timesteps_since_restore: 0\n", + " training_iteration: 169\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [170/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [171/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:23 (running for 00:01:58.46)\n", + "Memory usage on this node: 90.9/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=11\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.34472739458084106 | Iter 4.000: -0.3557737797498703 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6739422512054443\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 
0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 PENDING, 2 RUNNING, 11 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 171 | 0.989998 |\n", + "| traindata_2f206_00012 | RUNNING | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | | | | |\n", + "| traindata_2f206_00013 | PENDING | | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| 
traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [172/500, 0/75] loss: 0.31326228\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438559532164\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [173/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [174/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934157)\u001b[0m [1/500, 0/150] loss: 0.67718554\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438082695005\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [175/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934157)\u001b[0m [1/500, 100/150] loss: 0.66563106\n", + "Result for traindata_2f206_00012:\n", + " accuracy: 0.515\n", + " date: 2022-08-17_16-01-26\n", + " 
done: true\n", + " experiment_id: 137e10c08dcc4b8cbe1ef9f1216e57f0\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6802591121196747\n", + " mcc: 0.08824300313883983\n", + " node_ip: 192.168.85.249\n", + " pid: 3934157\n", + " should_checkpoint: true\n", + " time_since_restore: 0.6765584945678711\n", + " time_this_iter_s: 0.6765584945678711\n", + " time_total_s: 0.6765584945678711\n", + " timestamp: 1660748486\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00012\n", + " warmup_time: 0.003431558609008789\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [176/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934157)\u001b[0m The Current Loss: 0.6802591121196747\n", + "\u001b[2m\u001b[36m(func pid=3934157)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [177/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [178/500, 0/75] loss: 0.31326252\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [179/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [180/500, 0/75] loss: 0.31326172\n", 
+ "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [181/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [182/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [183/500, 0/75] loss: 0.31326181\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-28\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 183\n", + " loss: 0.3186243999004364\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 42.96335315704346\n", + " time_this_iter_s: 0.17350220680236816\n", + " time_total_s: 42.96335315704346\n", + " timestamp: 1660748488\n", + " timesteps_since_restore: 0\n", + " training_iteration: 183\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243999004364\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [184/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [185/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The 
Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:28 (running for 00:02:03.54)\n", + "Memory usage on this node: 91.0/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=12\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.34472739458084106 | Iter 4.000: -0.3557737797498703 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6770060205459595\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 PENDING, 2 RUNNING, 12 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 185 | 0.989998 |\n", + "| traindata_2f206_00013 | RUNNING | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | | | | |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 
0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [186/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [187/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The 
Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [188/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438440322877\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934261)\u001b[0m [1/500, 0/75] loss: 0.68796259\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [189/500, 0/75] loss: 0.31326205\n", + "Result for traindata_2f206_00013:\n", + " accuracy: 0.535\n", + " date: 2022-08-17_16-01-32\n", + " done: false\n", + " experiment_id: 0f6fc83a869f43abb643b00d4cf1def1\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.6645839476585388\n", + " mcc: 0.19647891728167047\n", + " node_ip: 192.168.85.249\n", + " pid: 3934261\n", + " should_checkpoint: true\n", + " time_since_restore: 0.617652416229248\n", + " time_this_iter_s: 0.617652416229248\n", + " time_total_s: 0.617652416229248\n", + " timestamp: 1660748492\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00013\n", + " warmup_time: 0.003692150115966797\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934261)\u001b[0m The Current Loss: 0.6645839476585388\n", + "\u001b[2m\u001b[36m(func pid=3934261)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934261)\u001b[0m [2/500, 0/75] loss: 0.68111908\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [190/500, 0/75] loss: 0.31326219\n", + "Result for traindata_2f206_00013:\n", + " accuracy: 0.635\n", + " date: 2022-08-17_16-01-33\n", + " done: true\n", + " experiment_id: 0f6fc83a869f43abb643b00d4cf1def1\n", + " hostname: tesla\n", + " iterations_since_restore: 2\n", + " loss: 0.6261396408081055\n", + " mcc: 
0.38804963604607257\n", + " node_ip: 192.168.85.249\n", + " pid: 3934261\n", + " should_checkpoint: true\n", + " time_since_restore: 0.7999439239501953\n", + " time_this_iter_s: 0.18229150772094727\n", + " time_total_s: 0.7999439239501953\n", + " timestamp: 1660748493\n", + " timesteps_since_restore: 0\n", + " training_iteration: 2\n", + " trial_id: 2f206_00013\n", + " warmup_time: 0.003692150115966797\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934261)\u001b[0m The Current Loss: 0.6261396408081055\n", + "\u001b[2m\u001b[36m(func pid=3934261)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [191/500, 0/75] loss: 0.31326166\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-33\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 191\n", + " loss: 0.318624392747879\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 48.027474880218506\n", + " time_this_iter_s: 0.1855309009552002\n", + " time_total_s: 48.027474880218506\n", + " timestamp: 1660748493\n", + " timesteps_since_restore: 0\n", + " training_iteration: 191\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [192/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [193/500, 
0/75] loss: 0.31326196\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:33 (running for 00:02:08.63)\n", + "Memory usage on this node: 90.7/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.34472739458084106 | Iter 4.000: -0.3557737797498703 | Iter 2.000: -0.408509336411953 | Iter 1.000: -0.6739422512054443\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 PENDING, 1 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 193 | 0.989998 |\n", + "| traindata_2f206_00014 | PENDING | | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 
192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [194/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [195/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [196/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [197/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [198/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [199/500, 0/75] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [200/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [201/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [202/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [203/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [204/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [205/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [206/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [207/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [208/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [209/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [210/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624382019043\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [211/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439393997194\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [212/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [213/500, 0/75] loss: 0.31326258\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [214/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [215/500, 0/75] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [216/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [217/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [218/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438559532164\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [219/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [220/500, 0/75] loss: 0.31326222\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-38\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 220\n", + " loss: 0.3186243951320648\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 53.12860321998596\n", + " time_this_iter_s: 0.17338228225708008\n", + " time_total_s: 53.12860321998596\n", + " timestamp: 1660748498\n", + " timesteps_since_restore: 0\n", + " training_iteration: 220\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [221/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [222/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [1/500, 0/150] loss: 0.69365674\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:38 (running for 00:02:13.70)\n", + "Memory usage on this node: 92.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.34472739458084106 | Iter 
4.000: -0.3557737797498703 | Iter 2.000: -0.408509336411953 | Iter 1.000: -0.6739422512054443\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 222 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | | | | |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 
192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [223/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [1/500, 100/150] loss: 0.39394668\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.9575\n", + " date: 2022-08-17_16-01-39\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 1\n", + " loss: 0.3880030298233032\n", + " mcc: 0.915256502325232\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 0.7718656063079834\n", + " time_this_iter_s: 0.7718656063079834\n", + " time_total_s: 0.7718656063079834\n", + " timestamp: 
1660748499\n", + " timesteps_since_restore: 0\n", + " training_iteration: 1\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [224/500, 0/75] loss: 0.31326216\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3880030298233032\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [2/500, 0/150] loss: 0.4975951\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [225/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [2/500, 100/150] loss: 0.32734817\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [226/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3631923866271973\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [3/500, 0/150] loss: 0.36709976\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [227/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [3/500, 100/150] loss: 0.42293867\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [228/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.33224187672138217\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [4/500, 0/150] loss: 0.31430644\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [229/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [4/500, 100/150] loss: 0.3141554\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [230/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.32978655815124513\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [5/500, 0/150] loss: 0.31432837\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [5/500, 100/150] loss: 0.31347615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [231/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [232/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.32571832716465\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [6/500, 0/150] loss: 
0.31393403\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [6/500, 100/150] loss: 0.31533647\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438797950743\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [233/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439155578615\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3244891506433487\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [7/500, 0/150] loss: 0.31364217\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [234/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [235/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [7/500, 100/150] loss: 0.31473225\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [236/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3245051342248917\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [8/500, 0/150] loss: 0.31356585\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [237/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [8/500, 
100/150] loss: 0.31362465\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438678741456\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [238/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [239/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.32564359188079833\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [9/500, 0/150] loss: 0.31354365\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243951320648\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [240/500, 0/75] loss: 0.31326264\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [9/500, 100/150] loss: 0.31338432\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.32306579887866976\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [241/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [10/500, 0/150] loss: 0.31384698\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [242/500, 0/75] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [10/500, 100/150] loss: 0.31389788\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m The Current Loss: 0.31862438917160035\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [243/500, 0/75] loss: 0.31326237\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.32250142872333526\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [11/500, 0/150] loss: 0.31328008\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439632415773\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [244/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [11/500, 100/150] loss: 0.31338194\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [245/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862439393997194\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [246/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.322434663772583\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [12/500, 0/150] loss: 0.31352481\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624392747879\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [247/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [12/500, 100/150] loss: 0.31334558\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [248/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.32210156381130217\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [13/500, 0/150] loss: 0.31349277\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [249/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [13/500, 100/150] loss: 0.31332248\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243903636932\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [250/500, 0/75] loss: 0.31326178\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-43\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 250\n", + " loss: 0.3186243999004364\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 58.25939869880676\n", + " time_this_iter_s: 0.1700725555419922\n", + " time_total_s: 58.25939869880676\n", + " timestamp: 1660748503\n", + " timesteps_since_restore: 0\n", + " training_iteration: 250\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243999004364\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.34558006763458254\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger 
Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [14/500, 0/150] loss: 0.31338084\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [251/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244070529938\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [252/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [14/500, 100/150] loss: 0.31342059\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:43 (running for 00:02:18.82)\n", + "Memory usage on this node: 92.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.32053314264004046 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3269789707660675 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 251 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 
0.420891 | 256 | 0.000670263 | 0.32066 | 0.995 | 14 | 0.990048 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243999004364\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [253/500, 0/75] loss: 0.31326252\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3206604504585266\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [15/500, 0/150] loss: 0.31339821\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [254/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [15/500, 100/150] loss: 0.31334019\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [255/500, 0/75] loss: 0.31326202\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-44\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 15\n", + " loss: 0.3199175465106964\n", + " mcc: 0.9900495037128093\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 6.0950398445129395\n", + " time_this_iter_s: 0.42972517013549805\n", + " time_total_s: 6.0950398445129395\n", + " timestamp: 1660748504\n", + " timesteps_since_restore: 0\n", + " training_iteration: 15\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3199175465106964\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [16/500, 0/150] loss: 0.31328967\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [256/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [257/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [16/500, 100/150] loss: 0.31379646\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244022846222\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [258/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3195202827453613\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [17/500, 0/150] loss: 0.31335658\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440943717957\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 5\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [259/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [17/500, 100/150] loss: 0.3133283\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [260/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.3195787477493286\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [18/500, 0/150] loss: 0.31337157\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [261/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440824508664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [262/500, 0/75] loss: 0.3132624\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [18/500, 100/150] loss: 0.31327039\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3195468205213547\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [263/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [19/500, 0/150] loss: 0.3132737\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440943717957\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [264/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [19/500, 100/150] loss: 0.31327653\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244070529938\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [265/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31946499943733214\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [20/500, 0/150] loss: 0.31341106\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [266/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [267/500, 0/75] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [20/500, 100/150] loss: 0.31327718\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.319440593123436\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [21/500, 0/150] loss: 0.3134042\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244034767151\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [268/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440824508664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [269/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [21/500, 100/150] loss: 0.31333622\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [270/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.319411798119545\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [22/500, 
0/150] loss: 0.31335655\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244058609009\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [271/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243987083435\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [22/500, 100/150] loss: 0.31333339\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [272/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3193878591060638\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [23/500, 0/150] loss: 0.31326786\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440824508664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [273/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [23/500, 100/150] loss: 0.31327236\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441062927244\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [274/500, 0/75] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244034767151\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [275/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.319408221244812\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [24/500, 0/150] loss: 0.31326473\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m 
The Current Loss: 0.3186244058609009\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [276/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [24/500, 100/150] loss: 0.31326619\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [277/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31926555693149566\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [25/500, 0/150] loss: 0.31327266\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243975162506\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [278/500, 0/75] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [25/500, 100/150] loss: 0.31328458\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441062927244\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [279/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.319254075884819\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [26/500, 0/150] loss: 0.31326312\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-48\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 279\n", + " loss: 0.3186243999004364\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: 
true\n", + " time_since_restore: 63.31006622314453\n", + " time_this_iter_s: 0.17507076263427734\n", + " time_total_s: 63.31006622314453\n", + " timestamp: 1660748508\n", + " timesteps_since_restore: 0\n", + " training_iteration: 279\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243999004364\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [280/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [26/500, 100/150] loss: 0.31326804\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440824508664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [281/500, 0/75] loss: 0.31326196\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:48 (running for 00:02:23.83)\n", + "Memory usage on this node: 92.3/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.3225881889462471 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + 
"|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 280 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.31915 | 0.9925 | 26 | 0.985012 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| 
traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441182136536\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [282/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3191504979133606\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [27/500, 0/150] loss: 0.31327617\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244058609009\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [283/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [27/500, 100/150] loss: 0.31328326\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244022846222\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [284/500, 0/75] loss: 0.31326258\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31911999106407163\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [28/500, 0/150] loss: 0.31326544\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [285/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [28/500, 100/150] loss: 0.31330907\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-49\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 28\n", + " loss: 0.31912717640399935\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 11.469220399856567\n", + " time_this_iter_s: 0.38771796226501465\n", + " time_total_s: 11.469220399856567\n", + " timestamp: 1660748509\n", + " timesteps_since_restore: 0\n", + " training_iteration: 28\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186243999004364\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [286/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31912717640399935\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [29/500, 0/150] loss: 0.31328699\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244034767151\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [287/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [29/500, 100/150] loss: 0.31329188\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244058609009\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [288/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.319082470536232\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [30/500, 0/150] loss: 0.31326351\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624415397644\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [289/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [30/500, 100/150] loss: 0.31327021\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244070529938\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [290/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31897239506244657\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [31/500, 0/150] loss: 0.31326616\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [291/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [31/500, 100/150] loss: 0.31326532\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440824508664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [292/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3189299613237381\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [32/500, 0/150] loss: 0.31328559\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441301345823\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m 
trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [293/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [32/500, 100/150] loss: 0.31328216\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440824508664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [294/500, 0/75] loss: 0.37576178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31884957730770114\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [33/500, 0/150] loss: 0.31326419\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440943717957\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [295/500, 0/75] loss: 0.31326237\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [33/500, 100/150] loss: 0.31326464\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [296/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31882657051086427\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [34/500, 0/150] loss: 0.31326407\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [297/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [34/500, 100/150] loss: 0.31328255\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3187689006328583\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [35/500, 0/150] loss: 0.31328315\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441182136536\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [298/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862440824508664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [299/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [35/500, 100/150] loss: 0.31326991\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3187405502796173\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [36/500, 0/150] loss: 0.31326413\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441062927244\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [300/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [36/500, 100/150] loss: 0.31326479\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441420555115\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [301/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441658973695\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [302/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31874017596244814\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [37/500, 0/150] loss: 0.31326345\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [37/500, 100/150] loss: 0.31326395\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441182136536\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [303/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.318685507774353\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [38/500, 0/150] loss: 0.31328207\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441301345823\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [304/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [38/500, 100/150] loss: 0.31326613\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441658973695\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [305/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3186573499441147\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [39/500, 0/150] loss: 0.31326765\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [306/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441182136536\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [307/500, 0/75] loss: 0.31326169\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [39/500, 100/150] loss: 0.31326646\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31854499876499176\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [40/500, 0/150] loss: 0.31326252\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624415397644\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [308/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [40/500, 100/150] loss: 0.31326234\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-53\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 308\n", + " loss: 0.31862441658973695\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 68.36505508422852\n", + " time_this_iter_s: 0.1735379695892334\n", + " time_total_s: 68.36505508422852\n", + " timestamp: 1660748513\n", + " timesteps_since_restore: 0\n", + " training_iteration: 308\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441658973695\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [309/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31853388726711274\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [41/500, 0/150] loss: 0.31326476\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441182136536\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [310/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [41/500, 100/150] loss: 0.3132658\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:54 (running for 00:02:28.94)\n", + "Memory usage on this node: 92.5/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 310 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.318534 | 0.995 | 40 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 
2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244225502014\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m 
[311/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31849122285842896\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [42/500, 0/150] loss: 0.31326309\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862442135810853\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [312/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [42/500, 100/150] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441420555115\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [313/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31846828818321227\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [43/500, 0/150] loss: 0.31326255\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244201660156\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [314/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [43/500, 100/150] loss: 0.31326517\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-54\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 43\n", + " loss: 0.3184069633483887\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 16.47959041595459\n", + " time_this_iter_s: 0.32845258712768555\n", + " time_total_s: 16.47959041595459\n", + " timestamp: 
1660748514\n", + " timesteps_since_restore: 0\n", + " training_iteration: 43\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [315/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3184069633483887\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [44/500, 0/150] loss: 0.31326297\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [316/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [44/500, 100/150] loss: 0.31326473\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31836917996406555\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [45/500, 0/150] loss: 0.31326702\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441301345823\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [317/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624404668808\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [318/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [45/500, 100/150] loss: 0.31326458\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31838419258594514\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [46/500, 0/150] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624415397644\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [319/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244070529938\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [46/500, 100/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [320/500, 0/75] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3183001488447189\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [47/500, 0/150] loss: 0.31326514\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244225502014\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [321/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [47/500, 100/150] loss: 0.31326306\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244070529938\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [322/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3182873886823654\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [48/500, 0/150] loss: 0.31326681\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441301345823\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [323/500, 0/75] loss: 
0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [48/500, 100/150] loss: 0.31326354\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624415397644\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [324/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31820154309272763\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [49/500, 0/150] loss: 0.31327254\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [325/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [49/500, 100/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [326/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3182203906774521\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [50/500, 0/150] loss: 0.31326404\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244201660156\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [327/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [50/500, 100/150] loss: 0.31326404\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441658973695\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [328/500, 
0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31815807402133944\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [51/500, 0/150] loss: 0.31326392\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244249343872\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [329/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [51/500, 100/150] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244201660156\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3181974357366562\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [330/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [52/500, 0/150] loss: 0.31326559\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244201660156\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [331/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [52/500, 100/150] loss: 0.31326416\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3181015622615814\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441897392274\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [332/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [53/500, 0/150] loss: 0.31326449\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m 
The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [333/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [53/500, 100/150] loss: 0.31326708\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [334/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3180959939956665\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [54/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [54/500, 100/150] loss: 0.31326479\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441658973695\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [335/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441897392274\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [336/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3180396568775177\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [55/500, 0/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244225502014\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [337/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [55/500, 100/150] loss: 0.3132638\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.31801902115345\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [56/500, 0/150] loss: 0.31326187\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-01-58\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 337\n", + " loss: 0.31862442135810853\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 73.43547582626343\n", + " time_this_iter_s: 0.17564082145690918\n", + " time_total_s: 73.43547582626343\n", + " timestamp: 1660748518\n", + " timesteps_since_restore: 0\n", + " training_iteration: 337\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862442135810853\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [338/500, 0/75] loss: 0.31326255\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [339/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [56/500, 100/150] loss: 0.3132658\n", + "== Status ==\n", + "Current time: 2022-08-17 16:01:59 (running for 00:02:33.95)\n", + "Memory usage on this node: 92.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3215651273727417 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: 
-0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 338 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.317971 | 0.995 | 56 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 
0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3179713332653046\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [57/500, 0/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624415397644\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [340/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [57/500, 100/150] loss: 0.31326491\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244225502014\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [341/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.31795023679733275\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244261264801\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [342/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [58/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [58/500, 100/150] loss: 0.31326422\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244225502014\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [343/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 0.3179220497608185\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244201660156\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [59/500, 0/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [344/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [59/500, 100/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441420555115\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [345/500, 0/75] loss: 0.31326208\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-00\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 
59\n", + " loss: 0.31789205014705657\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 21.78721022605896\n", + " time_this_iter_s: 0.32529449462890625\n", + " time_total_s: 21.78721022605896\n", + " timestamp: 1660748520\n", + " timesteps_since_restore: 0\n", + " training_iteration: 59\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31789205014705657\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [60/500, 0/150] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244201660156\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [346/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [60/500, 100/150] loss: 0.31326288\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244177818298\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [347/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31788512885570525\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [61/500, 0/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244249343872\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [348/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244249343872\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [349/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [61/500, 100/150] loss: 0.31326327\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31785150587558747\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [62/500, 0/150] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862442135810853\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [350/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [62/500, 100/150] loss: 0.31326306\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244261264801\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [351/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31779961824417113\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [63/500, 0/150] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441658973695\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [352/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [63/500, 100/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244249343872\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [353/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3178002274036407\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [64/500, 0/150] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244332790375\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [354/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [64/500, 100/150] loss: 0.31326619\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [355/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31782459020614623\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [65/500, 0/150] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244297027588\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [356/500, 0/75] loss: 0.31326264\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [65/500, 100/150] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244297027588\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [357/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31774441301822665\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [66/500, 0/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244237422943\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [358/500, 0/75] loss: 
0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [66/500, 100/150] loss: 0.31326276\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3177459442615509\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244225502014\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [67/500, 0/150] loss: 0.31326306\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [359/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862442135810853\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [360/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [67/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3177258789539337\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [361/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [68/500, 0/150] loss: 0.31326309\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862441182136536\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [362/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [68/500, 100/150] loss: 0.3132638\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [363/500, 
0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31772623777389525\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [69/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [69/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244261264801\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [364/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [365/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31771251738071443\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [70/500, 0/150] loss: 0.31326258\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [70/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624427318573\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [366/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244249343872\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31768216490745543\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [71/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [367/500, 0/75] loss: 0.31326175\n", + "Result for traindata_2f206_00006:\n", + " 
accuracy: 0.995\n", + " date: 2022-08-17_16-02-03\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 367\n", + " loss: 0.3186244297027588\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 78.59783458709717\n", + " time_this_iter_s: 0.17190980911254883\n", + " time_total_s: 78.59783458709717\n", + " timestamp: 1660748523\n", + " timesteps_since_restore: 0\n", + " training_iteration: 367\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244297027588\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [368/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [71/500, 100/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3176569890975952\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [72/500, 0/150] loss: 0.31326273\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:04 (running for 00:02:39.00)\n", + "Memory usage on this node: 93.0/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 
TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 368 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.317657 | 0.995 | 71 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 
0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244261264801\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [369/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244332790375\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [370/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [72/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3175733506679535\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [73/500, 0/150] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244249343872\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [371/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [73/500, 100/150] loss: 0.31326213\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443685531616\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [372/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.317607199549675\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [74/500, 0/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244237422943\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [373/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [74/500, 100/150] loss: 0.31326252\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244297027588\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [374/500, 0/75] loss: 0.31326234\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.317579385638237\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244308948517\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [75/500, 0/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [375/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [75/500, 100/150] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244356632233\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [376/500, 0/75] loss: 0.31326175\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 
2022-08-17_16-02-05\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 75\n", + " loss: 0.3175583851337433\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 27.1081326007843\n", + " time_this_iter_s: 0.33081698417663574\n", + " time_total_s: 27.1081326007843\n", + " timestamp: 1660748525\n", + " timesteps_since_restore: 0\n", + " training_iteration: 75\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3175583851337433\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [76/500, 0/150] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244249343872\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [377/500, 0/75] loss: 0.31326249\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [76/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [378/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.317538201212883\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [77/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [379/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
[77/500, 100/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443447113037\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [380/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3175318670272827\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [78/500, 0/150] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244404315948\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [381/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [78/500, 100/150] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244308948517\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [382/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3174699664115906\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [79/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [383/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [79/500, 100/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624427318573\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [384/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.31746466815471647\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [80/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244332790375\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [385/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [80/500, 100/150] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244297027588\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [386/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3174154496192932\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [81/500, 0/150] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443923950196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [387/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [81/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244285106659\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [388/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3174386984109879\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [82/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443447113037\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [389/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [82/500, 100/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3174152010679245\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [83/500, 0/150] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443923950196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [390/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443685531616\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [391/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [83/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.317433403134346\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [84/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244332790375\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [392/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244404315948\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [84/500, 100/150] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [393/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 
0.31862443685531616\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31736966788768767\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [394/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [85/500, 0/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443685531616\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [395/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [85/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244356632233\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [396/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3173940187692642\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [86/500, 0/150] loss: 0.31326166\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-08\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 396\n", + " loss: 0.31862444639205934\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 83.66557145118713\n", + " time_this_iter_s: 0.171980619430542\n", + " time_total_s: 83.66557145118713\n", + " timestamp: 1660748528\n", + " timesteps_since_restore: 0\n", + " training_iteration: 396\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m The Current Loss: 0.31862444639205934\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [397/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [86/500, 100/150] loss: 0.31326187\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:09 (running for 00:02:44.07)\n", + "Memory usage on this node: 92.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 397 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.317394 | 0.995 | 85 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + 
"| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244428157806\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [398/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.317322256565094\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [87/500, 0/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244356632233\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [399/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244297027588\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [87/500, 100/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [400/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443447113037\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [401/500, 0/75] loss: 0.31326228\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31732347667217253\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [88/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443804740903\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [402/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [88/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244320869446\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [403/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31727528750896455\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [89/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443923950196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [404/500, 0/75] loss: 0.31326246\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [89/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3173087823390961\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [90/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444162368775\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [405/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444162368775\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [406/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [90/500, 100/150] loss: 0.31326172\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-10\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 90\n", + " loss: 0.31728593111038206\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 32.4042444229126\n", + " time_this_iter_s: 0.33315157890319824\n", + " time_total_s: 
32.4042444229126\n", + " timestamp: 1660748530\n", + " timesteps_since_restore: 0\n", + " training_iteration: 90\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31728593111038206\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [91/500, 0/150] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443804740903\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [407/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443804740903\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [91/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [408/500, 0/75] loss: 0.31326225\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31730142176151277\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [92/500, 0/150] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444400787354\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [409/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [92/500, 100/150] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244332790375\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [410/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31724517047405243\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [93/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244451999664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [411/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [93/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443685531616\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [412/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3172326761484146\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [94/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244356632233\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [413/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [94/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244428157806\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [414/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3172235453128815\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [95/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244404315948\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m [415/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [95/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244451999664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3172388207912445\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [416/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [96/500, 0/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244451999664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [417/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [96/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3172296303510666\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444400787354\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [418/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [97/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244404315948\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [419/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [97/500, 100/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244428157806\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [420/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31718226671218874\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [98/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [98/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444162368775\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [421/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244451999664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [422/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3171681523323059\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [99/500, 0/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [99/500, 100/150] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443447113037\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [423/500, 0/75] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444639205934\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3171724385023117\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [100/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [424/500, 0/75] loss: 
0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443923950196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [425/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [100/500, 100/150] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3171622264385223\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [101/500, 0/150] loss: 0.31326169\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-14\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 425\n", + " loss: 0.31862444639205934\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 88.7006185054779\n", + " time_this_iter_s: 0.17435503005981445\n", + " time_total_s: 88.7006185054779\n", + " timestamp: 1660748534\n", + " timesteps_since_restore: 0\n", + " training_iteration: 425\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444639205934\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [426/500, 0/75] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:14 (running for 00:02:49.10)\n", + "Memory usage on this node: 92.9/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: 
-0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 426 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.317162 | 0.995 | 100 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 
192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [427/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [101/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3171326446533203\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [102/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444400787354\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [428/500, 0/75] loss: 0.31326252\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [102/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862443804740903\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [429/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31714131832122805\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [103/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624449968338\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [430/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [103/500, 100/150] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244559288025\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [431/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3171659630537033\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444877624513\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [104/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [432/500, 0/75] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [104/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244451999664\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [433/500, 0/75] loss: 0.31326202\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31711903512477874\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [105/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [434/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444877624513\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [435/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [105/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3170934051275253\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [106/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244571208954\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [436/500, 0/75] loss: 0.31326208\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [106/500, 100/150] loss: 0.31326169\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-15\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 106\n", + " loss: 0.3171238613128662\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 37.72188353538513\n", + " time_this_iter_s: 0.3255481719970703\n", + " time_total_s: 37.72188353538513\n", + " timestamp: 1660748535\n", + " 
timesteps_since_restore: 0\n", + " training_iteration: 106\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444400787354\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [437/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3171238613128662\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [107/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [438/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [107/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244475841522\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [439/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31706223368644715\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [108/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244475841522\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [440/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [108/500, 100/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244511604309\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [441/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3171377784013748\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [109/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444162368775\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [442/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [109/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444400787354\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [443/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3170465987920761\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [110/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244535446167\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [444/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [110/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244428157806\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [445/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3170725005865097\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [111/500, 0/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244475841522\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [446/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [111/500, 100/150] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31704565525054934\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [447/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [112/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244547367096\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [448/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [112/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.318624449968338\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [449/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31698355853557586\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [113/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244535446167\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [113/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m 
[450/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244547367096\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [451/500, 0/75] loss: 0.31326193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31697099924087524\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [114/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [114/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [452/500, 0/75] loss: 0.31326222\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31696575284004214\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [115/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [453/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [115/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [454/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3169761824607849\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [116/500, 0/150] loss: 0.31326166\n", + "Result for 
traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-19\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 454\n", + " loss: 0.3186244523525238\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 93.74758672714233\n", + " time_this_iter_s: 0.17017459869384766\n", + " time_total_s: 93.74758672714233\n", + " timestamp: 1660748539\n", + " timesteps_since_restore: 0\n", + " training_iteration: 454\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [455/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [116/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:19 (running for 00:02:54.15)\n", + "Memory usage on this node: 92.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.3208131813085996 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | 
status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 455 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316976 | 0.995 | 115 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | 
TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862444639205934\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [456/500, 0/75] loss: 0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3169919937849045\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [117/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [457/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [117/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244559288025\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [458/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3170242840051651\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [118/500, 0/150] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 
0.3186244535446167\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [459/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [118/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244583129883\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3169435799121857\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [119/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [460/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244547367096\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [461/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [119/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31696115791797635\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [120/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244511604309\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [462/500, 0/75] loss: 0.31326199\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [463/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [120/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
The Current Loss: 0.3169404131174087\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [121/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244535446167\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [464/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [121/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244559288025\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [465/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3169513213634491\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [122/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [466/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [122/500, 100/150] loss: 0.31326169\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-21\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 122\n", + " loss: 0.31696798264980314\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 42.96403503417969\n", + " time_this_iter_s: 0.32864856719970703\n", + " time_total_s: 42.96403503417969\n", + " timestamp: 1660748541\n", + " timesteps_since_restore: 0\n", + " training_iteration: 122\n", + " 
trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244571208954\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [467/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31696798264980314\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [123/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244547367096\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [468/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [123/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244547367096\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [469/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31690354883670807\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244583129883\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [124/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [470/500, 0/75] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [124/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244547367096\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [471/500, 0/75] loss: 
0.31326202\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3169535291194916\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [125/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244523525238\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [472/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [125/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244511604309\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [473/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3169177824258804\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [126/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244571208954\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [474/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [126/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244678497314\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [475/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31690934479236604\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [127/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The 
Current Loss: 0.31862446188926696\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [476/500, 0/75] loss: 0.3132624\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [127/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862446188926696\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [477/500, 0/75] loss: 0.31326228\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31688363730907443\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [128/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244583129883\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [478/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [128/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244535446167\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [479/500, 0/75] loss: 0.31326181\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3169411104917526\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [129/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244535446167\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [480/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [129/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.31685912668704985\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [130/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244535446167\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [481/500, 0/75] loss: 0.31326213\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862446188926696\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [482/500, 0/75] loss: 0.3132619\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [130/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31687084555625916\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [131/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244511604309\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [483/500, 0/75] loss: 0.31326196\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244583129883\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [131/500, 100/150] loss: 0.31326169\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-24\n", + " done: false\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 483\n", + " loss: 0.3186244583129883\n", + " mcc: 0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 98.7771246433258\n", + " 
time_this_iter_s: 0.17305874824523926\n", + " time_total_s: 98.7771246433258\n", + " timestamp: 1660748544\n", + " timesteps_since_restore: 0\n", + " training_iteration: 483\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [484/500, 0/75] loss: 0.31326216\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3168659633398056\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:24 (running for 00:02:59.17)\n", + "Memory usage on this node: 92.9/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=13\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 4.0/64 CPUs, 4.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (2 RUNNING, 13 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00006 | RUNNING | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 484 | 0.989998 |\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 
0.000670263 | 0.316866 | 0.995 | 131 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244678497314\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [485/500, 0/75] loss: 0.31326205\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [132/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [132/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244654655457\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [486/500, 0/75] loss: 0.313263\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244583129883\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [487/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31690082311630247\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [133/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [488/500, 0/75] loss: 0.31326231\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [133/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316875159740448\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [134/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3933384)\u001b[0m The Current Loss: 0.3186244630813599\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [489/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862446665763855\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [490/500, 0/75] loss: 0.31326187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [134/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3168713933229446\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [135/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862446665763855\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [491/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [135/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244654655457\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [492/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31686886966228484\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [136/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244583129883\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [493/500, 0/75] loss: 0.31326172\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [136/500, 100/150] loss: 0.31326169\n", + 
"\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [494/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3168912237882614\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [137/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862446427345276\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [495/500, 0/75] loss: 0.31326178\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [137/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244571208954\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [496/500, 0/75] loss: 0.31326175\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3168640738725662\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [138/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244571208954\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [497/500, 0/75] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [138/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-26\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 138\n", + " loss: 0.31683306515216825\n", + " mcc: 0.98999899989999\n", + " node_ip: 
192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 48.26483368873596\n", + " time_this_iter_s: 0.3258671760559082\n", + " time_total_s: 48.26483368873596\n", + " timestamp: 1660748546\n", + " timesteps_since_restore: 0\n", + " training_iteration: 138\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [498/500, 0/75] loss: 0.31326184\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31683306515216825\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [139/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.31862445950508117\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [499/500, 0/75] loss: 0.31326219\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [139/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244547367096\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m [500/500, 0/75] loss: 0.3132621\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31683449387550355\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [140/500, 0/150] loss: 0.31326166\n", + "Result for traindata_2f206_00006:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-27\n", + " done: true\n", + " experiment_id: 7cac16520aaf4c4c8b0777fab9923525\n", + " hostname: tesla\n", + " iterations_since_restore: 500\n", + " loss: 0.3186244606971741\n", + " mcc: 
0.9899977494936361\n", + " node_ip: 192.168.85.249\n", + " pid: 3933384\n", + " should_checkpoint: true\n", + " time_since_restore: 101.7056999206543\n", + " time_this_iter_s: 0.17305874824523926\n", + " time_total_s: 101.7056999206543\n", + " timestamp: 1660748547\n", + " timesteps_since_restore: 0\n", + " training_iteration: 500\n", + " trial_id: 2f206_00006\n", + " warmup_time: 0.00397491455078125\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m The Current Loss: 0.3186244606971741\n", + "\u001b[2m\u001b[36m(func pid=3933384)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [140/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31681370973587036\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [141/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [141/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31683812618255613\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [142/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [142/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31680436730384826\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [143/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [143/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31682041108608244\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [144/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
[144/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167888641357422\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [145/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [145/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31677406013011933\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [146/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [146/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:29 (running for 00:03:04.36)\n", + "Memory usage on this node: 90.9/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| 
traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316777 | 0.995 | 146 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 
192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316776841878891\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [147/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [147/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316791233420372\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [148/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [148/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31677682101726534\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [149/500, 0/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [149/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167814368009567\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [150/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [150/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167929220199585\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [151/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [151/500, 100/150] loss: 0.31326169\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167747575044632\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [152/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [152/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-31\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 152\n", + " loss: 0.3167490804195404\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 53.41407895088196\n", + " time_this_iter_s: 0.3426215648651123\n", + " time_total_s: 53.41407895088196\n", + " timestamp: 1660748551\n", + " timesteps_since_restore: 0\n", + " training_iteration: 152\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167490804195404\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [153/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [153/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31675087988376616\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [154/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [154/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31675556540489197\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [155/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [155/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31679800868034363\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [156/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [156/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167535102367401\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [157/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [157/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31675425589084627\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [158/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [158/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167505258321762\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [159/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [159/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167500710487366\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [160/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [160/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167514604330063\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [161/500, 0/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [161/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:34 (running for 00:03:09.68)\n", + "Memory usage on this node: 91.2/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316752 | 0.995 | 161 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 
| 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31675223529338836\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [162/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [162/500, 100/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167467260360718\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [163/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [163/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167501360177994\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [164/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [164/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674783289432523\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [165/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [165/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674667179584504\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [166/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [166/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167480367422104\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [167/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [167/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-36\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 167\n", + " loss: 0.316746621131897\n", + " mcc: 
0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 58.530168771743774\n", + " time_this_iter_s: 0.3277442455291748\n", + " time_total_s: 58.530168771743774\n", + " timestamp: 1660748556\n", + " timesteps_since_restore: 0\n", + " training_iteration: 167\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316746621131897\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [168/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [168/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167465329170227\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [169/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [169/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167463940382004\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [170/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [170/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167466926574707\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [171/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [171/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316746312379837\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [172/500, 
0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [172/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167460185289383\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [173/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [173/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167463773488998\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [174/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [174/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167456513643265\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [175/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [175/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167457240819931\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [176/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [176/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674584209918977\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [177/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [177/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:40 (running for 00:03:14.99)\n", + "Memory usage on this node: 91.1/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", 
+ "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 177 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 
| 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167454606294632\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [178/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [178/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167451453208923\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [179/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [179/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674513876438143\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [180/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [180/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674508571624754\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [181/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [181/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674510300159453\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [182/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [182/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167451047897339\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [183/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [183/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674504160881045\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-42\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 183\n", + " loss: 0.31674504160881045\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 
63.8116569519043\n", + " time_this_iter_s: 0.33059024810791016\n", + " time_total_s: 63.8116569519043\n", + " timestamp: 1660748562\n", + " timesteps_since_restore: 0\n", + " training_iteration: 183\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [184/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [184/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167450201511383\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [185/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [185/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167450827360153\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [186/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [186/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167450112104416\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [187/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [187/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167450928688049\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [188/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [188/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488306045534\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [189/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [189/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493849277496\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [190/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [190/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449444532394\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [191/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [191/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449462413788\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [192/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [192/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674494564533234\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [193/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [193/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:45 (running for 00:03:20.27)\n", + "Memory usage on this node: 91.0/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: 
-0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 193 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 
192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744949221611\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [194/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [194/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449522018433\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [195/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [195/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744954586029\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [196/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [196/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449617385864\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 4\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [197/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [197/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449641227722\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 5\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [198/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [198/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674498319625854\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 6\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [199/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [199/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-47\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 199\n", + " loss: 0.31674498081207275\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 69.0955286026001\n", + " time_this_iter_s: 0.3259727954864502\n", + " time_total_s: 69.0955286026001\n", + " timestamp: 1660748567\n", + " timesteps_since_restore: 0\n", + " training_iteration: 199\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674498081207275\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [200/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [200/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449867725372\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [201/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [201/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449897527695\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [202/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [202/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449849843979\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [203/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [203/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674498558044434\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [204/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [204/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449849843979\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [205/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [205/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.3167449867725372\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [206/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [206/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449849843979\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [207/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [207/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449814081192\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [208/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [208/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449861764908\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [209/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [209/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:50 (running for 00:03:25.58)\n", + "Memory usage on this node: 90.9/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: 
/home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 209 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 
0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449796199799\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [210/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [210/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674498081207275\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [211/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [211/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449814081192\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [212/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [212/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.3167449814081192\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [213/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [213/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449772357941\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [214/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [214/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449766397476\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [215/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [215/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-52\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 215\n", + " loss: 0.3167449766397476\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 74.4016706943512\n", + " time_this_iter_s: 0.3288600444793701\n", + " time_total_s: 74.4016706943512\n", + " timestamp: 1660748572\n", + " timesteps_since_restore: 0\n", + " training_iteration: 215\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449766397476\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [216/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [216/500, 100/150] loss: 0.31326166\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674497604370117\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [217/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [217/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674498081207275\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [218/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [218/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449766397476\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [219/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [219/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674497604370117\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [220/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [220/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674497306346894\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [221/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [221/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674497365951537\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [222/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [222/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449724674225\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [223/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [223/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674497067928314\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [224/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [224/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449688911438\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [225/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [225/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:02:55 (running for 00:03:30.88)\n", + "Memory usage on this node: 90.9/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 225 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 
0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449671030045\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [226/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [226/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674496829509735\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [227/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [227/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674496829509735\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [228/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [228/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449676990509\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [229/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [229/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744966506958\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [230/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [230/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674496293067933\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [231/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [231/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-02-57\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 231\n", + " loss: 0.3167449647188187\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 79.69094276428223\n", + " time_this_iter_s: 0.3285794258117676\n", + " time_total_s: 79.69094276428223\n", + " timestamp: 1660748577\n", + " timesteps_since_restore: 0\n", + " training_iteration: 231\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449647188187\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [232/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [232/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449617385864\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [233/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [233/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449623346329\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [234/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [234/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674496054649354\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [235/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [235/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449575662613\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [236/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [236/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674496293067933\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [237/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [237/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674496114253997\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [238/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [238/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
The Current Loss: 0.3167449599504471\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [239/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [239/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449575662613\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [240/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [240/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449593544006\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [241/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [241/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:01 (running for 00:03:36.22)\n", + "Memory usage on this node: 91.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.319578770674192 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr 
| loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 241 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 
| 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449587583542\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [242/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [242/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674495816230774\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [243/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [243/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449569702148\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [244/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [244/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449527978897\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [245/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [245/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449569702148\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [246/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [246/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-02\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 246\n", + " loss: 0.3167449539899826\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 84.72364234924316\n", + " time_this_iter_s: 0.32591867446899414\n", + " time_total_s: 84.72364234924316\n", + " timestamp: 1660748582\n", + " timesteps_since_restore: 0\n", + " training_iteration: 246\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449539899826\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [247/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [247/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449516057968\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [248/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [248/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449522018433\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [249/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [249/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.3167449474334717\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [250/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [250/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449498176575\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [251/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [251/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449498176575\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [252/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [252/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674494862556457\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [253/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [253/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674494862556457\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [254/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [254/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449462413788\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [255/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [255/500, 100/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674494326114655\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [256/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [256/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674494564533234\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [257/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [257/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:06 (running for 00:03:41.49)\n", + "Memory usage on this node: 91.3/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | 
RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 257 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 
0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744943857193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [258/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [258/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744943857193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [259/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [259/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674494326114655\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [260/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [260/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449426651001\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [261/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [261/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449420690536\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [262/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [262/500, 100/150] loss: 0.31326166\n", + "Result for 
traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-08\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 262\n", + " loss: 0.3167449420690536\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 89.98958396911621\n", + " time_this_iter_s: 0.33092284202575684\n", + " time_total_s: 89.98958396911621\n", + " timestamp: 1660748588\n", + " timesteps_since_restore: 0\n", + " training_iteration: 262\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449420690536\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [263/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [263/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449402809143\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [264/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [264/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493789672853\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [265/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [265/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449390888214\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [266/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [266/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493610858917\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [267/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [267/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493789672853\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [268/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [268/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493968486783\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [269/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [269/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449373006821\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [270/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [270/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449349164963\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [271/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [271/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449343204498\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [272/500, 0/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [272/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:11 (running for 00:03:46.54)\n", + "Memory usage on this node: 91.2/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 272 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 
192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449313402176\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [273/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [273/500, 
100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493312835694\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [274/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [274/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493074417115\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [275/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [275/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744931936264\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [276/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [276/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449313402176\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [277/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [277/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449289560318\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-13\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 277\n", + " loss: 0.3167449289560318\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 95.14118385314941\n", + " 
time_this_iter_s: 0.36550259590148926\n", + " time_total_s: 95.14118385314941\n", + " timestamp: 1660748593\n", + " timesteps_since_restore: 0\n", + " training_iteration: 277\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [278/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [278/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674493074417115\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [279/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [279/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492716789243\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [280/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [280/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744926571846\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [281/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [281/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744926571846\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [282/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [282/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492716789243\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [283/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [283/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492537975313\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [284/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [284/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492061138154\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [285/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [285/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449241876602\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [286/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [286/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492061138154\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [287/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [287/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492478370664\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:16 (running for 00:03:51.57)\n", + "Memory usage on this node: 91.3/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: 
-0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 287 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 
192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [288/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [288/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492359161377\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [289/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [289/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492061138154\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [290/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [290/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449200153351\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [291/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [291/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674492299556734\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [292/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [292/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449164390564\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [293/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [293/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-18\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 293\n", + " loss: 0.3167449188232422\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 100.39487195014954\n", + " time_this_iter_s: 0.33101367950439453\n", + " time_total_s: 100.39487195014954\n", + " timestamp: 1660748598\n", + " timesteps_since_restore: 0\n", + " training_iteration: 293\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449188232422\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [294/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [294/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449176311493\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [295/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [295/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449164390564\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [296/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [296/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449152469635\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [297/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [297/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449128627777\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [298/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [298/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449170351028\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [299/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [299/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.3167449128627777\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [300/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [300/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449140548706\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [301/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [301/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674491047859193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [302/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [302/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:21 (running for 00:03:56.61)\n", + "Memory usage on this node: 91.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | 
accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 302 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 
0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674491226673124\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [303/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [303/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674491047859193\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [304/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [304/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674490988254544\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [305/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [305/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674490809440614\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [306/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [306/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744909286499\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [307/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [307/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449063062668\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [308/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [308/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-23\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 308\n", + " loss: 0.3167449086904526\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 105.43136167526245\n", + " time_this_iter_s: 0.32483506202697754\n", + " time_total_s: 105.43136167526245\n", + " timestamp: 1660748603\n", + " timesteps_since_restore: 0\n", + " training_iteration: 308\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449086904526\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [309/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [309/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674490571022035\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [310/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [310/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.316744903922081\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [311/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [311/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674490332603455\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [312/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [312/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167449027299881\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [313/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [313/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674490332603455\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [314/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [314/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674490213394163\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [315/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [315/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674490332603455\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [316/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [316/500, 100/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489736557004\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [317/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [317/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489855766297\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [318/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [318/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:27 (running for 00:04:01.92)\n", + "Memory usage on this node: 91.3/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | 
RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 318 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 
0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489736557004\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [319/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [319/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448967695236\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [320/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [320/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489855766297\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [321/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [321/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489557743074\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [322/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [322/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489498138425\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [323/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [323/500, 100/150] loss: 0.31326166\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448961734772\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [324/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [324/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-29\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 324\n", + " loss: 0.3167448914051056\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 110.75987958908081\n", + " time_this_iter_s: 0.3303053379058838\n", + " time_total_s: 110.75987958908081\n", + " timestamp: 1660748609\n", + " timesteps_since_restore: 0\n", + " training_iteration: 324\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448914051056\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [325/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [325/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744892001152\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [326/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [326/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489319324495\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [327/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [327/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489080905915\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [328/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [328/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448925971985\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [329/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [329/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489080905915\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [330/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [330/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674489080905915\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [331/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [331/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488723278044\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [332/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [332/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488544464113\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [333/500, 0/150] 
loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [333/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488842487336\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [334/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [334/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:32 (running for 00:04:07.24)\n", + "Memory usage on this node: 91.2/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 334 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 
|\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + 
"+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488842487336\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [335/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [335/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744886636734\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [336/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [336/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488306045534\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [337/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [337/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488604068757\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [338/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [338/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488544464113\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [339/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [339/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488484859464\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [340/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [340/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-34\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 340\n", + " loss: 0.3167448836565018\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 116.05035591125488\n", + " time_this_iter_s: 0.32315850257873535\n", + " time_total_s: 116.05035591125488\n", + " timestamp: 1660748614\n", + " timesteps_since_restore: 0\n", + " training_iteration: 340\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448836565018\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [341/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [341/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488246440885\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [342/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [342/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448788881302\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [343/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [343/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.31674488008022306\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [344/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [344/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488008022306\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [345/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [345/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674487829208375\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [346/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [346/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448818683624\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [347/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [347/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448794841766\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [348/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [348/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674488008022306\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [349/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [349/500, 100/150] 
loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448765039444\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [350/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [350/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:37 (running for 00:04:12.51)\n", + "Memory usage on this node: 91.2/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 350 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 
2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.31674487292766573\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [351/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [351/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674487471580504\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [352/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [352/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674487292766573\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [353/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [353/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674487233161924\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [354/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [354/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448711395264\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [355/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [355/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674487233161924\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [356/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [356/500, 100/150] loss: 0.31326166\n", + "Result for 
traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-39\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 356\n", + " loss: 0.31674487292766573\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 121.31600856781006\n", + " time_this_iter_s: 0.32636451721191406\n", + " time_total_s: 121.31600856781006\n", + " timestamp: 1660748619\n", + " timesteps_since_restore: 0\n", + " training_iteration: 356\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674487292766573\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [357/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [357/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486994743345\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [358/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [358/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448669672012\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [359/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [359/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448711395264\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [360/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [360/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448669672012\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [361/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [361/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486577510835\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [362/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [362/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448663711548\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [363/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [363/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486458301543\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [364/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [364/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486577510835\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [365/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [365/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:43 (running for 00:04:17.92)\n", + "Memory usage on this node: 91.5/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 
128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 365 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 
| 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486339092256\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [366/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [366/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448627948761\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [367/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [367/500, 100/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486339092256\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [368/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [368/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486100673677\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [369/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [369/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-44\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 369\n", + " loss: 0.3167448592185974\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 126.40614295005798\n", + " time_this_iter_s: 0.40616536140441895\n", + " time_total_s: 126.40614295005798\n", + " timestamp: 1660748624\n", + " timesteps_since_restore: 0\n", + " training_iteration: 369\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448592185974\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [370/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [370/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674486041069033\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [371/500, 0/150] loss: 0.31326166\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [371/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448616027832\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [372/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [372/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674485564231875\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [373/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [373/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448562383652\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [374/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [374/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448568344116\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [375/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [375/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674485802650454\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 3\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [376/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [376/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448568344116\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [377/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [377/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448568344116\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [378/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [378/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448538541794\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [379/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [379/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:48 (running for 00:04:23.04)\n", + "Memory usage on this node: 91.3/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + 
"|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 379 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + 
"| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448514699936\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [380/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [380/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674485266208646\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [381/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [381/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674485027790067\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [382/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [382/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448514699936\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [383/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [383/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674485325813295\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 
2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [384/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [384/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-49\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 384\n", + " loss: 0.3167448514699936\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 131.52855968475342\n", + " time_this_iter_s: 0.3371148109436035\n", + " time_total_s: 131.52855968475342\n", + " timestamp: 1660748629\n", + " timesteps_since_restore: 0\n", + " training_iteration: 384\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448514699936\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [385/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [385/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448514699936\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [386/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [386/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674484968185423\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [387/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [387/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448514699936\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [388/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [388/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674484968185423\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [389/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [389/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744846701622\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [390/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [390/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744846701622\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [391/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [391/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674484491348265\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [392/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [392/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448461055756\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [393/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [393/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
The Current Loss: 0.3167448443174362\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [394/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [394/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:53 (running for 00:04:28.15)\n", + "Memory usage on this node: 91.3/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 394 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| 
traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448395490646\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [395/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [395/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448389530182\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [396/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [396/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674484014511106\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [397/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [397/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744841337204\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [398/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [398/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448395490646\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [399/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [399/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483776092527\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-03-54\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 399\n", + " loss: 
0.31674483776092527\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 136.6484603881836\n", + " time_this_iter_s: 0.3314664363861084\n", + " time_total_s: 136.6484603881836\n", + " timestamp: 1660748634\n", + " timesteps_since_restore: 0\n", + " training_iteration: 399\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [400/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [400/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674484014511106\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [401/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [401/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483776092527\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [402/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [402/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448318004608\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [403/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [403/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483358860017\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [404/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [404/500, 100/150] loss: 0.31326166\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483835697176\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [405/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [405/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483239650725\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [406/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [406/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483239650725\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [407/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [407/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483358860017\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [408/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [408/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483299255374\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [409/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [409/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448312044144\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [410/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [410/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:03:58 (running for 00:04:33.41)\n", + "Memory usage on this node: 91.7/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 410 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 
0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674483299255374\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [411/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [411/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.31674483060836794\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [412/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [412/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482822418215\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [413/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [413/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482822418215\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [414/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [414/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448264360428\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [415/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [415/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-04-00\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 415\n", + " loss: 0.31674482762813566\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 141.87884068489075\n", + " time_this_iter_s: 0.32447218894958496\n", + " time_total_s: 141.87884068489075\n", + " timestamp: 1660748640\n", + " timesteps_since_restore: 0\n", + " training_iteration: 415\n", + " trial_id: 2f206_00014\n", + " warmup_time: 
0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482762813566\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [416/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [416/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482583999636\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [417/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [417/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744824051857\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [418/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [418/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482583999636\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [419/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [419/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744824051857\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [420/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [420/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674481987953185\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [421/500, 0/150] loss: 0.31326166\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [421/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482345581056\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [422/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [422/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674481987953185\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [423/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [423/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448228597641\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [424/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [424/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482226371764\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [425/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [425/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482226371764\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [426/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [426/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:04:03 (running for 00:04:38.70)\n", + "Memory usage on this node: 91.6/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: 
-0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 426 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 
0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482107162477\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [427/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [427/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674482107162477\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [428/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [428/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448204755783\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [429/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [429/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.316744818687439\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [430/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [430/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674481809139254\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [431/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [431/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448163032532\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-04-05\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 431\n", + " loss: 0.3167448163032532\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 147.18766975402832\n", + " time_this_iter_s: 0.3270857334136963\n", + " time_total_s: 147.18766975402832\n", + " timestamp: 1660748645\n", + " timesteps_since_restore: 0\n", + " training_iteration: 431\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [432/500, 
0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [432/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448163032532\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [433/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [433/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448168992996\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [434/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [434/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674481511116026\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [435/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [435/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674481511116026\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [436/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [436/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448115348816\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [437/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [437/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674481332302096\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [438/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [438/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674481213092803\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [439/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [439/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448103427887\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [440/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [440/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448079586029\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [441/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [441/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674480855464937\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [442/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [442/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:04:09 (running for 00:04:43.99)\n", + "Memory usage on this node: 91.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: 
-0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 442 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 
192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448055744171\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [443/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [443/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448079586029\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [444/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [444/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674480855464937\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m [445/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [445/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674480497837065\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [446/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [446/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674480319023135\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [447/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [447/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-04-10\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 447\n", + " loss: 0.3167448055744171\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 152.4827446937561\n", + " time_this_iter_s: 0.32762575149536133\n", + " time_total_s: 152.4827446937561\n", + " timestamp: 1660748650\n", + " timesteps_since_restore: 0\n", + " training_iteration: 447\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448055744171\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [448/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [448/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448037862778\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [449/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [449/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674480259418486\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [450/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [450/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167448019981384\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [451/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [451/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674480259418486\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [452/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [452/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447990179062\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [453/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [453/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674479961395263\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [454/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [454/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.3167447978258133\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [455/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [455/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674479722976684\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [456/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [456/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674479842185976\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [457/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [457/500, 100/150] loss: 0.31326169\n", + "== Status ==\n", + "Current time: 2022-08-17 16:04:14 (running for 00:04:49.01)\n", + "Memory usage on this node: 91.5/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | 
accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 457 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 
0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447978258133\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [458/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [458/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674479484558105\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [459/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [459/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447936534882\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [460/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [460/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447930574417\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [461/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [461/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447918653488\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [462/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [462/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-04-15\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 462\n", + " loss: 0.3167447936534882\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 157.61927223205566\n", + " time_this_iter_s: 0.3417811393737793\n", + " time_total_s: 157.61927223205566\n", + " timestamp: 1660748655\n", + " timesteps_since_restore: 0\n", + " training_iteration: 462\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447936534882\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [463/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [463/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478828907016\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [464/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [464/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447888851166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [465/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [465/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func 
pid=3934350)\u001b[0m The Current Loss: 0.3167447888851166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [466/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [466/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478769302367\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [467/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [467/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478769302367\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [468/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [468/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478828907016\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [469/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [469/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478590488436\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [470/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [470/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478769302367\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [471/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [471/500, 100/150] 
loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447853088379\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [472/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [472/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:04:19 (running for 00:04:54.22)\n", + "Memory usage on this node: 93.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 472 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 
2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 
0.31674478232860565\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [473/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [473/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478352069857\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [474/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [474/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674478352069857\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [475/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [475/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447805404663\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [476/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [476/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447817325592\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [477/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [477/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-04-20\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 477\n", + " loss: 0.3167447817325592\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " 
should_checkpoint: true\n", + " time_since_restore: 162.7175350189209\n", + " time_this_iter_s: 0.32738351821899414\n", + " time_total_s: 162.7175350189209\n", + " timestamp: 1660748660\n", + " timesteps_since_restore: 0\n", + " training_iteration: 477\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447817325592\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [478/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [478/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447793483734\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [479/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [479/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447781562805\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [480/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [480/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674477994441985\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [481/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [481/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447763681412\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [482/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
[482/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674477458000183\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [483/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [483/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447757720947\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [484/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [484/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447763681412\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [485/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [485/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674477517604827\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [486/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [486/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447763681412\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [487/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [487/500, 100/150] loss: 0.31326166\n", + "== Status ==\n", + "Current time: 2022-08-17 16:04:24 (running for 00:04:59.40)\n", + "Memory usage on this node: 92.4/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=14\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: 
-0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 2.0/64 CPUs, 2.0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: /home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (1 RUNNING, 14 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00014 | RUNNING | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 487 | 0.989999 |\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 
0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 |\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447757720947\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [488/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [488/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447692155838\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [489/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [489/500, 100/150] loss: 
0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447692155838\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [490/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [490/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674477100372317\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 2\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [491/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [491/500, 100/150] loss: 0.31326169\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674476981163024\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [492/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [492/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-04-26\n", + " done: false\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 492\n", + " loss: 0.31674476742744445\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 167.8956162929535\n", + " time_this_iter_s: 0.32702136039733887\n", + " time_total_s: 167.8956162929535\n", + " timestamp: 1660748666\n", + " timesteps_since_restore: 0\n", + " training_iteration: 492\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674476742744445\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [493/500, 0/150] loss: 0.31326166\n", + 
"\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [493/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447662353516\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [494/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [494/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447644472122\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [495/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [495/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447638511658\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [496/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [496/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447692155838\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [497/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [497/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447686195374\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [498/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [498/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447638511658\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m 
[499/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [499/500, 100/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.31674476146698\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger times: 0\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [500/500, 0/150] loss: 0.31326166\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m [500/500, 100/150] loss: 0.31326166\n", + "Result for traindata_2f206_00014:\n", + " accuracy: 0.995\n", + " date: 2022-08-17_16-04-28\n", + " done: true\n", + " experiment_id: fb918cfdd1fe4ee08df1ec144f4d9788\n", + " hostname: tesla\n", + " iterations_since_restore: 500\n", + " loss: 0.3167447644472122\n", + " mcc: 0.98999899989999\n", + " node_ip: 192.168.85.249\n", + " pid: 3934350\n", + " should_checkpoint: true\n", + " time_since_restore: 170.68252635002136\n", + " time_this_iter_s: 0.3418099880218506\n", + " time_total_s: 170.68252635002136\n", + " timestamp: 1660748668\n", + " timesteps_since_restore: 0\n", + " training_iteration: 500\n", + " trial_id: 2f206_00014\n", + " warmup_time: 0.004093170166015625\n", + " \n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m The Current Loss: 0.3167447644472122\n", + "\u001b[2m\u001b[36m(func pid=3934350)\u001b[0m trigger Times: 1\n", + "== Status ==\n", + "Current time: 2022-08-17 16:04:29 (running for 00:05:03.90)\n", + "Memory usage on this node: 92.8/251.3 GiB\n", + "Using AsyncHyperBand: num_stopped=15\n", + "Bracket: Iter 256.000: -0.3186243987083435 | Iter 128.000: -0.31971878762428574 | Iter 64.000: -0.3201102578639984 | Iter 32.000: -0.32196231007575987 | Iter 16.000: -0.3251563087105751 | Iter 8.000: -0.33778586000204086 | Iter 4.000: -0.3502905958890915 | Iter 2.000: -0.3880224198102951 | Iter 1.000: -0.6708784818649292\n", + "Resources requested: 0/64 CPUs, 0/4 GPUs, 0.0/122.97 GiB heap, 0.0/56.69 GiB objects (0.0/1.0 accelerator_type:T4)\n", + "Result logdir: 
/home/jabreu/ray_results/traindata_2022-08-17_15-59-25\n", + "Number of trials: 15/15 (15 TERMINATED)\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "| Trial name | status | loc | batch_size | dropout | hidden_size | lr | loss | accuracy | training_iteration | mcc |\n", + "|-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------|\n", + "| traindata_2f206_00000 | TERMINATED | 192.168.85.249:3931827 | 32 | 0.415247 | 256 | 0.000170935 | 0.320535 | 0.9925 | 500 | 0.985111 |\n", + "| traindata_2f206_00001 | TERMINATED | 192.168.85.249:3931883 | 16 | 0.376144 | 32 | 2.13793e-05 | 0.689436 | 0.615 | 1 | 0.230258 |\n", + "| traindata_2f206_00002 | TERMINATED | 192.168.85.249:3931986 | 8 | 0.331023 | 128 | 0.00030504 | 0.337083 | 0.9725 | 16 | 0.945304 |\n", + "| traindata_2f206_00003 | TERMINATED | 192.168.85.249:3932223 | 8 | 0.438603 | 128 | 0.000643721 | 0.321301 | 0.9925 | 128 | 0.98501 |\n", + "| traindata_2f206_00004 | TERMINATED | 192.168.85.249:3933151 | 16 | 0.499694 | 256 | 4.533e-05 | 0.670878 | 0.7975 | 1 | 0.60189 |\n", + "| traindata_2f206_00005 | TERMINATED | 192.168.85.249:3933287 | 8 | 0.332402 | 32 | 0.00010607 | 0.684941 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00006 | TERMINATED | 192.168.85.249:3933384 | 16 | 0.354435 | 64 | 0.00465713 | 0.318624 | 0.995 | 500 | 0.989998 |\n", + "| traindata_2f206_00007 | TERMINATED | 192.168.85.249:3933423 | 32 | 0.461969 | 32 | 0.000114137 | 0.693848 | 0.4925 | 1 | 0 |\n", + "| traindata_2f206_00008 | TERMINATED | 192.168.85.249:3933523 | 16 | 0.338075 | 128 | 6.63477e-05 | 0.677006 | 0.54 | 1 | 0.186967 |\n", + "| traindata_2f206_00009 | TERMINATED | 192.168.85.249:3933632 | 8 | 0.337134 | 128 | 0.000387222 | 0.323214 | 0.9925 | 32 | 0.985112 
|\n", + "| traindata_2f206_00010 | TERMINATED | 192.168.85.249:3933939 | 16 | 0.426184 | 256 | 1.65834e-05 | 0.682377 | 0.665 | 1 | 0.360888 |\n", + "| traindata_2f206_00011 | TERMINATED | 192.168.85.249:3934072 | 16 | 0.432872 | 128 | 0.00862223 | 0.818262 | 0.495 | 1 | 0 |\n", + "| traindata_2f206_00012 | TERMINATED | 192.168.85.249:3934157 | 8 | 0.366441 | 64 | 8.55378e-05 | 0.680259 | 0.515 | 1 | 0.088243 |\n", + "| traindata_2f206_00013 | TERMINATED | 192.168.85.249:3934261 | 16 | 0.394065 | 256 | 6.21042e-05 | 0.62614 | 0.635 | 2 | 0.38805 |\n", + "| traindata_2f206_00014 | TERMINATED | 192.168.85.249:3934350 | 8 | 0.420891 | 256 | 0.000670263 | 0.316745 | 0.995 | 500 | 0.989999 |\n", + "+-----------------------+------------+------------------------+--------------+-----------+---------------+-------------+----------+------------+----------------------+----------+\n", + "\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2022-08-17 16:04:29,108\tINFO tune.py:748 -- Total run time: 304.09 seconds (303.84 seconds for the tuning loop).\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Best trial config: {'hidden_size': 256, 'lr': 0.0006702634531750633, 'batch_size': 8, 'dropout': 0.42089114402459704}\n", + "Best trial final validation loss: 0.3167447644472122\n", + "Best trial final validation accuracy: 0.995\n", + "Best trial final validation mcc: 0.98999899989999\n", + "Results in test set:\n", + "--------------------\n", + "- model: cnn\n", + "- mode: one_hot\n", + "- dataset: primer\n", + "--------------------\n", + "Accuracy: 1.000\n", + "MCC: 1.000\n", + "[[203 0]\n", + " [ 0 197]]\n" + ] + } + ], + "source": [ + "os.chdir('../')\n", + "sys.path.append(os.getcwd())\n", + "from src.hyperparameter_tuning import hyperparameter_tuning\n", + "config['do_tuning'] = True\n", + "hyperparameter_tuning(device, config)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We've 
reached the end of the deep learning pipeline. " + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.7.13 ('dna-conda': conda)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.13" + }, + "orig_nbformat": 4, + "vscode": { + "interpreter": { + "hash": "ba449ea13c29f64a91968d8f927cecceedd6e605eda30388903386e6cd94168d" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/propythia/DNA/notebooks/quick-start-ML.ipynb b/src/propythia/DNA/notebooks/quick-start-ML.ipynb new file mode 100644 index 0000000..88b28b0 --- /dev/null +++ b/src/propythia/DNA/notebooks/quick-start-ML.ipynb @@ -0,0 +1 @@ +{"cells":[{"cell_type":"markdown","metadata":{},"source":["# ProPythia DNA quick start"]},{"cell_type":"markdown","metadata":{},"source":["This is a notebook that explains how to perform every step of the developed Machine Learning modules. They include all the necessary steps to calculate descriptors that will serve as features for the model. The model building and prediction steps were already implemented in ProPythia but will be included as well in this notebook to prove that the calculated features can be used. \n","\n","The steps included in this notebook are:\n","\n","1. Data reading and validation\n","2. Calculation of descriptors from sequences\n","3. Descriptors processing\n","4. Using processed descriptors to train ML models (already implemented in ProPythia)\n"]},{"cell_type":"code","execution_count":1,"metadata":{},"outputs":[],"source":["%load_ext autoreload\n","%autoreload 2\n","\n","import json\n","import sys\n","sys.path.append(\"../\")"]},{"cell_type":"markdown","metadata":{},"source":["## 1. 
Data reading and validation"]},{"cell_type":"markdown","metadata":{},"source":["(The deep learning pipeline uses the same module to read and validate the sequences.)"]},{"cell_type":"markdown","metadata":{},"source":["This module comprehends functions to read and to validate DNA sequences. First is necessary to create the object ReadDNA."]},{"cell_type":"code","execution_count":2,"metadata":{},"outputs":[],"source":["from read_sequence import ReadDNA\n","reader = ReadDNA()"]},{"cell_type":"markdown","metadata":{},"source":["It is possible to create sequence objects using a single DNA sequence, a *CSV* and a *FASTA* file. The single sequence is going to be validated (check if all letters belong to the DNA alphabet) and the output will be the sequence in upper case."]},{"cell_type":"code","execution_count":3,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["ACGTACGAGCATGCAT\n"]}],"source":["data = reader.read_sequence(\"ACGTACGAGCATGCAT\")\n","print(data)"]},{"cell_type":"markdown","metadata":{},"source":["With *CSV* there must be at least a column named 'sequence' in the file. The labels may also be retrieved and validated if the user wants them, but he must specify the `with_label` parameter as **True** and the column with the labels must be named 'label'."]},{"cell_type":"code","execution_count":4,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":[" sequence\n","0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA...\n","1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC...\n","2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA...\n","3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC...\n","4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA...\n","(2000, 1)\n","----------------------------------------------------------------------------------------------------\n"," sequence label\n","0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA... 0\n","1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC... 
0\n","2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA... 0\n","3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC... 1\n","4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA... 1\n","(2000, 2)\n"]}],"source":["filename = \"../datasets/primer/dataset.csv\"\n","data = reader.read_csv(filename, with_labels=False)\n","print(data.head())\n","print(data.shape)\n","\n","print(\"-\" * 100)\n","\n","data = reader.read_csv(filename, with_labels=True)\n","print(data.head())\n","print(data.shape)"]},{"cell_type":"markdown","metadata":{},"source":["The *FASTA* format is similar to the *CSV* format. It always reads the sequence, and the labels only if the user wants them. The *FASTA* format must be one of the following examples:\n","\n","```\n",">sequence_id1\n","ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n",">sequence_id2\n","ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n","``` \n","\n","```\n",">sequence_id1,label1\n","ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n",">sequence_id2,label2\n","ACTGACTGACTGACTGACTGACTGACTGACTGACTGACTG...\n","``` "]},{"cell_type":"code","execution_count":5,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":[" sequence\n","0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA...\n","1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC...\n","2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA...\n","3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC...\n","4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA...\n","(2000, 1)\n","----------------------------------------------------------------------------------------------------\n"," sequence label\n","0 CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGA... 0\n","1 GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGC... 0\n","2 GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTA... 0\n","3 GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGC... 1\n","4 GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATA... 
1\n","(2000, 2)\n"]}],"source":["filename = \"../datasets/primer/dataset.fasta\"\n","data = reader.read_fasta(filename, with_labels=False)\n","print(data.head())\n","print(data.shape)\n","\n","print(\"-\" * 100)\n","\n","data = reader.read_fasta(filename, with_labels=True)\n","print(data.head())\n","print(data.shape)"]},{"cell_type":"markdown","metadata":{},"source":["## 2. Calculation of descriptors from sequences"]},{"cell_type":"markdown","metadata":{},"source":["This module comprehends functions to computing different types of DNA descriptors. It receives a sequence object (from previous module) and retrieves a dictionary with name of feature and value. The user can calculate individual descriptors and also calculate all descriptors. It also lets the users to use define the physicochemical indices for the autocorrelation descriptors if the user doesn't want to use the default values or if he wants to add new ones. \n","\n"]},{"cell_type":"markdown","metadata":{},"source":["There are a total of 17 DNA implemented descriptors. They can be found below:\n","\n","\n","\n"," \n"," \n"," \n"," \n"," \n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","\n","
GroupNameOutput type
Psycho Chemicallengthint
gc_contentfloat
at_contentfloat
Nucleic Acid Compositionnucleic_acid_compositiondict
dinucleotide_compositiondict
trinucleotide_compositiondict
k_spaced_nucleic_acid_pairsdict
kmerdict
accumulated_nucleotide_frequencylist of dict
Autocorrelation and Cross CovarianceDAClist
DCClist
DACClist
TAClist
TCClist
TACClist
Pseudo Nucleic Acid CompositionPseDNCdict
PseKNCdict
"]},{"cell_type":"markdown","metadata":{},"source":["As mentioned above, the user can calculate all descriptors or individual descriptors. To calculate individual descriptors, the user must specify the name/names of the descriptor/descriptors in the `descriptor_list` parameter. If this parameter is not specified, the user will calculate all descriptors."]},{"cell_type":"markdown","metadata":{},"source":["To calculate a single descriptor, called 'nucleic_acid_composition', for a single sequence, the user must do:"]},{"cell_type":"code","execution_count":6,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["{\n"," \"nucleic_acid_composition\": {\n"," \"A\": 0.312,\n"," \"C\": 0.25,\n"," \"G\": 0.25,\n"," \"T\": 0.188\n"," }\n","}\n"]}],"source":["reader = ReadDNA()\n","data = reader.read_sequence(\"ACGTACGAGCATGCAT\")\n","\n","from descriptors import DNADescriptor\n","calculator = DNADescriptor(data)\n","\n","descriptor_list = ['nucleic_acid_composition']\n","result = calculator.get_descriptors(descriptor_list)\n","print(json.dumps(result, indent=4))"]},{"cell_type":"markdown","metadata":{},"source":["To calculate all of them, the user must leave the `descriptor_list` parameter empty."]},{"cell_type":"code","execution_count":7,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["length 16\n","----------------------------------------------------------------------------------------------------\n","gc_content 0.5\n","----------------------------------------------------------------------------------------------------\n","at_content 0.5\n","----------------------------------------------------------------------------------------------------\n","nucleic_acid_composition {'A': 0.312, 'C': 0.25, 'G': 0.25, 'T': 0.188}\n","----------------------------------------------------------------------------------------------------\n","dinucleotide_composition {'AA': 0.0, 'AC': 0.133, 'AG': 0.067, 'AT': 0.133, 'CA': 0.133, 'CC': 0.0, 'CG': 
0.133, 'CT': 0.0, 'GA': 0.067, 'GC': 0.133, 'GG': 0.0, 'GT': 0.067, 'TA': 0.067, 'TC': 0.0, 'TG': 0.067, 'TT': 0.0}\n","----------------------------------------------------------------------------------------------------\n","trinucleotide_composition {'AAA': 0.0, 'AAC': 0.0, 'AAG': 0.0, 'AAT': 0.0, 'ACA': 0.0, 'ACC': 0.0, 'ACG': 0.143, 'ACT': 0.0, 'AGA': 0.0, 'AGC': 0.071, 'AGG': 0.0, 'AGT': 0.0, 'ATA': 0.0, 'ATC': 0.0, 'ATG': 0.071, 'ATT': 0.0, 'CAA': 0.0, 'CAC': 0.0, 'CAG': 0.0, 'CAT': 0.143, 'CCA': 0.0, 'CCC': 0.0, 'CCG': 0.0, 'CCT': 0.0, 'CGA': 0.071, 'CGC': 0.0, 'CGG': 0.0, 'CGT': 0.071, 'CTA': 0.0, 'CTC': 0.0, 'CTG': 0.0, 'CTT': 0.0, 'GAA': 0.0, 'GAC': 0.0, 'GAG': 0.071, 'GAT': 0.0, 'GCA': 0.143, 'GCC': 0.0, 'GCG': 0.0, 'GCT': 0.0, 'GGA': 0.0, 'GGC': 0.0, 'GGG': 0.0, 'GGT': 0.0, 'GTA': 0.071, 'GTC': 0.0, 'GTG': 0.0, 'GTT': 0.0, 'TAA': 0.0, 'TAC': 0.071, 'TAG': 0.0, 'TAT': 0.0, 'TCA': 0.0, 'TCC': 0.0, 'TCG': 0.0, 'TCT': 0.0, 'TGA': 0.0, 'TGC': 0.071, 'TGG': 0.0, 'TGT': 0.0, 'TTA': 0.0, 'TTC': 0.0, 'TTG': 0.0, 'TTT': 0.0}\n","----------------------------------------------------------------------------------------------------\n","k_spaced_nucleic_acid_pairs {'AA': 0.0, 'AC': 0.133, 'AG': 0.067, 'AT': 0.133, 'CA': 0.133, 'CC': 0.0, 'CG': 0.133, 'CT': 0.0, 'GA': 0.067, 'GC': 0.133, 'GG': 0.0, 'GT': 0.067, 'TA': 0.067, 'TC': 0.0, 'TG': 0.067, 'TT': 0.0}\n","----------------------------------------------------------------------------------------------------\n","kmer {'AA': 0.0, 'AC': 0.133, 'AG': 0.067, 'AT': 0.133, 'CA': 0.133, 'CC': 0.0, 'CG': 0.133, 'CT': 0.0, 'GA': 0.067, 'GC': 0.133, 'GG': 0.0, 'GT': 0.067, 'TA': 0.067, 'TC': 0.0, 'TG': 0.067, 'TT': 0.0}\n","----------------------------------------------------------------------------------------------------\n","accumulated_nucleotide_frequency [{'A': 0.25, 'C': 0.25, 'G': 0.25, 'T': 0.25}, {'A': 0.375, 'C': 0.25, 'G': 0.25, 'T': 0.125}, {'A': 0.333, 'C': 0.25, 'G': 0.25, 'T': 
0.167}]\n","----------------------------------------------------------------------------------------------------\n","DAC [-1.126, -0.614, 0.734, 0.629]\n","----------------------------------------------------------------------------------------------------\n","DCC [-0.901, -0.867, 0.734, 0.706]\n","----------------------------------------------------------------------------------------------------\n","DACC [-1.126, -0.614, 0.734, 0.629, -0.901, -0.867, 0.734, 0.706]\n","----------------------------------------------------------------------------------------------------\n","TAC [0.172, 0.142, 0.182, 0.14]\n","----------------------------------------------------------------------------------------------------\n","TCC [-0.091, -0.132, -0.115, -0.144]\n","----------------------------------------------------------------------------------------------------\n","TACC [0.172, 0.142, 0.182, 0.14, -0.091, -0.132, -0.115, -0.144]\n","----------------------------------------------------------------------------------------------------\n","PseDNC {'AA': 0.0, 'AC': 0.097, 'AG': 0.048, 'AT': 0.097, 'CA': 0.097, 'CC': 0.0, 'CG': 0.097, 'CT': 0.0, 'GA': 0.048, 'GC': 0.097, 'GG': 0.0, 'GT': 0.048, 'TA': 0.048, 'TC': 0.0, 'TG': 0.048, 'TT': 0.0, 'lambda.1': 0.127, 'lambda.2': 0.027, 'lambda.3': 0.12}\n","----------------------------------------------------------------------------------------------------\n","PseKNC {'AAA': 0.0, 'AAC': 0.0, 'AAG': 0.0, 'AAT': 0.0, 'ACA': 0.0, 'ACC': 0.0, 'ACG': 0.052, 'ACT': 0.0, 'AGA': 0.0, 'AGC': 0.026, 'AGG': 0.0, 'AGT': 0.0, 'ATA': 0.0, 'ATC': 0.0, 'ATG': 0.026, 'ATT': 0.0, 'CAA': 0.0, 'CAC': 0.0, 'CAG': 0.0, 'CAT': 0.052, 'CCA': 0.0, 'CCC': 0.0, 'CCG': 0.0, 'CCT': 0.0, 'CGA': 0.026, 'CGC': 0.0, 'CGG': 0.0, 'CGT': 0.026, 'CTA': 0.0, 'CTC': 0.0, 'CTG': 0.0, 'CTT': 0.0, 'GAA': 0.0, 'GAC': 0.0, 'GAG': 0.026, 'GAT': 0.0, 'GCA': 0.052, 'GCC': 0.0, 'GCG': 0.0, 'GCT': 0.0, 'GGA': 0.0, 'GGC': 0.0, 'GGG': 0.0, 'GGT': 0.0, 'GTA': 0.026, 'GTC': 0.0, 'GTG': 0.0, 
'GTT': 0.0, 'TAA': 0.0, 'TAC': 0.026, 'TAG': 0.0, 'TAT': 0.0, 'TCA': 0.0, 'TCC': 0.0, 'TCG': 0.0, 'TCT': 0.0, 'TGA': 0.0, 'TGC': 0.026, 'TGG': 0.0, 'TGT': 0.0, 'TTA': 0.0, 'TTC': 0.0, 'TTG': 0.0, 'TTT': 0.0, 'lambda.1': 0.635}\n","----------------------------------------------------------------------------------------------------\n"]}],"source":["descriptor_list = []\n","result = calculator.get_descriptors(descriptor_list)\n","for key, val in result.items():\n"," print(key, val)\n"," print(\"-\" * 100)"]},{"cell_type":"markdown","metadata":{},"source":["It is also possible to calculate the descriptors for the *CSV* and the *FASTA* files, which contains a list of sequences."]},{"cell_type":"code","execution_count":8,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["CCGAGGGCTATGGTTTGGAAGTTAGAACCCTGGGGCTTCTCGCGGACACC\n","{'nucleic_acid_composition': {'A': 0.18, 'C': 0.26, 'G': 0.34, 'T': 0.22}}\n","----------------------------------------------------------------------------------------------------\n","GAGTTTATATGGCGCGAGCCTAGTGGTTTTTGTACTTGTTTGTCGCGTCG\n","{'nucleic_acid_composition': {'A': 0.12, 'C': 0.16, 'G': 0.32, 'T': 0.4}}\n","----------------------------------------------------------------------------------------------------\n","GATCAGTAGGGAAACAAACAGAGGGCCCAGCCACATCTAGCAGGTAGCCT\n","{'nucleic_acid_composition': {'A': 0.34, 'C': 0.26, 'G': 0.28, 'T': 0.12}}\n","----------------------------------------------------------------------------------------------------\n","GTCCACGACCGAACTCCCACCTTGACCGCAGAGGTACCACCAGAGCCCTG\n","{'nucleic_acid_composition': {'A': 0.24, 'C': 0.42, 'G': 0.22, 'T': 0.12}}\n","----------------------------------------------------------------------------------------------------\n","GGCGACCGAACTCCAACTAGAACCTGCATAACTGGCCTGGGAGATATGGT\n","{'nucleic_acid_composition': {'A': 0.28, 'C': 0.26, 'G': 0.28, 'T': 
0.18}}\n","----------------------------------------------------------------------------------------------------\n","AGACATTGTCAGAACTTAGTGTGCGCCGCACTGAGCGACCGAACTCCGAC\n","{'nucleic_acid_composition': {'A': 0.26, 'C': 0.3, 'G': 0.26, 'T': 0.18}}\n","----------------------------------------------------------------------------------------------------\n","CCCGGCGAAGGCTGACGAATCCTCGACCGAACTCCAGTGAAGCCAACCGG\n","{'nucleic_acid_composition': {'A': 0.26, 'C': 0.36, 'G': 0.28, 'T': 0.1}}\n","----------------------------------------------------------------------------------------------------\n","AGGCAGGTGGTCGTACAATGTTTTCGAAGAGATAGGGGGCCAGAGGCCTC\n","{'nucleic_acid_composition': {'A': 0.24, 'C': 0.18, 'G': 0.38, 'T': 0.2}}\n","----------------------------------------------------------------------------------------------------\n","TACTGCCTATAGCGAAGAGCGCGAGAGGTATATCGAAGAATACCGAGCAA\n","{'nucleic_acid_composition': {'A': 0.36, 'C': 0.2, 'G': 0.28, 'T': 0.16}}\n","----------------------------------------------------------------------------------------------------\n","CGTATCTTCGTGTGCTCTCCTTTAGAACTGCATCTCTAGAGTCAGAGAGG\n","{'nucleic_acid_composition': {'A': 0.2, 'C': 0.24, 'G': 0.24, 'T': 0.32}}\n","----------------------------------------------------------------------------------------------------\n"]}],"source":["reader = ReadDNA()\n","filename = '../datasets/primer/dataset.csv'\n","data = reader.read_csv(filename=filename, with_labels=True)\n","\n","# get the sequences from the dataframe\n","sequences = data['sequence'].to_list()\n","\n","# specify the descriptor list\n","descriptor_list = ['nucleic_acid_composition']\n","\n","# only for the first 10 sequences\n","for i in range(10):\n"," sequence = sequences[i]\n"," calculator = DNADescriptor(sequence)\n"," \n"," print(sequence)\n"," print(calculator.get_descriptors(descriptor_list))\n"," print(\"-\" * 100)"]},{"cell_type":"markdown","metadata":{},"source":["## 3. 
Descriptors processing"]},{"cell_type":"markdown","metadata":{},"source":["So far we have seen how to read and validate DNA sequences. We've also seen how to calculate descriptors from a single sequence or multiple sequences. Now, we can use the descriptors to train a model."]},{"cell_type":"markdown","metadata":{},"source":["However, as seen above, when calculating the descriptors for multiple sequences, the result is a list of dictionaries and each dictionary holds the calculated descriptors for a single sequence. So, the next step is to convert this data structure to a dataframe.\n","\n","We can directly convert the list of dictionaries to a dataframe using the `pd.DataFrame()` function. The result of this step would be similar to the following:\n","\n","(considering only the first few columns)\n","\n","| sequence | length | gc_content | at_content | nucleic_acid_composition | ...\n","|----------|--------|------------|------------|--------------------------------------------------|---\n","| ACTGCGAT | 8 | 0.5 | 0.5 | {'A': 0.25, 'C': 0.25, 'T': 0.25, 'G': 0.25} | ...\n","| TTGTTACT | 8 | 0.25 | 0.75 | {'A': 0.125, 'C': 0.125, 'T': 0.125, 'G': 0.625} | ...\n","| ... | ... | ... | ... | ... | ..."]},{"cell_type":"markdown","metadata":{},"source":["As we can see, some of the descriptors are not numerical values (e.g. 'nucleic_acid_composition'). Descriptors that produce dictionaries or lists still need be normalized because the model cannot process data in those forms."]},{"cell_type":"markdown","metadata":{},"source":["To normalize the data, dicts and lists need to \"explode\" into more columns. \n","\n","E.g. dicts:\n","\n","| descriptor_hello |\n","| ---------------- |\n","| {'a': 1, 'b': 2} |\n","\n","will be transformed into:\n","\n","| descriptor_hello_a | descriptor_hello_b |\n","| ------------------ | ------------------ |\n","| 1 | 2 |\n","\n","E.g. 
lists:\n","\n","| descriptor_hello |\n","| ---------------- |\n","| [1, 2, 3] |\n","\n","will be transformed into:\n","\n","| descriptor_hello_0 | descriptor_hello_1 | descriptor_hello_2 |\n","| ------------------ | ------------------ | ------------------ |\n","| 1 | 2 | 3 |"]},{"cell_type":"markdown","metadata":{},"source":["The `calculate_and_normalize` function will be used to calculate the descriptors and normalize them. It can be found in the `calculate_features.py` file."]},{"cell_type":"code","execution_count":9,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["0 / 2000\n","100 / 2000\n","200 / 2000\n","300 / 2000\n","400 / 2000\n","500 / 2000\n","600 / 2000\n","700 / 2000\n","800 / 2000\n","900 / 2000\n","1000 / 2000\n","1100 / 2000\n","1200 / 2000\n","1300 / 2000\n","1400 / 2000\n","1500 / 2000\n","1600 / 2000\n","1700 / 2000\n","1800 / 2000\n","1900 / 2000\n","Done!\n"]},{"data":{"text/html":["
\n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
lengthgc_contentat_contentnucleic_acid_composition_Anucleic_acid_composition_Cnucleic_acid_composition_Gnucleic_acid_composition_Tdinucleotide_composition_AAdinucleotide_composition_ACdinucleotide_composition_AG...accumulated_nucleotide_frequency_0_Gaccumulated_nucleotide_frequency_0_Taccumulated_nucleotide_frequency_1_Aaccumulated_nucleotide_frequency_1_Caccumulated_nucleotide_frequency_1_Gaccumulated_nucleotide_frequency_1_Taccumulated_nucleotide_frequency_2_Aaccumulated_nucleotide_frequency_2_Caccumulated_nucleotide_frequency_2_Gaccumulated_nucleotide_frequency_2_T
0500.600.400.180.260.340.220.0410.0610.061...0.4620.1540.200.120.400.280.1840.1840.3680.263
1500.480.520.120.160.320.400.0000.0200.061...0.3080.3850.200.160.360.280.1580.1320.3160.395
2500.540.460.340.260.280.120.0820.0610.163...0.3850.1540.440.120.360.080.3680.2630.2630.105
3500.640.360.240.420.220.120.0200.1430.082...0.2310.0770.240.440.160.160.2370.4210.2110.132
4500.540.460.280.260.280.180.0820.1020.041...0.3080.0770.320.360.200.120.2890.3420.2110.158
..................................................................
1995500.660.340.160.260.400.180.0000.0200.082...0.5380.1540.120.200.520.160.1840.2110.4210.184
1996500.440.560.220.220.220.340.0410.0610.041...0.3080.3080.200.120.280.400.2370.2110.2630.289
1997500.460.540.180.240.220.360.0200.0610.041...0.1540.2310.160.360.160.320.1320.2630.2110.395
1998500.480.520.280.240.240.240.1020.0610.041...0.4620.0770.240.320.280.160.2370.2890.2890.184
1999500.600.400.220.320.280.180.0410.0820.041...0.2310.1540.240.360.200.200.1840.3680.2630.184
\n","

2000 rows × 247 columns

\n","
"],"text/plain":[" length gc_content at_content nucleic_acid_composition_A \\\n","0 50 0.60 0.40 0.18 \n","1 50 0.48 0.52 0.12 \n","2 50 0.54 0.46 0.34 \n","3 50 0.64 0.36 0.24 \n","4 50 0.54 0.46 0.28 \n","... ... ... ... ... \n","1995 50 0.66 0.34 0.16 \n","1996 50 0.44 0.56 0.22 \n","1997 50 0.46 0.54 0.18 \n","1998 50 0.48 0.52 0.28 \n","1999 50 0.60 0.40 0.22 \n","\n"," nucleic_acid_composition_C nucleic_acid_composition_G \\\n","0 0.26 0.34 \n","1 0.16 0.32 \n","2 0.26 0.28 \n","3 0.42 0.22 \n","4 0.26 0.28 \n","... ... ... \n","1995 0.26 0.40 \n","1996 0.22 0.22 \n","1997 0.24 0.22 \n","1998 0.24 0.24 \n","1999 0.32 0.28 \n","\n"," nucleic_acid_composition_T dinucleotide_composition_AA \\\n","0 0.22 0.041 \n","1 0.40 0.000 \n","2 0.12 0.082 \n","3 0.12 0.020 \n","4 0.18 0.082 \n","... ... ... \n","1995 0.18 0.000 \n","1996 0.34 0.041 \n","1997 0.36 0.020 \n","1998 0.24 0.102 \n","1999 0.18 0.041 \n","\n"," dinucleotide_composition_AC dinucleotide_composition_AG ... \\\n","0 0.061 0.061 ... \n","1 0.020 0.061 ... \n","2 0.061 0.163 ... \n","3 0.143 0.082 ... \n","4 0.102 0.041 ... \n","... ... ... ... \n","1995 0.020 0.082 ... \n","1996 0.061 0.041 ... \n","1997 0.061 0.041 ... \n","1998 0.061 0.041 ... \n","1999 0.082 0.041 ... \n","\n"," accumulated_nucleotide_frequency_0_G \\\n","0 0.462 \n","1 0.308 \n","2 0.385 \n","3 0.231 \n","4 0.308 \n","... ... \n","1995 0.538 \n","1996 0.308 \n","1997 0.154 \n","1998 0.462 \n","1999 0.231 \n","\n"," accumulated_nucleotide_frequency_0_T \\\n","0 0.154 \n","1 0.385 \n","2 0.154 \n","3 0.077 \n","4 0.077 \n","... ... \n","1995 0.154 \n","1996 0.308 \n","1997 0.231 \n","1998 0.077 \n","1999 0.154 \n","\n"," accumulated_nucleotide_frequency_1_A \\\n","0 0.20 \n","1 0.20 \n","2 0.44 \n","3 0.24 \n","4 0.32 \n","... ... \n","1995 0.12 \n","1996 0.20 \n","1997 0.16 \n","1998 0.24 \n","1999 0.24 \n","\n"," accumulated_nucleotide_frequency_1_C \\\n","0 0.12 \n","1 0.16 \n","2 0.12 \n","3 0.44 \n","4 0.36 \n","... ... 
\n","1995 0.20 \n","1996 0.12 \n","1997 0.36 \n","1998 0.32 \n","1999 0.36 \n","\n"," accumulated_nucleotide_frequency_1_G \\\n","0 0.40 \n","1 0.36 \n","2 0.36 \n","3 0.16 \n","4 0.20 \n","... ... \n","1995 0.52 \n","1996 0.28 \n","1997 0.16 \n","1998 0.28 \n","1999 0.20 \n","\n"," accumulated_nucleotide_frequency_1_T \\\n","0 0.28 \n","1 0.28 \n","2 0.08 \n","3 0.16 \n","4 0.12 \n","... ... \n","1995 0.16 \n","1996 0.40 \n","1997 0.32 \n","1998 0.16 \n","1999 0.20 \n","\n"," accumulated_nucleotide_frequency_2_A \\\n","0 0.184 \n","1 0.158 \n","2 0.368 \n","3 0.237 \n","4 0.289 \n","... ... \n","1995 0.184 \n","1996 0.237 \n","1997 0.132 \n","1998 0.237 \n","1999 0.184 \n","\n"," accumulated_nucleotide_frequency_2_C \\\n","0 0.184 \n","1 0.132 \n","2 0.263 \n","3 0.421 \n","4 0.342 \n","... ... \n","1995 0.211 \n","1996 0.211 \n","1997 0.263 \n","1998 0.289 \n","1999 0.368 \n","\n"," accumulated_nucleotide_frequency_2_G \\\n","0 0.368 \n","1 0.316 \n","2 0.263 \n","3 0.211 \n","4 0.211 \n","... ... \n","1995 0.421 \n","1996 0.263 \n","1997 0.211 \n","1998 0.289 \n","1999 0.263 \n","\n"," accumulated_nucleotide_frequency_2_T \n","0 0.263 \n","1 0.395 \n","2 0.105 \n","3 0.132 \n","4 0.158 \n","... ... \n","1995 0.184 \n","1996 0.289 \n","1997 0.395 \n","1998 0.184 \n","1999 0.184 \n","\n","[2000 rows x 247 columns]"]},"execution_count":9,"metadata":{},"output_type":"execute_result"}],"source":["reader = ReadDNA()\n","filename = '../datasets/primer/dataset.csv'\n","data = reader.read_csv(filename=filename, with_labels=True)\n","\n","# specify the descriptor list\n","descriptor_list = []\n","\n","from calculate_features import calculate_and_normalize\n","fps_x, fps_y = calculate_and_normalize(data)\n","\n","fps_x"]},{"cell_type":"markdown","metadata":{},"source":["The obtained dataframe contains all calculated descriptors for the input dataset. As we can see by the dataframe shape, it now contains 247 columns instead of just 17. 
This is because the descriptors are now normalized and the data is finally ready to be used by the model. It is also important to note that, regardless of the size of the sequences, the final dataframe will always have the same number of columns since the implemented descriptors produce always the same number of values."]},{"cell_type":"markdown","metadata":{},"source":["## 4. Using processed descriptors to train a model"]},{"cell_type":"markdown","metadata":{},"source":["We've reached the ending point of this tutorial, as everything from now on was already implemented in ProPythia. The next steps will be shown either way to validate the calculated DNA descriptors. The following tasks are the training of the model, obtaining the predictions and then the calculation of feature importance for Random Forest, Support Vector Machine and Linear SVM models."]},{"cell_type":"code","execution_count":10,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["Executing op VarHandleOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op AssignVariableOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op VarHandleOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op AssignVariableOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op VarHandleOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op AssignVariableOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op VarHandleOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op AssignVariableOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op VarHandleOp in device /job:localhost/replica:0/task:0/device:CPU:0\n","Executing op AssignVariableOp in device /job:localhost/replica:0/task:0/device:CPU:0\n"]}],"source":["import sys\n","\n","from sklearn.model_selection import train_test_split\n","from sklearn.metrics import make_scorer, 
matthews_corrcoef\n","from sklearn.preprocessing import StandardScaler\n","\n","sys.path.append('../../../../src/')\n","from propythia.shallow_ml import ShallowML"]},{"cell_type":"code","execution_count":11,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["performing gridSearch...\n","GridSearchCV took 7.82 seconds for 4 candidate parameter settings.\n","GridSearchCV(cv=10,\n"," estimator=Pipeline(steps=[('scl', None),\n"," ('clf',\n"," RandomForestClassifier(random_state=1))]),\n"," n_jobs=10,\n"," param_grid=[{'clf__max_features': ['sqrt'],\n"," 'clf__n_estimators': [100, 250, 500, 750]}],\n"," scoring=make_scorer(matthews_corrcoef))\n","Model with rank: 1\n"," Mean validation score: 0.992 (std: 0.009)\n"," Parameters: {'clf__max_features': 'sqrt', 'clf__n_estimators': 100}\n"," \n","\n","Model with rank: 2\n"," Mean validation score: 0.991 (std: 0.009)\n"," Parameters: {'clf__max_features': 'sqrt', 'clf__n_estimators': 250}\n"," \n","\n","Model with rank: 2\n"," Mean validation score: 0.991 (std: 0.009)\n"," Parameters: {'clf__max_features': 'sqrt', 'clf__n_estimators': 500}\n"," \n","\n","Model with rank: 2\n"," Mean validation score: 0.991 (std: 0.009)\n"," Parameters: {'clf__max_features': 'sqrt', 'clf__n_estimators': 750}\n"," \n","\n","make_scorer(matthews_corrcoef)\n","10\n","Best score (scorer: make_scorer(matthews_corrcoef)) and parameters from a 10-fold cross validation:\n"," MCC score:\t0.992\n"," Parameters:\t{'clf__max_features': 'sqrt', 'clf__n_estimators': 100}\n","\n","0.992034 (0.008825) with: {'clf__max_features': 'sqrt', 'clf__n_estimators': 100}\n","0.990710 (0.008518) with: {'clf__max_features': 'sqrt', 'clf__n_estimators': 250}\n","0.990710 (0.008518) with: {'clf__max_features': 'sqrt', 'clf__n_estimators': 500}\n","0.990710 (0.008518) with: {'clf__max_features': 'sqrt', 'clf__n_estimators': 750}\n"," clf__max_features clf__n_estimators means stds\n","0 sqrt 100 0.992034 0.008825\n","1 sqrt 250 0.990710 0.008518\n","2 
sqrt 500 0.990710 0.008518\n","3 sqrt 750 0.990710 0.008518\n"]}],"source":["# fps_x and fps_y are the features and labels calculated from the data in the previous chapter\n","X_train, X_test, y_train, y_test = train_test_split(fps_x, fps_y, stratify=fps_y)\n","\n","scaler = StandardScaler().fit(X_train)\n","X_train = scaler.transform(X_train)\n","X_test = scaler.transform(X_test)\n","\n","ml = ShallowML(X_train, X_test, y_train, y_test, report_name=None, columns_names=fps_x.columns)\n","\n","param_grid = [{'clf__n_estimators': [100, 250, 500, 750], 'clf__max_features': ['sqrt']}]\n","\n","best_rf_model = ml.train_best_model(\n"," model_name=None,\n"," model='rf',\n"," score=make_scorer(matthews_corrcoef),\n"," param_grid=param_grid,\n"," cv=10\n",")"]},{"cell_type":"code","execution_count":12,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":[" precision recall f1-score support\n","\n"," 0 1.00 1.00 1.00 253\n"," 1 1.00 1.00 1.00 247\n","\n"," accuracy 1.00 500\n"," macro avg 1.00 1.00 1.00 500\n","weighted avg 1.00 1.00 1.00 500\n","\n","[[252 1]\n"," [ 0 247]]\n"]},{"data":{"text/plain":["{'Accuracy': 0.998,\n"," 'MCC': 0.9960075855857898,\n"," 'log_loss': 0.04300015871807655,\n"," 'f1 score': 0.997979797979798,\n"," 'roc_auc': 0.9980237154150198,\n"," 'Precision': array([0.494 , 0.99596774, 1. 
]),\n"," 'Recall': array([1., 1., 0.]),\n"," 'fdr': 0.004032258064516129,\n"," 'sn': 1.0,\n"," 'sp': 0.9960474308300395}"]},"execution_count":12,"metadata":{},"output_type":"execute_result"}],"source":["scores, report, cm, cm2 = ml.score_testset(best_rf_model)\n","print(report)\n","print(cm) \n","scores"]},{"cell_type":"code","execution_count":13,"metadata":{},"outputs":[{"data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAATUAAAELCAYAAAC4bxHZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAbl0lEQVR4nO3ce1BU5/0G8IddgmKAKivgEmMtUWHLqEEJmAhGEbnogsYbKUqqVqw1RseoDTEKeKkJaRIr1MuYC9HitNHGSyRUjU0txQvGDCmmiCYIAWUFXHQQvLAu7++PTHYkgnuABTbv7/nMOOORd895vrvmyTl7dnUQQggQEUlC1d0BiIhsiaVGRFJhqRGRVFhqRCQVlhoRSYWlRkRSYakRtVFSUhI2bdpks/1t2rQJwcHBGD16tM32+WNffvklIiIiEBAQgGPHjnXacewBS62NwsLCMGzYMAQEBFh+VVVVdXifJ0+etFFC6zIyMrBixYouO97D7Nu3D7/61a+6O8YDEhISMHToUAQEBCA4OBiLFy9GdXV1m/fj6+uL7777rtWfGwwGZGZmIicnBydOnOhI5IdKT0/HrFmzUFBQgPDw8E47jj1gqbXD9u3bUVBQYPnl5eXVrXnu3bvXrcdvL3vPnZycjIKCAhw5cgR1dXV4/fXXbX6MK1euoHfv3tBoNG1+rJLn74c1lZWVGDx4cJuP8VPEUrORmzdvYtWqVQgJCUFoaCg2bdoEs9kMACgvL8cLL7yA4OBgBAcHY/ny5airqwMArFy5EpWVlVi4cCECAgLw7rvvIj8/H2PGjGm2//vP5jIyMrBkyRKsWLECI0aMwP79+x96fGt8fX2xe/duy+XJn/70J5SXlyMuLg4jRozA0qVL0djYCACWbNu3b0dwcDDCwsLwySefNHsefv/732PUqFEYN24ctm7diqamJgDfn5U9//zz2LhxI4KCgrBs2TKkpKTgq6++QkBAAAIDAwEAx48fx5QpUzBixAg8++yzyMjIsOz/8uXL8PX1xf79+zF27FgEBwdj27Ztlp+bzWZs374d4eHhCAgIwNSpU2EwGAAAJSUlmDt3LoKCghAZGYmcnBxFz0/v3r0RGRmJb775psWf79mzBxMmTEBQUBAWLlxoOXOfNWsWAGDy5MkICAh44HgnT57EvHnzUF1djYCAACQlJQEA/vnPf2LSpEkIDAxEQkICSkpKLI8JCwvDjh07EBMTgyeffLLFYrv/9YyIiEB4eDgqKiosf8d+eC2lJahNxo0bJ06cOPHAn//ud78Ta9asEQ0NDeLatWti2rRp4q9//asQQoiysjKRl5cn7t69K4xGo4iPjxcbNmxodZ+nT58WoaGhrR43PT1d/PKXvxSfffaZMJvN4vbt2w89/o+lp6eL5cuXW7aHDBkifvvb34qbN2+KixcvCn9/f/HCCy+I8vJyUVdXJ6Kjo8W+ffss2XQ6ndi4caO4e/euyM/PF8OHDxclJSVCCCFWrlwpFi5cKG7evCkqKipERESE2LNnjxBCiI8//ljodDqxa9cuYTKZxO3bt8XHH38snn/++Wb5Tp8+LYqLi4XZbBbnz58XTz/9tPjss8+EEEJUVFSI
IUOGiNdee03cvn1bnD9/Xvj7+4tvv/1WCCHEu+++K/R6vSgpKRFNTU3i/Pnzora2VjQ0NIgxY8aIv//978JkMomvv/5aBAUFiYsXL7b4HM2ePduS22g0ioSEBLFixQohhBCvvPKKeOedd4QQQpw8eVIEBQWJr7/+Wty9e1esW7dOxMfHN3tuy8rKWjxGS6/1pUuXxPDhw0VeXp5obGwUO3bsEOHh4eLu3buWvwexsbGisrJS3L59u8V9DhkyRMyZM0dcv37dsqa1v7cy4plaO7z44osIDAxEYGAgFi1ahGvXriE3NxerVq1Cr169oNFoMGfOHHz66acAgJ///OcYPXo0nJyc4O7ujrlz5+KLL77oUIYnn3wS4eHhUKlUqK+vf+jxlUhMTISLiwsGDx6MIUOGYPTo0Xj88cfh6uqKMWPGoKioqNn6pUuXwsnJCUFBQXj22Wfxj3/8A2azGTk5OVi+fDlcXFzQv39/zJ07t9mZnKenJxISEuDo6IiePXu2mCU4OBi+vr5QqVTw8/PDpEmTcObMmWZrFi9ejJ49e8LPzw9+fn4oLi4GAOzduxdLly6Fj48PHBwc4Ofnhz59+uD48eN47LHHMG3aNDg6OsLf3x+RkZE4cuRIq8/Jhg0bEBgYiMmTJ8PDwwOvvvrqA2sOHTqEadOmwd/fH05OTnj55Zfx1Vdf4fLly4qf+/vl5OTg2WefxejRo/HII4/gN7/5De7cuYOCggLLmoSEBGi12lafPwBYsGABevfu/dA1snLs7gA/RVu2bMEzzzxj2S4sLMS9e/cQEhJi+bOmpiZotVoAgNFoxIYNG3D27Fk0NDRACAE3N7cOZejXr5/l95WVlQ89vhJ9+/a1/L5Hjx4PbF+7ds2y7ebmhl69elm2vb29UV1djevXr8NkMsHb27vZz+6/kXJ/7tb897//xVtvvYVvvvkGJpMJjY2NiIqKajWvs7Mzbt26BQC4evUqBgwY8MA+r1y5gsLCQsslLvD9pWpsbGyrOVavXo0ZM2Y8NGt1dTX8/f0t248++ih69+6Nqqoq9O/f/+GDtrK/+58/lUoFrVbb7DlU8rq25bWXDUvNBvr16wcnJyecPn0ajo4PPqVvv/02HBwc8Mknn6BPnz44duwY1q1b1+r+nJ2dcefOHcu22WxGbW1tszUODg6Kj29rdXV1uHXrlqXYDAYDBg8ejD59+uCRRx5BZWUlBg0aZPnZ/TdS7s/d0jYALF++HLNnz8Z7772HHj164A9/+AOuX7+uKFu/fv1QXl6OIUOGNPtzrVaLp556CpmZmW2a1RpPT09cuXLFsn3r1i3cuHGj3TePPD09cfHiRcu2EMLqc9gSJWtkxctPG/D09MTo0aPxxhtvoL6+Hk1NTSgvL7dcMjU0NKBXr15wc3NDVVUV3nvvvWaP79u3LyoqKizbv/jFL3D37l0cP34cJpMJ27Zte+ibu9aO3xkyMjLQ2NiIs2fP4vjx44iKioJarUZUVBQ2bdqE+vp6XLlyBZmZmQ89G9JoNKiqqmo2X0NDA372s5+hR48eKCwsRHZ2tuJcM2bMwObNm1FWVgYhBIqLi3H9+nWMHTsWZWVlOHDgAEwmE0wmEwoLC5u9Cd8eMTEx2LdvH86fP4/Gxka88847GDZsmOUs7cevrTXR0dH497//jVOnTsFkMuGDDz6Ak5MTAgICOpTz/xOWmo28+eabMJlMmDhxIp566iksWbIENTU1AL5//6eoqAiBgYFYsGABIiIimj12wYIF2LZtGwIDA/H+++/D1dUVKSkpWL16NcaMGQNnZ2erl20PO76t9e3bF25ubggNDcWKFSuQmpqKJ554AgCwZs0aODs7Izw8HPHx8dDr9Zg2bVqr+xo1ahQGDRqEkJAQBAcHAwBSUlKQnp6OgIAAbNmyBdHR0YqzzZ07F9HR0Zg3bx5GjBiB1157DXfv3oWLiwvef/995OTkIDQ0FCEhIXjrrbc6fCfw6aefxtKlS/HSSy8h
JCQEFRUVzT6Yu3jxYiQlJSEwMFDR3VYfHx/88Y9/xPr16zFq1Cj861//wvbt2+Hk5NShnP+fOAjBfySSlMvPz8fKlSuRm5vb3VGIWsQzNSKSitVSS0tLQ1hYGHx9fZu9gXk/s9mMtWvXIjw8HBMmTMDevXttHpSISAmrpTZ+/Hjs3r0bjz32WKtrDh06hPLychw9ehQfffQRMjIy2v05HbJvwcHBvPQku2a11AIDA61+5iUnJwczZsyASqWCu7s7wsPDcfjwYZuFJCJSyibvqRkMhmYfGNRqtbh69aotdk1E1Ca8UUBEUrHJx8+1Wi0qKysxbNgwAA+euSl1/XoDmprk+ISJRuMCo7G+u2PYDOexXzLNolI5oE+fRzu0D5uUWlRUFPbu3YuIiAjcuHEDx44dw+7du9u8n6YmIU2pAZBqFoDz2DOZZukoq5efGzZswJgxY3D16lXMnTsXkyZNAvD9v+pw7tw5AN//e1H9+/dHREQEZs6ciRdffBGPP/545yYnImqBXX2jwGisl+b/OB4erqipudndMWyG89gvmWZRqRyg0bh0bB82ykJEZBdYakQkFZYaEUmFpUZEUmGpEZFUWGpEJBWWGhFJhaVGRFJhqRGRVFhqRCQVlhoRSYWlRkRSYakRkVRYakQkFZYaEUmFpUZEUmGpEZFUWGpEJBWWGhFJhaVGRFJhqRGRVFhqRCQVlhoRSYWlRkRSYakRkVRYakQkFZYaEUmFpUZEUmGpEZFUWGpEJBWWGhFJhaVGRFJhqRGRVFhqRCQVlhoRScVRyaLS0lIkJSXhxo0b6N27N9LS0jBw4MBma4xGI1599VUYDAaYTCaMGjUKq1evhqOjokMQEdmEojO1lJQUxMfH48iRI4iPj0dycvIDa7Zv344nnngChw4dwqFDh/C///0PR48etXlgIqKHsVpqRqMRRUVF0Ov1AAC9Xo+ioiLU1tY2W+fg4ICGhgY0NTWhsbERJpMJXl5enZOaiKgVVq8NDQYDvLy8oFarAQBqtRqenp4wGAxwd3e3rFu0aBFeeuklhISE4Pbt25g1axZGjhzZpjAajUsb49s3Dw/X7o5gU5zHfsk0S0fZ7A2vw4cPw9fXFzt37kRDQwMSExNx+PBhREVFKd6H0ViPpiZhq0jdysPDFTU1N7s7hs1wHvsl0ywqlUOHT26sXn5qtVpUVVXBbDYDAMxmM6qrq6HVaputy8rKQmxsLFQqFVxdXREWFob8/PwOhSMiaiurpabRaKDT6ZCdnQ0AyM7Ohk6na3bpCQD9+/dHbm4uAKCxsRGnTp3C4MGDOyEyEVHrFN39TE1NRVZWFiIjI5GVlYW1a9cCABITE3Hu3DkAwKpVq/Dll18iJiYGU6ZMwcCBAzFz5szOS05E1AIHIYTdvInF99TsF+exXzLN0iXvqRER/ZSw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqikqttLQUcXFxiIyMRFxcHMrKylpcl5OTg5iYGOj1esTExODatWu2zEpEZJWjkkUpKSmIj4/H5MmTcfDgQSQnJ2PXrl3N1pw7dw5//vOfsXPnTnh4eODmzZtwcnLqlNBERK2xeqZmNBpRVFQEvV4PANDr9SgqKkJtbW2zdR9++CHmzZsHDw8PAICrqyt69OjRCZGJiFpntdQMBgO8vLygVqsBAGq1Gp6enjAYDM3WlZSUoKKiArNmzcJzzz2HrVu3QgjROamJiFqh6PJTCbPZjAsXLiAzMxONjY2YP38+vL29MWXKFMX70GhcbBXHLnh4uHZ3BJviPPZLplk6ymqpabVaVFVVwWw2Q61Ww2w2o7q6
Glqtttk6b29vREVFwcnJCU5OThg/fjwKCwvbVGpGYz2amuQ4u/PwcEVNzc3ujmEznMd+yTSLSuXQ4ZMbq5efGo0GOp0O2dnZAIDs7GzodDq4u7s3W6fX65GXlwchBEwmE06fPg0/P78OhSMiaitFH+lITU1FVlYWIiMjkZWVhbVr1wIAEhMTce7cOQDApEmToNFoMHHiREyZMgWDBg3C9OnTOy85EVELHIQdvZvPy0/7xXnsl0yzdMnlJxHRTwlLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqSgqtdLSUsTFxSEyMhJxcXEoKytrde2lS5cwfPhwpKWl2SojEZFiikotJSUF8fHxOHLkCOLj45GcnNziOrPZjJSUFISHh9s0JBGRUlZLzWg0oqioCHq9HgCg1+tRVFSE2traB9bu2LEDY8eOxcCBA20elIhICUdrCwwGA7y8vKBWqwEAarUanp6eMBgMcHd3t6wrLi5GXl4edu3aha1bt7YrjEbj0q7H2SsPD9fujmBTnMd+yTRLR1ktNSVMJhPWrFmD119/3VJ+7WE01qOpSdgiUrfz8HBFTc3N7o5hM5zHfsk0i0rl0OGTG6ulptVqUVVVBbPZDLVaDbPZjOrqami1WsuampoalJeXY8GCBQCAuro6CCFQX1+P9evXdyggEVFbWC01jUYDnU6H7OxsTJ48GdnZ2dDpdM0uPb29vZGfn2/ZzsjIwK1bt/DKK690TmoiolYouvuZmpqKrKwsREZGIisrC2vXrgUAJCYm4ty5c50akIioLRyEEHbzJhbfU7NfnMd+yTSLLd5T4zcKiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSiqOSRaWlpUhKSsKNGzfQu3dvpKWlYeDAgc3WbNmyBTk5OVCr1XB0dMSyZcsQGhraGZmJiFqlqNRSUlIQHx+PyZMn4+DBg0hOTsauXbuarRk2bBjmzZsHZ2dnFBcXY/bs2cjLy0PPnj07JTgRUUusXn4ajUYUFRVBr9cDAPR6PYqKilBbW9tsXWhoKJydnQEAvr6+EELgxo0btk9MRPQQVkvNYDDAy8sLarUaAKBWq+Hp6QmDwdDqYw4cOIABAwagX79+tktKRKSAosvPtjhz5gw2b96MDz74oM2P1WhcbB2nW3l4uHZ3BJviPPZLplk6ymqpabVaVFVVwWw2Q61Ww2w2o7q6Glqt9oG1BQUFWLlyJbZu3QofH582hzEa69HUJNr8OHvk4eGKmpqb3R3DZjiP/ZJpFpXKocMnN1YvPzUaDXQ6HbKzswEA2dnZ0Ol0cHd3b7ausLAQy5YtQ3p6Ovz9/TsUioiovRyEEFZPjUpKSpCUlIS6ujq4ubkhLS0NPj4+SExMxJIlSzB06FBMmzYNV65cgZeXl+Vxb775Jnx9fRWH4Zma/eI89kumWWxxpqao1LoKS81+cR77JdMsXXL5SUT0U8JSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKiw1IpIKS42IpMJSIyKpsNSISCosNSKSCkuNiKTCUiMi
qbDUiEgqLDUikgpLjYikwlIjIqmw1IhIKopKrbS0FHFxcYiMjERcXBzKysoeWGM2m7F27VqEh4djwoQJ2Lt3r62zEhFZpajUUlJSEB8fjyNHjiA+Ph7JyckPrDl06BDKy8tx9OhRfPTRR8jIyMDly5dtHpiI6GEcrS0wGo0oKipCZmYmAECv12P9+vWora2Fu7u7ZV1OTg5mzJgBlUoFd3d3hIeH4/Dhw5g/f77iMCqVQztGsF+cx77JNI8ss9hiDqulZjAY4OXlBbVaDQBQq9Xw9PSEwWBoVmoGgwHe3t6Wba1Wi6tXr7YpTJ8+j7Zpvb3TaFy6O4JNcR77JdMsHcUbBUQkFaulptVqUVVVBbPZDOD7GwLV1dXQarUPrKusrLRsGwwG9OvXz8ZxiYgezmqpaTQa6HQ6ZGdnAwCys7Oh0+maXXoCQFRUFPbu3YumpibU1tbi2LFjiIyM7JzUREStcBBCCGuLSkpKkJSUhLq6Ori5uSEtLQ0+Pj5ITEzEkiVLMHToUJjNZqxbtw4nTpwAACQmJiIuLq7TByAiup+iUiMi+qngjQIikgpLjYikwlIjIqmw1IhIKl1aarJ9MV7JPFu2bMGkSZMQGxuLqVOn4j//+U/XB1VIyTw/uHTpEoYPH460tLSuC9hGSufJyclBTEwM9Ho9YmJicO3ata4NqoCSWYxGIxYsWICYmBhERUUhNTUV9+7d6/qwCqSlpSEsLAy+vr64ePFii2va3QWiCyUkJIgDBw4IIYQ4cOCASEhIeGDN/v37xbx584TZbBZGo1GEhoaKioqKroypmJJ5cnNzxa1bt4QQQpw/f16MHDlS3L59u0tzKqVkHiGEuHfvnpg9e7Z4+eWXxRtvvNGVEdtEyTyFhYUiOjpaVFdXCyGEqKurE3fu3OnSnEoomWXDhg2W16OxsVFMnz5dfPrpp12aU6kvvvhCVFZWinHjxokLFy60uKa9XdBlZ2o/fDFer9cD+P6L8UVFRaitrW22rrUvxtsbpfOEhobC2dkZAODr6wshBG7cuNHVca1SOg8A7NixA2PHjsXAgQO7OKVySuf58MMPMW/ePHh4eAAAXF1d0aNHjy7P+zBKZ3FwcEBDQwOamprQ2NgIk8kELy+v7ohsVWBg4APfSvqx9nZBl5Xaw74Y/+N1Hf1ifFdQOs/9Dhw4gAEDBtjl18eUzlNcXIy8vDzMmTOnG1Iqp3SekpISVFRUYNasWXjuueewdetWCDv76KbSWRYtWoTS0lKEhIRYfo0cObI7IttEe7uANwq6yJkzZ7B582a8/fbb3R2l3UwmE9asWYO1a9da/gP7qTObzbhw4QIyMzPxl7/8Bbm5uTh48GB3x2qXw4cPw9fXF3l5ecjNzcXZs2ft8iqns3VZqcn2xXil8wBAQUEBVq5ciS1btsDHx6eroyqiZJ6amhqUl5djwYIFCAsLw86dO7Fnzx6sWbOmu2K3Sunr4+3tjaioKDg5OcHFxQXjx49HYWFhd0RuldJZsrKyEBsbC5VKBVdXV4SFhSE/P787IttEe7ugy0pNti/GK52nsLAQy5YtQ3p6Ovz9/bsjqiJK5vH29kZ+fj4+//xzfP755/j1r3+NmTNnYv369d0Vu1VKXx+9Xo+8vDwIIWAymXD69Gn4+fl1R+RWKZ2lf//+yM3NBQA0Njbi1KlTGDx4cJfntZV2d4FNb2lY8e2334rp06eLiIgIMX36dFFSUiKEEGL+/PmisLBQCPH9nbXk5GQxfvx4MX78ePG3v/2tKyO2iZJ5pk6dKoKDg0VsbKzlV3FxcXfGbpWSee6Xnp5u13c/lcxjNpvFxo0bRVRUlJg4caLYuHGjMJvN3Rm7RUpm+e6778ScOXOEXq8X0dHRIjU1VZhMpu6M3ar169eL0NBQodPpxDPPPCMmTpwohLBNF/AL7UQkFd4oICKpsNSISCosNSKSCkuNiKTCUiMiqbDUiEgqLDUikgpLjYik8n+8bv1YAhmRqQAAAABJ
RU5ErkJggg==","text/plain":["
"]},"metadata":{},"output_type":"display_data"},{"data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAv0AAAGvCAYAAADIaGp1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOzdeVyVZf74/9c5h3NAAcUUccNUxjBDczsqmqLUTCYiIFpWmoiJ69dSCRcUDQFRUVHSwW2s3FABRcwcpzG1z4hkuX5sssKVNEFx4yDr4fcHP++PyHLARFTez8fDx3Du5bqu+31Oj3nf930tqsLCwkKEEEIIIYQQzy11dTdACCGEEEIIUbUk6RdCCCGEEOI5J0m/EEIIIYQQzzlJ+oUQQgghhHjOSdIvhBBCCCHEc06SfiGEEEIIIZ5zkvQLIYQQQgjxnDOr7gYIIarXzZsGjEZZrqMs9etbceNGZnU346kmMTJNYlQ+iY9pEiPTanqM1GoV9epZlrlfkn4hajijsVCSfhMkPqZJjEyTGJVP4mOaxMg0iVHZJOl/Djg6OnLs2DEsLcu+uwPw8PBg69atWFhYVHmb/v3vf/PDDz8wbdq0EvuSk5NZsGAB8fHxVd6OJ2HZsmW0bt2a/v37k5ycTF5eHq+99hoA165dw9/fnw0bNlRJ3bt27eIf//gH2dnZqFQq2rRpwyeffEKTJk0qXEbktClkpKVXql6rxk3wDwmvbHOFEEIIUU0k6a9BEhISnlhdr7/+Oq+//voTq686ffTRR8rf33//PVlZWUrSb2dnV2UJ//bt21m/fj0rV66kRYsWQNEN1fXr1yuV9Huo1OSoKje8Z/PVK5U6XgghhBDVS5L+auDo6MjkyZP517/+xa1btwgICODNN98kNTUVb29vkpOTAUp8/vbbb4mKiiI/Px+1Wk14eDht2rQpVva5c+cICwvj5s2b5OXlMWLECLy9vZV6778RSElJITQ0lPT0oie8vr6+eHl5ldre9PR0pkyZgsFgICcnBxcXFwICAgDIzc1l6dKlfPfdd6jVauzt7VmxYgXx8fEcOHCA5cuXA7B06VL27NmDnZ0d7dq1Mxmja9euERISwoULFwAYMGAAY8aM4fr168yZM4dLly4BMGrUKDw9PQFwdXXF3d2dI0eOcO3aNaZOncqNGzfYvXs3t2/fZv78+XTp0kWJ66BBgzh69Cg5OTnMmTOHLl26ALBz507WrVsHQPPmzQkODqZ+/focO3aMefPmYTQayc/PZ9y4cQwYMIDp06fj5OSEXq8nJiYGo9HI4cOHcXNzo3///sW+w0OHDrFkyRIKCgp44YUXCA4O5sUXXyQ5OZmwsDBeffVVjh8/jkqlYunSpTg4OJQZo88++4yQkBAl4Qfo1q2bydgKIYQQFVFYWEhm5m3u3cvEaCyo7uaYlJamxmg0VnczqpyZmY569WzRaCqXxkvSX02srKyIi4vjxx9/5OOPP+bNN98s9/jz588za9YsNm3aRIsWLcjNzSU3N7fYMfn5+fj7+7No0SIcHBzIzMzE29ubDh06FEse8/PzGT9+PB9//DFvvfUWADdv3iyz7jp16hAdHY2lpSV5eXmMGjWKQ4cO0bt3b1avXs3ly5eJj49Hp9ORkZFR4vz9+/ezf/9+du7ciYWFBRMmTDAZH39/f1xcXIiKigJQyg0JCaF169asWLGCtLQ0Bg0aRNu2bXnppZeAopuQrVu3curUKT744AM++eQTYmNj2bNnD4sXL2bLli0A3Lp1C0dHR6ZNm8b333/PlClT+Oabb7hw4QIRERHEx8fTsGFDIiMjmTdvHpGRkaxZs4YRI0bg6elJYWEhd+/eLdZmR0dHhg4dSlZWltKtKTU1Vdl/48YNAgIC2LhxI3/5y1/Yvn07/v7+bN++HYDffvuN+fPnExwczN///n
dWrlzJ4sWLS43PjRs3+OOPP3j11VdNxrIq6LQabG2tq6Xu6lCTrvVRSYxMkxiVT+Jj2pOO0cWLFyksNNKwYWM0GjNUKtUTrV+UVJR/3CYr6yatWrWq1LmS9FeT/v37A9ChQwfS0tLIyckp9/jDhw/Tu3dv5amuTqdDp9MVO+bChQukpKQwZcoUZVteXh7nzp0rlvSfP3+e/Px8JeEHqFevXpl1FxQUsHDhQo4fP05hYSHXr1/n559/pnfv3nz77bdMnz5dacsLL7xQ4vzk5GT69++vjDkYPHgwK1euLLM+g8HA8ePHWb9+vbLtfrlJSUlMnz4dgIYNG+Li4kJycrKS9N+P6yuvvMK9e/eUa3RyclLeDgBotVoGDhwIQNeuXbGwsODcuXMcPXoUFxcXGjZsCMDQoUPx8PAAip6ir169mitXrtCzZ89KJ9wnT56kTZs2/OUvfwHA29ubTz/9lMzMopkGWrZsSdu2bYGi38W3335bZlmFhdU7UCk3r4D09LumD3wO2Npa15hrfVQSI9MkRuWT+JhWHTG6cycTO7tmgJqCgkLg6R4ka2amJj//+X/SX6uWNdeu3Szxe1CrVdSvb1XmeZL0VxNzc3MANBoNUPT03czMrFgy9+CNQEWSvMLCQurVq2ey735lE8b169dz584dtm/fjrm5ObNnz1baVtF2PU4PP2l48PPDcb3/Wa1Wk5+fX24bVSqV8r+l8fHxwdXVlcOHDzNv3jx69uzJ5MmTK9zu8soGit3EmWpvgwYNsLOz49SpU8r4ASGEEOLxKkRVyTFfouo96hsX+SafIg0aNCAvL4+LFy8CsHv3bmXfa6+9xqFDh5Q+7rm5ucoT4vtatmyJhYUFO3fuVLalpKSUOK5Vq1aYmZnx9ddfK9vK695z9+5dbG1tMTc359q1a/z73/9W9rm6uvLFF18oXY1K697j7OzM119/TVZWFgUFBcTFxZUbB0tLSzp27Mjnn3+ubLtfrrOzM1u3bgWKxhocPHjwkfqx5+XlkZiYCMAPP/xATk4OLVu2xNnZmYMHDypjHbZt20aPHj2AojckzZs3Z+jQoXzwwQecPn26RLlWVlYluv3c17FjR/773/+SkpICwI4dO2jbti1WVmXflZdn/PjxhIeHF3uD8d1333Hy5MlKlZNQaGRzJf9ZNa74QGEhhBBCVD950v8UMTMzIzAwkJEjR9K0adNiyWyLFi2YN28ekydPpqCgAI1GQ3h4OI6OjsXOj46OJiwsjHXr1mE0Gqlfvz6RkZEl6lm5ciXBwcGsXLkSlUqFr6+vMiD2YcOHD+ejjz7C09OTRo0a4ezsrOzz8/Nj8eLFeHp6otVqefHFF5XBu/f17duXEydO4OnpScOGDenWrRvXrl0rNxYRERF8+umnDBgwALVazYABA/Dz82PWrFkEBQXh7u4OFPX9b926dUXCW4yNjQ0XL15kyJAhZGdns2TJEnQ6Ha1bt2bq1Kn4+voCYG9vT3BwMAAbNmwgOTkZrVaLTqdj1qxZJcp94403SEhIwMPDQxnIe98LL7zAwoUL8ff3Jz8/nxdeeIFFixZVuu33DR06FAsLCyZNmkR2djZqtVqZsrMyPl6wROY1FkIIUSERs6aTWQUzuFV0Kmgfn/dYteofmJtX/fTjpdmzJxEnp/Y0b/5itdT/Z6gKq7tzsBBP2MOzItV0N25kStJfDulrbJrEyDSJUfkkPqZVR4z++OMijRoVT27njvqA96qgy8/mQiNz1335p8qo6j79BQUFfPTRON59dzg9e/aqsnoqorTvRvr0CyGEEEKI58Jrr3Vh375D1K5dm8GD3fnb397ixx+Pkp6exoQJk7hx4wb/+tde7ty5w8yZc3j11Y5cvXqFDz8czltvuXPy5DFycnKYOnU6r77aEYCvv97Nli0bUKlUNGnSjICAmdSr9wJ79iTyzTf7qFfPhvPnz/Pmm29x9ux/iYyMYM2avzNhwke88EJ9Fi8OJzv7Hrm5uQwc6M
Xbb78HQGjoXHQ6HZcvXyIt7RqvvNKOWbM+RaVSkZmZyfLli/n5559QqdS8+moHpkyZRl5eHqtXr+TEiR/Jy8vHwcGBqVNnULt27T8dO0n6hWLs2LFcvXq12LbGjRsTHR1dJfUdPHiQJUuWlNg+ZcoUXFxcqqROgGbNmj0zT/mrK0ZCCCHEsyAvL49Vq9bz3/+e4f/9vzGMGzeJNWu+5N///hfR0Z/x978Xrbtz+/ZtHBz+wsSJH3P8+I/MnRvI1q07SU29RHT0Z6xbt5EGDRqwZs3fWbp0EcHB8wE4ffoEn3++haZNmwHw3XcHiz3pz8oyEBm5Ep1OR1ZWFn5+I+ja1ZkWLVoCcO5cCpGRK1Gr1Ywc+T4//JCMXt+d5csXU6tWLT7/fAtqtZpbt24BsGnTF1haWrJmTdFbj5Url7Nhw3rGjDE93bkpkvQLRVUl92VxcXGRxNUEiZEQQghRttdf/ysAL73UhuzsbF5//W8AtGnzMr///n9r5Wi1Wt58s2icXceOnTE3N+fSpYucOPEjzs49adCgAQAeHoPw8XlPOa9duw5Kwl+a7OxsPvssnN9++wWVSs316+n89tsvStLfq1cfZSZBR0dHfv89Fb0eDh/+jrVrN6JWF3WVsrGxAeA//zmEwWDgwIH9AOTl5fKXv1R+7GJpJOkXQgghhBDPpPvTXd+fqvv+Z7VaTUFBRabqLm0q8P/7u3btWuXWv2rVCl54oT7/+McmzMzMmDx5QrHFU83NH5yOW0NBQfkrGxcWwtSp0+ncWV/ucY9Ckn4hSuHo6MixY8eUBcWqS2FhIV9++SXbtm2jsLAQo9FIly5dCAgIoE6dOqSnp7No0SJ+/PFHatWqhZmZGe+99x5vv/12heuInDaFjLT0SrWrorMsCCGEEE+DvLw8/vWvvbz5Zn9OnjxObm4uzZu/iEqlYtOmL7hx4zr16zcgMXEnXbp0LbMcS0tLDIb/mwo9M/MuDg6tMTMz49y53zh58gR//Ws/k+3p0aMXW7Z8yccff4JKpeLWrVvY2Njw2mu92bp1E05O7TA3tyAry0BaWpry5uDPkKRfiGp2f2G20kRGRnL06FG++OILGjRogNFo5JtvvuH27dtotVqGDRvGoEGDCA8PR61Wc+fOHfbs2VOp+j1UanIqORPD5iqYrk0IIYSoKnXr1iU19TKjR48gJyebuXND0Wq1tGrlwJgxE5g8ecL/P5C3KZ98MrPMcgYOHMSKFZFs2bKB8eM/YsSIUcybF8S+fV/TtGlTOnToWKH2/L//N4XlyxczfPg7aDQaOnbsxMcff8KwYT6sW7eKDz/84P/v+qPC13f0Y0n6ZcpOIUpx/0l/rVq1CA8P5/r164SHhxMUFIROp+PChQtcvnyZv/71r/Tt25eoqCj++OMPRowYwYgRIwA4d+4cYWFh3Lx5k7y8PEaMGIG3t7dS/ieffMLBgwfp3LkzH3/8cYk2GAwGnJ2d2blzJ61atSqxf/v27cTGxiqLlT2qH0aPJaeST/ofx9RqzwqZStA0iZFpEqPySXxMe1qm7KzuefrLU9aUnfdn7/nqq3+XctazSabsFOIxysnJYcaMGTRt2pTFixcrff5+/fVXvvjiCwoKCnB1deXu3bts3LiR9PR0+vXrx+DBgzE3N8ff359Fixbh4OBAZmYm3t7edOjQAQcHBwCMRiMbNmwos/6UlBR0Ol2pCT/AmTNnaN++/eO/8ArQaTXY2lpXS93VoSZd66OSGJkmMSqfxMe0Jx2jtDQ1ZmbF3wRPD1/4RNtQWQ+3F0CjKXpiXtq+Z5Vara7070GSfiHK8OGHH+Lm5saoUaOKbX/jjTeUgUItW7bExcUFtVqNnZ0dderU4Y8//qCwsJCUlBSmTJminJeXl8e5c+eUpN/Ly6vc+k29hKvOl3S5eQU15qmcPIE0TWJkmsSofBIf06ojRkajsUoXu3rcynrS37BhI7766ptn6lpMMRqNJX4P8qRfiEfUrVs3vvvuO9
59991ii2Lcn3oLimYLePhzQUEBKpWKevXqkZCQUGb5phba+Mtf/kJOTg7nz5+nZcuSffmcnJyIi4urzCUJIYQQooZ6ft5zCPGYTZw4kR49evDhhx+SmZlp+oQHtGzZEgsLC3bu3KlsS0lJqVQ5lpaW+Pj4EBQUxI0bN4Cip/u7du3i0qVLuLm5kZGRwdq1a5Wn/nfu3OGLL76oVFsTCo1sruQ/q8ZNKlWHEEKIZ5GKwsLn5+n48+JR3/TLk34hyuHn54eFhQU+Pj6sXbu2wueZmZkRHR1NWFgY69atw2g0Ur9+fSIjIytV/5QpU/j8888ZPnw4UPQfepcuXejTpw+1a9dmw4YNLFq0iNdffx1LS0tlys7K+HjBEoxGGc8vhBCiOJ3Oglu3rmNtXQ+NxqzEfPbiySssLMRguIOZmc70wQ+R2XuEqOFu3MiUpL8c0tfYNImRaRKj8kl8TKuOGBUWFpKZeZt79zIxGstfVOppoFarMRqf/zcTZmY66tWzRaMp/uxe+vQLIYQQQohKU6lUWFvbYG1tU91NqRC5eSyfJP1CVLPt27ezcePGEtvDw8N5+eWXq6FFQgghhHjeSNIvRDUbMmQIQ4YMqe5mCCGEEOI5Jkm/EDVc5LQpZFRyRd7HsXKiEEIIIZ4cSfqFeIijoyPHjh3D0tKyWttRWFjIxo0b2bp1K/n5+VhYWFC/fn0mTJhAp06dlOM2bdpEcHAwO3fufKTuQB4qNTmqys3eu7kKlmAXQgghRNWRpF+IapSfn4+ZWen/GUZGRnL06FHWrVuHnZ0dAElJSfzyyy/Fkv64uDi6d+9OXFwcs2bNeiLtFkIIIcSzRZJ+IcpgNBoJDw/n+vXrhIeHExQUhE6n48KFC1y+fJm//vWv9O3bl6ioKP744w9GjBjBiBEjADh37hxhYWHcvHmTvLw8RowYgbe3N1D0JuGTTz7h4MGDdO7cmY8//rhE3QaDgX/84x8kJCQoCT+As7Mzzs7OyuezZ89y8+ZNoqKi8Pb2JiAgAJ2u8nP3VpZOq8HW1rrK63la1KRrfVQSI9MkRuWT+JgmMTJNYlQ2SfqFKEVOTg4zZsygadOmLF68WFmQ5Ndff+WLL76goKAAV1dX7t69y8aNG0lPT6dfv34MHjwYc3Nz/P39WbRoEQ4ODmRmZuLt7U2HDh1wcHAAim4oNmzYUGb9KSkpmJub06pVq3LbGRsbi6enJ02bNuXll1/mm2++oX///o8vEGXIzSuoMdOiyRRwpkmMTJMYlU/iY5rEyLSaHiOZp1+IR/Dhhx/i5ubGqFGjim1/4403lCfpLVu2xMXFBbVajZ2dHXXq1OGPP/6gsLCQlJQUpkyZopyXl5fHuXPnlKTfy8urUu25c+cOw4cPJzc3FwcHBz777DNyc3PZvXs3W7duVcqMi4t7Ikm/EEIIIZ4tkvQLUYpu3brx3Xff8e6771K7dm1lu7m5ufK3RqMp8bmgoACVSkW9evVISEgos/wHyyyNg4MDOTk5XLhwgRYtWlCnTh0SEhL49ttv+cc//gHA/v37yczMxMfHByh6e3D9+nWuXr1K48aNH+WyhRBCCPGcqtyUHULUEBMnTqRHjx58+OGHZGZmVurcli1bYmFhwc6dO5VtKSkplSrH0tKSkSNHMmvWLK5du6Zsv3fvnvJ3XFwcQUFB7N+/n/3793PgwAEGDRrEjh07KtXehEIjmyv5z6pxk0rVIYQQQojqJU/6hSiDn58fFhYW+Pj4sHbt2gqfZ2ZmRnR0NGFhYaxbtw6j0Uj9+vWJjIysVP2TJ0/miy++wNfXl4KCAurVq0edOnWYMGEC165d4+jRoyxevLjYOe7u7syYMYNx48Yp4xBM+XjBEozGwkq1TQghhBDPFlVhYaH8v70QNdiNG5mS9Jejpg8MqwiJkWkSo/JJfEyTGJlW02NkaiCvdO8RQgghhBDiOSfde4SoRtu3b2fjxo0ltoeHhz/S6rpCCCGEEKWRpF+IajRkyB
CGDBlS3c0QQgghxHNOkn4harjIaVPISEv/0+VYNW6Cf0j4Y2iREEIIIR436dP/BDg6OmIwGPDw8CA7O7tK6nB1deWXX3555PPv3LnDmjVrim0LDAzkhx9+KPX46dOnl9ot5Xlz7do1hg8frnyOiooiNzdX+bxs2TL27NlTJXVfv36d6dOn8/rrr+Ph4YG7uzvBwcHcvVt8kNLgwYPx8PB45Ho8VGreewz/Mq9e+bOXLIQQQogqIkn/E5SQkICFhUV1N6NUd+7cKTEtZWhoKF26dKmmFj0d7Ozs2LBhg/L5s88+Iy8vT/n80UcfVckKuPfu3eP999/nxRdfZN++fSQkJBAbG0v9+vW5ceOGctyvv/7KjRs3uHfvHmfOnHns7RBCCCHE80G691SBffv2sWTJEmxsbOjdu7ey3dHRkWPHjmFpaYmrqyseHh4cPnyY9PR0fH19GTZsWInjHv58/PhxFi5ciMFgACAgIIDXXnutWP1paWmEhIRw5coVcnJycHNzY+zYsQCcOnWK0NBQsrKyqF27NoGBgbRv3155guzh4UGtWrWIiYlh+PDh+Pr60rdvX65du0ZAQAA3b96kWbNmFBQUKPVlZmYyf/58zp49S05ODt26dWPGjBloNJoyYxQbG8uXX34JgFarZdWqVTRo0ICdO3eybt06AJo3b05wcDD169cnPj6e3bt3Y21tzdmzZ7Gzs2P27NksXLiQixcv4uTkREREBCqViunTp2NmZkZqaipXr15Fr9cTFBSETqfj+vXrzJkzh0uXLgEwatQoPD09MRqNBAcHc+TIEXQ6HbVr1yYmJobU1FS8vb1JTk7m008/BWDo0KGo1Wo2bNhAWFgYTk5ODBs2DIPBQEhICKdPnwZg4MCB+Pn5ATB8+HCcnJw4ceIEaWlpvPXWW/j7+5cZn927d2NjY8O4ceOUbebm5kyYMKFEHD08PNDpdMTFxfHKK6+UWWZV02k12NpaV1v9Vel5va7HSWJkmsSofBIf0yRGpkmMyiZJ/2N248YNZs+ezZYtW2jVqlWJLjMPys7OZuvWraSmpuLu7o6Xl5eS6Jfm1q1bTJw4kaioKDp16kRBQUGpq7xOmzaN8ePHo9fryc3NxcfHh3bt2qHX65k0aRJhYWH06NGDpKQkJk2axL59+wgKCsLb25uEhIRS6w4JCUGv1zNx4kQuX77MwIED6dWrFwDz589Hr9cTGhqK0WjE39+fuLg43n777VLLSk5OZtWqVWzevBlbW1sMBgNmZmb88ssvREREEB8fT8OGDYmMjGTevHnKolanT58mMTGRRo0aMWbMGKZOncrGjRupVasWXl5eJCUl0aNHDwBOnjxJTEwM5ubm+Pn5sW3bNoYNG0ZISAitW7dmxYoVpKWlMWjQINq2bUt+fj5JSUl8/fXXqNVqbt++XaLdc+bMYfPmzcTExJT6Pa1cuRKj0UhiYiIGg4F33nkHR0dHXFxcALh69SqbNm3CYDDwxhtvMHjwYFq0aFFqjM6cOUP79u1L3XdfXl4eiYmJxMTEoNVq8fT0ZPr06eh0unLPqyq5eQXP5fzINX3e54qQGJkmMSqfxMc0iZFpNT1GMk//E3bixAnatm1Lq1atAHjnnXfKPPZ+t5BmzZpRp04d/vjjD5NlOzg40KlTJwA0Gg1169YtdkxWVhbff/89ISEheHh4MGTIENLS0khJSeH8+fNotVolMXZ2dkar1XL+/HmT15WcnKzMMmNvb4+zs7Oyb//+/axbtw4PDw+8vLw4c+ZMuWUeOHAADw8PbG1tAbC0tMTc3Jzk5GRcXFxo2LAhUPREPSkpSTmvU6dONGrUCICXX36Zzp07Y21tjZmZGW3atOHixYvKsf3798fS0hIzMzM8PT05cuQIAElJSQwdOhSAhg0b4uLiQnJyMvb29hQUFBAYGMjOnTtNxqM0SUlJDBkyBJVKhZWVFW5ubsXa369fP9RqNdbW1jg4OChvGypi586deHh44Orqqowh+P
bbb2nZsiXNmzencePGtG3bln/961+P1HYhhBBCPN/kSf9jVpkFjs3NzZW/NRqN0mVGo9Eo5eTk5FSqbKPRiEqlIjY2Fq1WW2zfzz//jEqlKnFOadsqo7CwkJUrV2Jvb/+nyymvLQ/Hq6z4mSr34TpUKhXW1tZ89dVXJCcnk5SUREREBDt27PjT7X/wc0XbC9C2bVvi4+OVz56ennh6ejJp0iRlMHhcXBy//fYbrq6uQNENX1xcHG5ubpVqd0KhkYxCY6XOKY1V4yZ/ugwhhBBCVA1J+h+zjh07EhgYyIULF2jRogXbt2+vdBn29vacPn0aZ2dnEhMTi5U9a9Ysjh8/TseOHZXuPQ8+7beysqJz586sXr1a6f999epVzMzMaNWqFbm5uRw5coTu3btz5MgR8vPzadGiBVlZWWRnZ5Ofn4+ZWcmfRffu3YmLi2P8+PFcvny5WFcaV1dXVq9ezdy5c9FoNGRkZGAwGMq8Cejbty+BgYEMHTqUBg0aYDAY0Gq1ODs7s2bNGtLT07G1tWXbtm1KHZW1d+9eRowYgU6nY9euXfTt2xcoeruxdetWJk2aRHp6OgcPHsTHx4eMjAw0Gg29e/emZ8+eHDhwgMuXLytvHe6ztLQkMzOz1O49PXr0IDY2lk6dOmEwGNizZw8BAQGP1P4BAwawdu1aVq9ezahRo5QbwfsJf1paGkePHuXQoUNYWRW9ysvJyaFXr15cuXKFJk0qnoB/vGAJRmPFb1aFEEII8eyRpP8xq1+/PvPmzWPs2LHY2NjQr1+/Spcxc+ZMgoKCsLW1pU+fPsp2GxsboqKiCA8PJysrC7VazbRp00okxhEREcyfPx93d3egKFENDQ3F1taW5cuXFxvIu2zZMnQ6HTqdDnd3d9zd3albty4xMTHFygwMDCQgIIC9e/fSsmVLevbsWay9ixYtwsPDA5VKhVarZebMmWUm/V27dsXPz4+RI0eiUqnQ6XRER0fTunVrpk6diq+vL1B08xMcHFzp+AHo9XomTJjAlStX0Ov1yviCWbNmERQUpMTG39+f1q1bc+bMGWbPnk1+fj4FBQX07t2bDh06cOVK8WkofX19+eCDD7CwsCg2qw/A+PHjmTdvnlL2wIEDiw3krozatWuzceNGFi9ezF//+lfq1KmDhYUFTk5O9OrVix07dtC7d28l4YeiNwmvv/468fHxTJw48ZHqFUIIIcTzSVVYmf4oQjwDpk+frsyoI0y7cSNTnvSXo6YPDKsIiZFpEqPySXxMkxiZVtNjJAN5hRBCCCGEqOGke4+oMmPHjuXq1avFtjVu3Jjo6OgqrTc8PLxKy3+cqitGQgghhKhZJOkXVUYSV9MkRkIIIYR4EiTpL8XDK+I+a550n/YHV60tz7Vr1/D39y8xALaqbNmyhZycHHx8fErsi4+P58CBAyxfvvyx1lkV15iSkkL//v2ZMWNGsWspLCzkyy+/ZNu2bRQWFmI0GunSpQsBAQHUqVOnwuVHTptCRlr6Y2svFE3f6R/y7LxxEUIIIZ53kvSLJ8bOzu6JJfwA77777hOr677yrrGs6VBNiY2NVaZMfTDpj4yM5OjRo3zxxRc0aNAAo9HIN998w+3btyuV9Huo1OSoHu/wns1Xr5g+SAghhBBPjCT95TAajYSHh3P9+nXCw8PR6XQljrlx4wZTp07lxo0bQNE88DNnziQ+Pp7ExESsrKy4ePEiNjY2LFq0CDs7O86ePcunn37KvXv3yMnJ4e2331aSubt37xIWFsb//u//olKp6NKlC0FBQeTm5rJ06VKOHj1KXl4eL730EnPnzsXS0pJr164REBDAzZs3adasWbmLPsH/PZkfOnQoBw8e5N69e4SGhtKlSxeSk5NZsGCBsjDUw59jY2P58ssvAdBqtaxatapE+SdPniQiIgKDwQDApEmT6NOnT4k3AsePH2fhwoXKcQEBAbz22multvlRYhYVFUVWVhbTpk0jNzeXkJAQkpOTsbOzU1ZMLo
+rqytubm4cO3aMtLQ0RowYobw9WbBgAd9//z15eXnUq1ePsLAwmjZtWuIaHR0d+eSTTzh48CCdO3emd+/ezJs3D6PRSH5+PuPGjWPAgAFltiE/P5/ExEQ2bdrE6NGjOX36NO3atcNgMLB+/Xp27txJgwYNAFCr1fztb38zeV1CCCGEqHkk6S9DTk4OM2bMoGnTpixevLjMlWITExNp0qQJn3/+OQC3b99W9v3444/s3LmTVq1a8dlnnxEaGsry5ctp2rQpn3/+OTqdDoPBwJAhQ+jVqxcODg6EhYVRu3ZtEhISUKvVZGRkALB27Vqsra2JjY0FYNGiRaxevZrJkycTEhKCXq9n4sSJXL58mYEDB9KrV69yr+/WrVt06NCByZMns2vXLiIiIkrMzf+w5ORkVq1axebNm7G1tcVgMGBmZqYsGAVw584d5syZw+rVq2nYsCFpaWkMHjyY3bt3l6h/4sSJREVF0alTJ2WhsbI8SswetHXrVlJTU9m9ezf5+fm8//77NGvWrNzrBbh+/TqbNm3i+vXreHp60qVLF9q0acPo0aOZNm0aANu3byciIoKlS5eWWobRaFSe/o8bN44RI0bg6elJYWEhd++WP7XYgQMHePHFF3nxxRfx8vIiLi6Odu3akZKSgk6nq9DNS3XQaTXY2lpXdzMem+fpWqqKxMg0iVH5JD6mSYxMkxiVTZL+Mnz44Ye4ubkxatSoco979dVXWb9+PQsWLKBr167FnlR37txZScqGDBmiLNqUnZ3N3LlzOXv2LCqVirS0NH7++WccHBz49ttviY+PR60u6m7xwgsvALB//34yMzP55z//CUBubi5t2rQBipLxWbNmAUULWjk7O5u8vtq1ayur1Hbo0IEFCxaYPOfAgQN4eHhga2sLUOqYh+PHj5Oamsro0aOVbSqViosXL1KvXj1l24kTJ3BwcKBTp04AaDSaYisLP+xRYvag5ORkPD090Wq1aLVaBg4cyLFjx0xe8+DBgwFo0KABffr04fvvv6dNmzYcOnSIzZs3k5WVRX5+frlleHl5KX9369aN1atXc+XKFXr27Mmrr75a7rmxsbHK+Z6ennh6ejJjxgye9uU1cvMKnpu5kmv6vM8VITEyTWJUPomPaRIj02p6jEzN0y9Jfxm6devGd999x7vvvkvt2rXLPK5jx47s3LmTw4cPk5CQwOrVq9myZUuJ4woLC5W3BUuWLMHW1pbw8HDMzMzw9fUlJyen3PYUFhYyZ86cCiX0FfFgVyW1Wq0krhqNplhCaapdpbXT0dGRTZs2ldiXmppa7LjKeJSYPdyuP+v+d/j7778zf/58YmNjsbe359ixY/j7+5d53oO/Hx8fH1xdXTl8+DDz5s2jZ8+eTJ48udTzrl+/zn/+8x9+/vlnVq5cCcC9e/fYt28frq6u5OTkcP78eVq2bPmnr00IIYQQzzdZnKsMEydOpEePHnz44Yfldju5fPkyVlZWuLm5MWPGDM6cOYPRaATg2LFjXLhwASiaLaZbt25AUR/0Ro0aYWZmxi+//MIPP/yglNe3b1/WrVunJKn3u6q4urry+eefK11pMjMzSUlJAVAGed5vT1JS0iNft729PZcvX+b27dsUFhby1VdfFWtbQkIC169fB8BgMJCbm1vs/I4dO3Lx4kWOHDmibDt16lSJpLtjx46kpKRw/PhxAAoKCop1jXrYo8TsQc7OziQkJJCfn092dnaJ7kZl2bFjh1LmoUOH6Nq1K5mZmWi1WmxtbTEajSa7RT3o/PnzNG/enKFDh/LBBx9w+vTpcut+8803OXDgAPv372f//v2EhYURFxeHpaUlPj4+BAUFKeNJCgsL2bVrF5cuXapwewASCo1sfsz/rBo3qVQbhBBCCFG15El/Ofz8/LCwsMDHx4e1a9diY2NT4pjvv/+e9evXo9FoMBqNfPrpp0o3E71eT1RUFL/++qsykBeK+nUHBASwa9cumjdvjl6vV8qbMWMGYWFhDBgwAI
1GQ9euXZk1axZ+fn589tlnDB48GJVKhUqlYuLEiTg4OBAYGEhAQAB79+6lZcuW9OzZ85Gv2c7OjpEjRzJo0CCaNWtGu3bt+PXXXwHo2rUrfn5+jBw5EpVKhU6nKzHPfN26dVm5ciWLFi0iLCyMvLw87O3tSxxnY2NDVFQU4eHhZGVloVarmTZtGj169Ci1XY8Sswe9/fbbnD17Fjc3Nxo1aoRer+f33383GY/GjRvz3nvvkZ6ezpgxY3B0dASgX79+uLm50aRJE/R6fbGbkPJs2LCB5ORktFotOp2uRDsftGPHDmXcwH2vv/46c+bMITU1lSlTpvD5558zfPhwoCjp79KlC3369KlQW+77eMESjManu7uQEEIIIf4cVeHT3jn4GVVV88CLJ8fV1ZXo6Gheeuml6m5KlbpxI1OS/nLU9D6iFSExMk1iVD6Jj2kSI9NqeoxM9emX7j1CCCGEEEI856R7TwUFBQVx8uTJYts0Go0yf/3DBg0axKBBg55E08pU2TY/DQYNGlRinYFXX32V4ODgKqlv+/btbNy4scT28PBw9u/fXyV1PuxZ/J6EEEII8WyR7j1C1HDSvad8Nf11cUVIjEyTGJVP4mOaxMi0mh4jmbJTCFGuyGlTyEhLf6xlWjVugn9I+GMtUwghhBCPTpL+J8TR0ZFjx47x3nvvsXXrViwsLB57HX924OmdO3fYunVrsYW1AgMD8fLyokuXLiWOnz59Ok5OTgwbNuyR2/wsuHbtGv7+/sqqulFRUYwZM0ZZ62DZsmW0bt2a/v37P/a609PTWbRoET/++CO1atXCzMyM9957j7fffhuA7777jhUrVpCRkYGZmRn29vZMmTJFmWWoIjxUanJUj3d4z+arVx5reUIIIYT4cyTpf8ISEhKquwllunPnDmvXri2W9IeGhlZji54OdnZ2SsIP8Nlnn+Hr66sk/R999FGV1Hvv3j2GDRvGoEGDCA8PR61Wc+fOHfbs2QPA//zP/xAYGMiKFSto164dAD/99BPp6emVSvqFEEII8fyTpL+K7Nu3jyVLlmBjY0Pv3r2V7fef+FtaWuLq6oqHhweHDx8mPT0dX19f5an5g8c9/Pn48eMsXLgQg8EAQEBAAK+99lqx+tPS0ggJCeHKlSvk5OTg5ubG2LFjgaLFskJDQ8nKyqJ27doEBgbSvn17goODuXv3Lh4eHtSqVYuYmBiGDx+Or68vffv25dq1awQEBHDz5k2aNWtWbMBtZmYm8+fP5+zZs+Tk5NCtWzdmzJiBRqMpM0axsbF8+eWXAGi1WlatWkWDBg3YuXMn69atA6B58+YEBwdTv3594uPj2b17N9bW1pw9exY7Oztmz57NwoULuXjxIk5OTkRERKBSqZg+fTpmZmakpqZy9epV9Ho9QUFB6HQ6rl+/zpw5c5RFrEaNGoWnpydGo5Hg4GCOHDmCTqejdu3axMTEkJqaire3N8nJyXz66acADB06FLVazYYNGwgLC1PeeBgMBkJCQpRFtwYOHIifnx8Aw4cPx8nJiRMnTpCWlsZbb71V7kq+u3fvxsbGhjFjxijb6tSpw9ChQwFYsWIF48ePVxJ+gLZt25ZZnhBCCCFqLkn6q8CNGzeYPXs2W7ZsoVWrVqxZs6bMY7Ozs9m6dSupqam4u7vj5eWlJPqluXXrFhMnTiQqKopOnTpRUFBQ6orB06ZNY/z48ej1enJzc/Hx8aFdu3bo9XomTZpEWFgYPXr0ICkpiUmTJrFv3z6CgoLw9vYu821ESEgIer2eiRMncvnyZQYOHEivXr0AmD9/Pnq9ntDQUIxGI/7+/sTFxSndUB6WnJzMqlWr2Lx5M7a2thgMBmW13YiICOLj42nYsCGRkZHMmzePyMhIAE6fPk1iYiKNGjVizJgxTJ06lY0bN1KrVi28vLxISkpSFvg6efIkMTExmJub4+fnx7Zt2xg2bBghISG0bt2aFStWkJaWxqBBg2jbti35+fkkJSXx9ddfo1arS1
0heM6cOWzevJmYmJhSv6eVK1diNBpJTEzEYDDwzjvv4OjoiIuLCwBXr15l06ZNGAwG3njjDQYPHkyLFi1KjdGZM2do3759qfug6Kl+UFBQmfurk06rwdbWurqb8dg8T9dSVSRGpkmMyifxMU1iZJrEqGyS9FeBEydO0LZtW1q1agXAO++8Q0RERKnH3u8H3qxZM+rUqcMff/yBg4NDuWU7ODjQqVMnoGhqx7p16xY7Jisri++//56MjAxlm8FgICUlhQYNGqDVapXE2NnZGa1Wy/nz58u92YCiRP3+CrL29vY4Ozsr+/bv38+pU6dYv349UHQzY2dnV2ZZBw4cwMPDA1tbWwCl7uTkZFxcXGjYsCFQ9ETdw8NDOa9Tp040atQIgJdffpmmTZtibV30H3ibNm24ePGicm39+/dXyvX09GTfvn0MGzaMpKQkpk+fDkDDhg1xcXEhOTkZT09PCgoKCAwMpFu3bvTt27fceJQmKSmJmTNnolKpsLKyws3NjaSkJCXp79evH2q1GmtraxwcHLh06VKZSf+zPLFWbl7BczODQk2fDaIiJEamSYzKJ/ExTWJkWk2PkczeUw0qk6yZm5srf2s0GqXLjEajUcrJycmpVNlGoxGVSkVsbCxarbbYvp9//hmVSlXinNK2VUZhYSErV67E3t7+T5dTXlsejldZ8TNV7sN1qFQqrK2t+eqrr0hOTiYpKYmIiAh27Njxp9v/4OeKthfAycmJuLi4Mve3bduWU6dO8fLLL1eqjUIIIYSoeWRF3irQsWNHfvrpJy5cuAAULQBVWfb29kq/8MTExGJlp6SkcPz4cQAKCgpKdEOxsrKic+fOrF69Wtl29epV0tPTadWqFbm5uRw5cgSAI0eOkJ+fT4sWLbCysiI7O5v8/PxS29S9e3clCb18+TJJSUnKPldXV1avXq0ksRkZGVy+fLnM6+vbty8JCQlcv34dKHoTkZubi7OzMwcPHiQ9vWgKyW3btilP7itr7969ZGVlkZ+fz65du+jWrRtQ9HZj69atQNHsOAcPHqRbt25kZGSQnZ1N79698ff3x9rautRrsLS0LLVLFUCPHj2IjY2lsLCQzMxM9uzZU+yNSGW4ubmRkZHB2rVrlZu9O3fu8MUXXwAwbtw4Vq5cyZkzZ5RzTp06xcGDBytVT0Khkc2P+Z9V4yaPdM1CCCGEqBrypL8K1K9fn3nz5jF27FhsbGzo169fpcuYOXMmQUFB2Nra0qdPH2W7jY0NUVFRhIeHk5WVhVqtZtq0aSUS44iICObPn4+7uztQlKiGhoZia2vL8uXLiw3kXbZsGTqdDp1Oh7u7O+7u7tStW5eYmJhiZQYGBhIQEMDevXtp2bIlPXv2LNbeRYsW4eHhgUqlQqvVMnPmzDKf/Hft2hU/Pz9GjhyJSqVCp9MRHR1N69atmTp1Kr6+vkDRzc+jrsar1+uZMGECV65cQa/XK+MLZs2aRVBQkBIbf39/WrduzZkzZ5g9ezb5+fkUFBTQu3dvOnTowJUrxaef9PX15YMPPsDCwqLYrD4A48ePZ968eUrZAwcOLDaQuzJq167Nhg0bWLRoEa+//jqWlpbKlJ0AvXv3Jjg4mODgYG7duoWZmRnNmjVj6tSplarn4wVLZHEuIYQQ4jknK/KK51JNWUPgcZAVectX0/uIVoTEyDSJUfkkPqZJjEyr6TEy1adfuvcIIYQQQgjxnJPuPaJKjR07lqtXrxbb1rhxY6Kjo6u03vDw8Cot/3GqrhgJIYQQouaQpF9UKUlcTZMYCSGEEKKqSdIvRA0XOW0KGWnpj71cq8ZN8A95dt64CCGEEM8z6dP/hDg6OmIwGPDw8CA7O7tK6nB1deWXX3555PPv3LlTYvXgwMBAfvjhh1KPnz59Ohs3bnzk+p4V165dY/jw4crnqKgocnNzlc/Lli1jz549VVb/wYMHcXR05Jtvvim2PS8vj2XLlvHmm2/i5ubGW2+9RXh4OHl5eZUq30Ol5r
0q+Jd59YrpyoUQQgjxREjS/4QlJCRgYWFR3c0o1Z07d1i7dm2xbaGhoXTp0qWaWvR0sLOzKzY152effVYssf7oo4+UlZWrQlxcHN27dyc2NrbY9hkzZvDbb78RFxfHV199xa5du2jZsmWxGxIhhBBCCJDuPVVm3759LFmyBBsbm2LztDs6OnLs2DEsLS1xdXXFw8ODw4cPk56ejq+vrzLF5IPHPfz5+PHjLFy4EIPBAEBAQACvvfZasfrT0tIICQnhypUr5OTk4ObmxtixY4GiBZwenKc/MDCQ9u3bExwczN27d/Hw8KBWrVrExMQwfPhwfH196du3L9euXSMgIICbN2/SrFmzYqvJZmZmMn/+fM6ePUtOTg7dunVjxowZaDSaMmMUGxvLl19+CYBWq2XVqlU0aNCAnTt3sm7dOgCaN29OcHAw9evXJz4+nt27d2Ntbc3Zs2exs7Nj9uzZLFy4kIsXL+Lk5ERERAQqlYrp06djZmZGamoqV69eRa/XExQUhE6n4/r168yZM4dLly4BMGrUKDw9PTEajQQHB3PkyBF0Oh21a9cmJiaG1NRUvL29SU5O5tNPPwVg6NChqNVqNmzYQFhYmDI9qMFgICQkRFlYbeDAgfj5+QEwfPhwnJycOHHiBGlpabz11lv4+/uX+zu6efMmSUlJfP3117i5uZGeno6trS0XLlzgm2++4eDBg1hZWSkxfOedd8ot70nSaTXY2lpXdzMei+flOqqSxMg0iVH5JD6mSYxMkxiVTZL+KnDjxg1mz57Nli1baNWqVYkuMw/Kzs5m69atpKam4u7ujpeXl5Lol+bWrVtMnDiRqKgoOnXqREFBQamrw06bNo3x48ej1+vJzc3Fx8eHdu3aodfrmTRpEmFhYfTo0YOkpCQmTZrEvn37CAoKwtvbm4SEhFLrDgkJQa/XM3HiRC5fvszAgQPp1asXAPPnz0ev1xMaGorRaMTf35+4uDhlQayHJScns2rVKjZv3oytrS0GgwEzMzN++eUXIiIiiI+Pp2HDhkRGRjJv3jwiIyMBOH36NImJiTRq1IgxY8YwdepUNm7cSK1atfDy8iIpKUlZqOzkyZPExMRgbm6On58f27ZtY9iwYYSEhNC6dWtWrFhBWloagwYNom3btuTn5ysJtlqtLrHSMcCcOXPYvHkzMTExpX5PK1euxGg0kpiYiMFg4J133sHR0REXFxegaGXkTZs2YTAYeOONNxg8eDAtWrQo8/tOSEigb9++NGjQgL/+9a/s3LmT0aNH89NPP/Hiiy9St27dMs+tbrl5Bc/FfMk1fd7nipAYmSYxKp/ExzSJkWk1PUYyT381OHHiBG3btqVVq1YA5T59vd8tpFmzZtSpU4c//vjDZNkODg506tQJAI1GUyLxy8rK4vvvvyckJAQPDw+GDBlCWloaKSkpnD9/Hq1WqyTGzs7OaLVazp8/b/K6kpOTGTJkCFC0Uq6zs7Oyb//+/axbtw4PDw+8vLw4c+ZMuWUeOHAADw8PbG1tgaIVg83NzUlOTsbFxYWGDRsCRU/Uk5KSlPM6depEo0aNAHj55Zfp3Lkz1tbWmJmZ0aZNGy5evKgc279/f2UVW09PT44cOQJAUlISQ4cOBaBhw4a4uLiQnJyMvb09BQUFBAYGsnPnTpPxKE1SUhJDhgxBpVJhZWWFm5tbsfb369cPtVqNtbU1Dg4OytuGssTHx+Pl5QWAl5cXcXFxj9QuIYQQQtRs8qS/ClRmkWNzc3Plb41Go3SZ0Wg0Sjk5OTmVKttoNKJSqYiNjUWr1Rbb9/PPP6NSqUqcU9q2yigsLGTlypXY29v/6XLKa8vD8SorfqbKfbgOlUqFtbU1X331FcnJySQlJREREcGOHTv+dPsf/FzR9kLRW42UlBQCAwOVbWlpaRw7doy2bdty8eJFbt++/aef9icUGskoNP6pMkpj1bjJYy9TCCGEEI9Gkv4q0LFjRwIDA7lw4QItWrRg+/btlS7D3t6e06
dP4+zsTGJiYrGyZ82axfHjx+nYsaPSvefBxM/KyorOnTuzevVqJkyYABR1KzEzM6NVq1bk5uZy5MgRunfvzpEjR8jPz6dFixZkZWWRnZ1Nfn4+ZmYlfxrdu3cnLi6O8ePHc/ny5WJdaVxdXVm9ejVz585Fo9GQkZGBwWAo8yagb9++BAYGMnToUBo0aIDBYECr1eLs7MyaNWuUvuvbtm1T6qisvXv3MmLECHQ6Hbt27aJv375A0duNrVu3MmnSJNLT0zl48CA+Pj5kZGSg0Wjo3bs3PXv25MCBA1y+fFl563CfpaUlmZmZpXbv6dGjB7GxsXTq1AmDwcCePXsICAh4pPbHxcXx4YcfMnnyZGXbqlWriIuLIzQ0FFdXV4KCgggNDcXKyoqCggI2btzI4MGDy+0i9rCPFyzBaKz4jaoQQgghnj2S9FeB+vXrM2/ePMaOHYuNjQ39+vWrdBkzZ84kKCgIW1tb+vTpo2y3sbEhKiqK8PBwsrKyUKvVTJs2rURiHBERwfz583F3dweKEtXQ0FBsbW1Zvnx5sYG8y5YtQ6fTodPpcHd3x93dnbp16xITE1OszMDAQAICAti7dy8tW7akZ8+exdq7aNEiPDw8UKlUaLVaZs6cWWbS37VrV/z8/Bg5ciQqlQqdTkd0dDStW7dm6tSp+Pr6AkU3P8HBwZWOH4Ber2fChAlcuXIFvV6vjC+YNWsWQUFBSmz8/f1p3bo1Z86cYfbs2eTn51NQUEDv3r3p0KEDV64Un3rS19eXDz74AAsLi2Kz+gCMHz+eefPmKWUPHDiw2EDuisrJyWHPnj1s2bKl2PYBAwYwcOBAAgMDCQ8PZ8WKFXh7e6PVajEajbi4uKDT6SpdnxBCCCGeb6rCyvRFEeIZMX36dGVGHVG+Gzcy5Ul/OWr6wLCKkBiZJjEqn8THNImRaTU9RjKQVwghhBBCiBpOuveIKjV27FiuXr1abFvjxo2Jjo6u0nrDw8OrtPzHqbpiJIQQQoiaQ5J+UaUkcTVNYiSEEEKIqiZJvxAPeXg15OpSWFjIl19+ybZt2ygsLMRoNNKlSxcCAgKoU6cO169fJyIigqNHj2JlZYXRaESv1zN58mSsrSu+ImHktClkpKVX4ZUUTd/pH/LsvH0RQgghnjeS9AtRjcqaHhUgMjKSo0eP8sUXX9CgQQOMRiPffPMNt2/fRqvV8v777+Pp6UloaCgajYacnBzWrl3LjRs3KpX0e6jU5KiqdnjP5qtXTB8khBBCiCojSb8QZTAajYSHh3P9+nXCw8MJCgpCp9Nx4cIFLl++zF//+lf69u1LVFQUf/zxByNGjGDEiBEAnDt3jrCwMG7evEleXh4jRozA29sbKHqT8Mknn3Dw4EE6d+7Mxx9/XKJug8HA+vXr2blzJw0aNABArVbzt7/9DYDt27djY2PDuHHjlHPMzc2VdRmEEEIIIR4kSb8QpcjJyWHGjBk0bdqUxYsXK6vq/vrrr3zxxRcUFBTg6urK3bt32bhxI+np6fTr14/Bgwdjbm6Ov78/ixYtwsHBgczMTLy9venQoQMODg5A0Q3Fw3P8PyglJQWdTkerVq1K3X/mzBnat2//+C+8iui0GmxtK/724WnzLLf9SZEYmSYxKp/ExzSJkWkSo7JJ0i9EKT788EPc3NwYNWpUse1vvPGGsvhVy5YtcXFxQa1WY2dnR506dfjjjz8oLCwkJSWFKVOmKOfl5eVx7tw5Jen38vIqt/7KLp+xc+dO1q9fz927d/H396d///6VOr+q5eYVPLNzJ9f0eZ8rQmJkmsSofBIf0yRGptX0GJmap1+SfiFK0a1bN7777jveffddateurWw3NzdX/tZoNCU+FxQUoFKpqFevHgkJCWWW/2CZpfnLX/5CTk4O58+fp2XLliX2t23blvj4eOWzp6cnnp6eTJo0iezs7ApdoxBCCCFqDkn6hSjFxIkT2bRpEx9++CGrV6/GyqrsO+eHtWzZEgsLC3bu3Imnpy
dQ1F3Hzs6uwuVYWlri4+NDUFAQkZGR1K9fn8LCQhITE+nQoQMDBgxg7dq1rF69mlGjRqHRaCgsLHykhD+h0EhGobHS51WGVeMmVVq+EEIIIconSb8QZfDz88PCwgIfHx/Wrl1b4fPMzMyIjo4mLCyMdevWYTQaqV+/PpGRkZWqf8qUKXz++ecMHz4cKOry06VLF/r06UPt2rXZuHEjixcv5q9//St16tTBwsICJycnevXqVal6Pl6wBKOxct2JhBBCCPFsURVWtvOwEOK5cuNGpiT95ajpfUQrQmJkmsSofBIf0yRGptX0GJnq01+1k3MLIYQQQgghqp107xGiGm3fvp2NGzeW2B4eHs7LL79cDS0SQgghxPNIkn4hqtGQIUMYMmRIdTdDCCGEEM85SfqFqOEip00hIy39idVn1bgJ/iHhT6w+IYQQQkjSX0JUVBRZWVlMmzat3OO++eYbGjZs+EirosbHx3PgwAGWL1/+qM2sNFdXV6Kjo3nppZce6fw7d+6wdetWRo8erWwLDAzEy8uLLl26lDh++vTpODk5MWzYsEeqb+nSpfzzn//khRdeYPPmzY9UxrPk/PnzBAUFkZ6ejpmZGe3atWPOnDlYWFiUe953333HihUryMjIwMzMDHt7e6ZMmYKjo2OF6/ZQqclRPbnhPZuvXnlidQkhhBCiiCT9j+ibb77BycnpkZL+Z9GdO3dYu3ZtsaQ/NDS0yupbv349Bw4c4IUXXiixr6CgAI1GU2V1VwetVsuMGTNo27YtRqORKVOmsG7dOiZMmFDmOf/zP/9DYGAgK1asoF27dgD89NNPpKenVyrpF0IIIcTz74kl/VOnTuX8+fPk5eXRvHlzwsLCqFu3LrGxsXz55ZdAUeKzatUqGjRowLfffktUVBT5+fmo1WrCw8OxsrLC29ub5ORkAFJTU5XP9/9+++23+e6778jOziYiIoKYmBhOnjyJhYUFK1euxNbWtsTT/LKe7p89e5ZPP/2Ue/fukZOTw9tvv42Pjw/fffcd+/fv5/Dhw2zfvp2RI0fi6enJjh072Lx5MwUFBVhZWTF37lxatWpFbm4uISEhJCcnY2dnR6tWrUzGy9XVFQ8PDw4fPkx6ejq+vr7KU3NHR0eOHTuGpaVlic/Hjx9n4cKFGAwGAAICAnjttdeKlZ2WlkZISAhXrlwhJycHNzc3xo4dC8CpU6cIDQ0lKyuL2rVrExgYSPv27QkODubu3bt4eHhQq1YtYmJiGD58OL6+vvTt25dr164REBDAzZs3adasGQUFBUp9mZmZzJ8/n7Nnz5KTk0O3bt2YMWNGmYn7e++9R05ODiNGjOC1116jT58+hIWF0aVLF06fPs24ceN48cUXCQsL4+bNm+Tl5TFixAi8vb0B2LdvH0uWLMHGxobevXuzbNkyjh07xs2bN8v8/QAcPHiQv//97+Tm5ipJeIcOHUhOTiYsLIxXX32V48ePo1KpWLp0KQ4ODgCl/oY/++wz7O3tGTVqFFCUjE+ePJm9e/eiUqlKXHOzZs2Uv9VqNe3btyclJaXc38iKFSsYP368kvBD0Uq9QgghhBAPe2JJf2BgoPLUdunSpaxZs4ZevXqxatUqNm/ejK2tLQaDATMzM86fP8+sWbPYtGkTLVq0IDc3l9zcXG7dulVuHbdu3aJz585MnTqVtWvX4uPjw4YNGwgJCWHu3Lls3LiRyZMnV7jNTZs25fPPP0en02EwGBgyZAi9evWiV69euLq6Fuu+8sMPP/D111+zadMmdDodBw8eZObMmcTExLB161ZSU1PZvXs3+fn5vP/++8WSvLJkZ2cr57q7u+Pl5aUk+mVd/8SJE4mKiqJTp04UFBSQmZlZ4rhp06Yxfvx49Ho9ubm5+Pj40K5dO/R6PZMmTSIsLIwePXqQlJTEpEmT2LdvH0FBQXh7e5OQkFBq3SEhIej1eiZOnMjly5cZOHCgskjU/Pnz0ev1hIaGYjQa8ff3Jy4ujrfffrvUsj
Zv3oyjoyMxMTFYWlqSnJzML7/8wty5c5k9ezb5+fm8/fbbLFq0CAcHBzIzM/H29qZDhw7Y2Ngwe/ZstmzZQqtWrVizZo3JOANcunSJlStXsm7dOqysrPj1118ZPXo0Bw4cAOC3335j/vz5BAcH8/e//52VK1eyePFikpOTS/0NDx8+nLFjx+Lr64tKpWLjxo289957pSb8D8vOziYuLo4pU6aUe9xPP/1EUFBQha7vaaLTarC1ta7uZlTKs9be6iAxMk1iVD6Jj2kSI9MkRmV7Ykl/QkICiYmJ5OXlkZWVRYsWLSgoKMDDwwNbW1sAJaE9fPgwvXv3pkWLFgDodDp0Op3JpL927dr06dMHgFdeeYVGjRop0x6+8sorHD58uFJtzs7OZu7cuZw9exaVSkVaWho///yz8oT3Qfv37+fnn39WZmIpLCzkzp07ACQnJ+Pp6YlWq0Wr1TJw4ECOHTtmsv7+/fsDRU+B69Spwx9//FFq3fedOHECBwcHOnXqBIBGo6Fu3brFjsnKyuL7778nIyND2WYwGEhJSaFBgwZotVp69OgBgLOzM1qtlvPnz5d7s3H/GmfNmgWAvb09zs7OxWJz6tQp1q9fDxTF1c7OzuT1P+jFF1+kY8eOAFy4cIGUlJRiSXFeXh7nzp1DrVbTtm1b5W3KO++8Q0REhMnyv/vuOy5dusT777+vbMvPz+f69esAtGzZUnmK3qFDB7799lsADhw4UOpv2MHBAXt7ew4dOkSHDh3Yv38/M2bMMNmO/Px8Jk+eTPfu3Xn99ddNHv8sys0reKYWT6npi71UhMTINIlR+SQ+pkmMTKvpMTK1ONcTSfp/+OEHtmzZQkxMDC+88AKJiYls27atzOPLWiTYzMys2L6cnJxi+3U6nfK3Wq0u9lmj0ShdTjQaDUajscxy7luyZAm2traEh4djZmaGr69vmccWFhbi7e3NRx99VOHrMcXc3LzM9t8v88H2VKQeo9GISqUiNjYWrVZbbN/PP/9c6pPoijydLk9hYSErV67E3t7+kcuoXbt2sfLq1atX6luHb775pswyTP1+evXqxcKFC0ucl5KSUuK3lZ+fb7LNw4cPZ8uWLaSkpPC3v/0Na+vynz4UFBTg7+9P3bp1lRuo8rRt25ZTp07JfP5CCCGEMOmJTNlx584drKyssLGxITc3l7i4OAD69u1LQkKC8jTVYDCQm5vLa6+9xqFDh7hw4QIAubm5ZGZm0qBBA/Ly8rh48SIAu3fvfqT2NG/enDNnzmA0GsnMzFS6cDzs7t27NGrUCDMzM3755Rd++OEHZZ+VlRV37/7f3aSrqysJCQn88ccfQFEC97//+79A0RPzhIQE8vPzyc7OfuR232dvb8/p06cBSExMVLZ37NiRlJQUjh8/rrTh9u3bxc61srKic+fOrF69Wtl29epV0tPTlfEHR44cAeDIkSPk5+fTokULrKysyM7OLjPZ7d69u/K9Xr58maSkpGKxWb16tXLTkpGRweXLlx/5+lu2bImFhQU7d+5UtqWkpJCZmUnHjh356aeflN/O9u3blWPK+/307NmT7777jl9//VXZdurUKZNtKes3DODi4sL58+dZv3497733XrnlGI1Gpk+fjkajITQ0tEI3WuPGjWPlypWcOXOmWJsPHjxo8twHJRQa2fwE/1k1blKp9gkhhBDiz3siT/p79+7Nrl27eOutt7Czs8PJyYnTp0/TtWtX/Pz8GDlyJCqVCp1OR3R0NC1atGDevHlMnjxZmaklPDwcR0dHAgMDGTlyJE2bNqVbt26P1J6//e1vfP3117i5ufHiiy/yyiuvlHrcuHHjCAgIYNeuXTRv3hy9Xq/sGzhwIDNmzGDv3r3KQN6PP/6YcePGUVBQQF5eHv369cPJyYm3336bs2fP4ubmRqNGjdDr9fz++++P1HaAmTNnEhQUhK2trdKdCcDGxoaoqCjCw8PJyspCrVYzbdo0pbvOfREREcyfPx93d3egqEtKaG
gotra2LF++vNhA3mXLlindq9zd3XF3d6du3brExMQUKzMwMJCAgAD27t1Ly5Yt6dmzZ7H2Llq0CA8PD1QqFVqtlpkzZz7yk38zMzOio6MJCwtj3bp1GI1G6tevT2RkJPXr12fevHmMHTsWGxsb+vXrV+y8sn4/LVq0YNGiRQQGBpKdnU1eXh6dOnUyOTtTWb/hBg0aoFar8fT05NChQ7Rp06bccg4dOsSuXbt46aWXGDRoEACdOnVizpw5ZZ7Tu3dvgoODCQ4O5tatW5iZmdGsWTOmTp1akTAqPl6wBKPx0d5GCSGEEOLZoCp81L4nQjwjHp7t6EkaOXIkb7/9Nm+99dYTr7uibtzIlKS/HDW9j2hFSIxMkxiVT+JjmsTItJoeI1N9+p/cijxC1CCnT5/mjTfewNramjfffLO6myOEEEKIGk4W56pG27dvZ+PGjSW2h4eH14jBmWPHjuXq1avFtjVu3Jjo6OjHWs/Zs2cfa3kV0a5du1IHFQcFBXHy5Mli2zQaDfHx8aWW89///pfp06eX2D5s2DBlpighhBBCCFOke48QNZx07ylfTX9dXBESI9MkRuWT+JgmMTKtpsfoqZiyUwjx9IqcNoWMtPRqq9+qcRP8Q8KrrX4hhBCiJpCk/wm4P5D0vffeY+vWrVhYWDz2OlxdXYmOjuall156pPPv3LnD1q1bGT16tLItMDAQLy8vunTpUuL46dOnF1uR+Hl17do1/P392bBhAwBRUVGMGTNGmbd/2bJltG7dWllI7XFKT09n0aJF/Pjjj9SqVQszMzPee+89ZSXj//mf/2HlypWkpaVRp04dNBoN7777rjL7T0V5qNTkqKpveM/mq1eqrW4hhBCippCk/wkqbTGpp8WdO3dYu3ZtsaQ/NDS0Glv0dLCzs1MSfoDPPvsMX19fJekvbTG2x+HevXsMGzaMQYMGER4ejlqt5s6dO+zZswcoSvhnzpzJ8uXL6dChAwCXLl0iNja2StojhBBCiGebJP1VYN++fSxZsgQbGxt69+6tbH9w6khXV1c8PDw4fPgw6enp+Pr6Kk/NH55i8sHPx48fZ+HChRgMBgACAgJ47bXXitWflpZGSEgIV65cIScnBzc3N8aOHQsULd704Dz8gYGBtG/fnuDgYO7evYuHhwe1atUiJiaG4cOH4+vrS9++fbl27RoBAQHcvHmTZs2aKQttAWRmZjJ//nzOnj1LTk4O3bp1Y8aMGWg0mjJjFBsby5dffgmAVqtl1apVNGjQgJ07d7Ju3TqgaBG14OBg6tevT3x8PLt378ba2pqzZ89iZ2fH7NmzWbhwIRcvXsTJyYmIiAhUKhXTp0/HzMyM1NRUrl69il6vJygoCJ1Ox/Xr15kzZw6XLl0CYNSoUXh6emI0GgkODubIkSPodDpq165NTEwMqampeHt7k5yczKeffgrA0KFDUavVbNiwgbCwMOWNh8FgICQkRFk4beDAgfj5+QFFq/M6OTlx4sQJ0tLSeOutt/D39y8zPrt378bGxoYxY8Yo2+rUqcPQoUMBWLFiBePHj1cS/vvxmjJlSpllPq10Wg22tuWvVlzdnvb2PQ0kRqZJjMon8TFNYmSaxKhskvQ/Zjdu3GD27Nls2bKFVq1asWbNmjKPzc7OZuvWraSmpuLu7o6Xl1e5c8nfunWLiRMnEhUVRadOnSgoKCAzM7PEcdOmTWP8+PHo9Xpyc3Px8fGhXbt26PV6Jk2aRFhYGD169CApKYlJkyaxb98+goKC8Pb2LvNtREhICHq9nokTJ3L58mUGDhxIr169AJg/fz56vZ7Q0FCMRiP+/v7ExcUp3VAelpyczKpVq9i8eTO2trYYDAZl1eOIiAji4+Np2LAhkZGRzJs3j8jISKBoGszExEQaNWrEmDFjmDp1Khs3bqRWrVp4eXmRlJSkLER28uRJYmJiMDc3x8/Pj23btjFs2DBCQkJo3bo1K1asIC0tjUGDBtG2bVvy8/NJSkri66+/Rq1Wl1
jJGGDOnDls3ryZmJiYUr+nlStXYjQaSUxMxGAw8M477+Do6IiLiwtQtPLxpk2bMBgMvPHGGwwePJgWLVqUGqMzZ86UuzDYTz/9RFBQUJn7nyW5eQVP9cCrmj4wrCIkRqZJjMon8TFNYmRaTY+RzNP/hJ04cYK2bdvSqlUrAN55550yj73fD7xZs2bUqVOHP/74w2TZDg4OdOrUCSia6rFu3brFjsnKyuL7778nJCQEDw8PhgwZQlpaGikpKZw/fx6tVqskxs7Ozmi1Ws6fP2/yupKTk5UpIu3t7XF2dlb27d+/n3Xr1uHh4YGXlxdnzpwpt8wDBw7g4eGBra0tULQisLm5OcnJybi4uNCwYUOg6Il6UlKScl6nTp1o1KgRAC+//DKdO3fG2toaMzMz2rRpw8WLF5Vj+/fvj6WlJWZmZnh6enLkyBEAkpKSlKflDRs2xMXFheTkZOzt7SkoKCAwMJCdO3eajEdpkpKSGDJkCCqVCisrK9zc3Iq1v1+/fqjVaqytrXFwcFDeNpSmspNqTZo0iYEDBxZbCVkIIYQQ4j550v+YVSZZMzc3V/7WaDRKlxmNRqOUk5OTU6myjUYjKpWK2NhYtFptsX0///wzKpWqxDmlbauMwsJCVq5cib29/Z8up7y2PByvsuJnqtyH61CpVFhbW/PVV1+RnJxMUlISERER7Nix40+3/8HPFW0vgJOTE3FxcWXuf/nllzl9+rSynsPy5csxGAzKDWFlJBQaySg0Vvq8x8WqcZNqq1sIIYSoKSTpf8w6duxIYGAgFy5coEWLFmzfvr3SZdjb23P69GmcnZ1JTEwsVvasWbM4fvw4HTt2VLr3PPi038rKis6dO7N69WomTJgAFHUrMTMzo1WrVuTm5nLkyBG6d+/OkSNHyM/Pp0WLFmRlZZGdnU1+fj5mZiV/Ft27dycuLo7x48dz+fLlYl1pXF1dWb16NXPnzkWj0ZCRkYHBYCjzJqBv374EBgYydOhQGjRogMFgQKvV4uzszJo1a0hPT8fW1pZt27YpdVTW3r17GTFiBDqdjl27dtG3b1+g6O3G1q1bmTRpEunp6Rw8eBAfHx8yMjLQaDT07t2bnj17cuDAAS5fvqy8dbjP0tKSzMzMUrv39OjRg9jYWDp16oTBYGDPnj0EBAQ8Uvvd3NxYs2YNa9euZdSoUahUKu7cucOOHTsYMWIE48ePZ/bs2bRp00bpBnTv3r1HquvjBUtknn4hhBDiOSdJ/2NWv3595s2bx9ixY7GxsaFfv36VLmPmzJkEBQVha2tLnz59lO02NjZERUURHh5OVlYWarWaadOmlUiMIyIimD9/Pu7u7kBRohoaGoqtrS3Lly8vNpB32bJl6HQ6dDod7u7uuLu7U7duXWJiYoqVGRgYSEBAAHv37qVly5bFupHMnDmTRYsW4eHhgUqlQqvVMnPmzDKT/q5du+Ln58fIkSNRqVTodDqio6Np3bo1U6dOxdfXFyi6+QkODq50/AD0ej0TJkzgypUr6PV6ZXzBrFmzCAoKUmLj7+9P69atOXPmDLNnzyY/P5+CggJ69+5Nhw4duHKl+HSSvr6+fPDBB1hYWBSb1Qdg/PjxzJs3Tyl74MCBxQZyV0bt2rXZsGEDixYt4vXXX1e6Kr333nsA9O7dm+DgYMLCwkhPT6dBgwbodLpHjpcQQgghnm+yIq947tSUNQQeF1mRt3w1fWBYRUiMTJMYlU/iY5rEyLSaHiMZyCuEEEIIIUQNJ917RJUZO3YsV69eLbatcePGREdHV2m94eHhVVr+41RdMRJCCCFEzSJJv6gykriaJjESQgghxJMgSb94Lrm6uioDlI1GI+PGjcPNze1PlXPv3j3+8pe/MHr0aGVqzPj4eGbMmMHSpUuVdRfi4+M5cOAAy5cvByA9PZ1Fixbx448/UqtWLWVAblmLl0HRKsdLly7l0KFDylSfAwYMUFZWPn36NJGRkZw/fx4bGxsKCwsZMGAAo0aNqvQ1Rk
6bQkZaeqXPexKsGjfBP+TZeXMjhBBCPK0k6RfPreXLl/PSSy/x008/MXToUJydnXnhhRceuRyAffv24efnx7p163j11VcBaNq0KcuWLeNvf/tbielO7927x7Bhwxg0aBDh4eGo1Wru3LnDnj17yqyvsLCQMWPG4OjoyFdffYVOpyM7O1uZ/vXs2bOMHj2aBQsWKKv9pqens2rVqkpfG4CHSk2O6ukc3rP56hXTBwkhhBDCpKfz/+mFeIzatm2LpaUlsbGxvPXWW3h4eODu7k5KSgoA586d48MPP8Tb25uBAweWuyjW3/72N4YOHcq6deuUbU5OTrRs2ZLY2NgSx+/evRsbGxvGjBmDWl30n1udOnWUVYFLk5SUxO+//86MGTPQ6XQAWFhYMHz4cADWrFnDkCFDlIQfwNbWllmzZlUiKkIIIYSoSeRJv3juHTlyhJycHFatWsXu3btp3Lgxubm5FBQUkJ+fj7+/P4sWLcLBwYHMzEy8vb3p0KEDDg4OpZb36quvsn///mLbJk+ejJ+fH56ensW2nzlzRlk8q6LOnDlD27ZtS6yofN9PP/30SOs/PIt0Wg22ttbV3Yynog1PO4mRaRKj8kl8TJMYmSYxKpsk/eK5NWnSJMzNzbGysiIqKorNmzczY8YMXn/9dfr06YO9vT2//fYbKSkpTJkyRTkvLy+Pc+fOlZn0l7a0haOjI3q9ng0bNlC/fv1yjzWlsueEhIRw9OhRbty4wfbt22ncuHGl63xa5eYVVPucyzV93ueKkBiZJjEqn8THNImRaTU9Rqbm6ZekXzy3HuyLD9CjRw9Onz7NkSNH+OCDD5g7dy5NmjShXr16JCQkVLjc06dP07p16xLbP/roI4YOHcro0aOVbU5OTuV2FyqNk5MTmzdvJj8/v8QYAYCXX36Z06dP88YbbwAo3Xq6detGQUFBpeoSQgghRM0gSb+oEfLz87ly5Qrt27enffv2XLp0if/+97/07NkTCwsLdu7cqXTNSUlJwc7ODiurknfL33zzDVu2bGHt2rUl9tnb2/Pmm2/y5Zdf4uTkBICbmxtr1qxh7dq1jBo1CpVKxZ07d9ixYwcjRowota3Ozs40atSI8PBwAgIC0Ol05OTk8MUXX+Dn58fo0aPx8fGhc+fO9O7dG4Dc3FyMRuMjxSah0EhG4aOdW9WsGjep7iYIIYQQzwVJ+kWNYDQamT59Onfv3kWlUtG4cWOmTp2KmZkZ0dHRhIWFsW7dOoxGI/Xr1ycyMlI5d9KkScqUnQ4ODqxevZoOHTqUWs/48ePZsWOH8rl27dps2LCBRYsW8frrr2NpaalM2VkWlUrF2rVrWbx4Mf3796dWrVoAuLu7A9CmTRtWrVrFsmXLmDt3Li+88AJarZaxY8fSsGHDSsfm4wVLMBor3w1JCCGEEM8OVeGjdDoWQjw3btzIlKS/HDW9j2hFSIxMkxiVT+JjmsTItJoeI1N9+mXKTiGEEEIIIZ5z0r1HiGoSFBTEyZMni23TaDTEx8dXU4uEEEII8bySpF+IahIcHFzdTRBCCCFEDSFJvxA1XOS0KWSkpVd3M0yyatwE/5Dw6m6GEEII8UySpP8hjo6OHDt2DEtLy+puyiOZPn06Tk5ODBs27InUl5qaire3N8nJyeUed+3aNfz9/dmwYcMTadeWLVvIycnBx8enxL74+HgOHDjA8uXLH2udj/saMzMzWbp0KYcOHcLc3ByAAQMGMHbsWKBovYDIyEjOnz+PjY0NhYWFDBgwgFGjRlWqHg+VmhzV0z+8Z/PVK9XdBCGEEOKZJUm/eCLs7OyeWMIP8O677z6xuu4r7xrLWmirLIWFhYwZMwZHR0e++uordDod2dnZbN++HYCzZ88yevRoFixYgIuLCwDp6emsWrXqz1+IEEIIIZ47kvSXwWg0Eh4ezvXr1wkPD0en05U45saNG0ydOpUbN24ARYsqzZw5k/j4eBITE7GysuLixYvY2NiwaNEi7OzsOHv2LJ
9++in37t0jJyeHt99+W3kafffuXcLCwvjf//1fVCoVXbp0ISgoiNzcXJYuXcrRo0fJy8vjpZdeYu7cuVhaWnLt2jUCAgK4efMmzZo1M7ki6/0n80OHDuXgwYPcu3eP0NBQunTpQnJyMgsWLFAGkj78OTY2li+//BIArVZbaoJ58uRJIiIiMBgMQNEc93369CnxRuD48eMsXLhQOS4gIIDXXnut1DY/SsyioqLIyspi2rRp5ObmEhISQnJyMnZ2drRq1arcGAG4urri5ubGsWPHSEtLY8SIEcrbkwULFvD999+Tl5dHvXr1CAsLo2nTpiWu0dHRkU8++YSDBw8qC2nNmzcPo9FIfn4+48aNY8CAAaXWn5SUxO+//87nn3+OVqsFwMLCguHDhwOwZs0ahgwZoiT8ALa2tsrqvEIIIYQQD5KkvxQ5OTnMmDGDpk2bsnjxYlQqVanHJSYm0qRJEz7//HMAbt++rez78ccf2blzJ61ateKzzz4jNDSU5cuX07RpUz7//HN0Oh0Gg4EhQ4bQq1cvHBwcCAsLo3bt2iQkJKBWq8nIyABg7dq1WFtbExsbC8CiRYtYvXo1kydPJiQkBL1ez8SJE7l8+TIDBw6kV69e5V7frVu36NChA5MnT2bXrl1EREQQExNT7jnJycmsWrWKzZs3Y2tri8FgwMzMjOzsbOWYO3fuMGfOHFavXk3Dhg1JS0tj8ODB7N69u0T9EydOJCoqik6dOlFQUEBmZmaZdT9KzB60detWUlNT2b17N/n5+bz//vs0a9as3OsFuH79Ops2beL69et4enrSpUsX2rRpw+jRo5k2bRoA27dvJyIigqVLl5ZahtFoVJ7+jxs3jhEjRuDp6UlhYSF375Y9l/CZM2do27atkvA/7KeffqJfv34mr+F5otNqsLW1rpa6q6veZ4nEyDSJUfkkPqZJjEyTGJVNkv5SfPjhh7i5uZnsG/3qq6+yfv16FixYQNeuXYs9qe7cubPyRHnIkCHKaqrZ2dnMnTuXs2fPolKpSEtL4+eff8bBwYFvv/2W+Ph41Oqi/tUvvPACAPv37yczM5N//vOfAOTm5tKmTRugKBm//3TX3t4eZ2dnk9dXu3Zt+vbtC0CHDh1YsGCByXMOHDiAh4cHtra2AKWOeTh+/DipqamMHj1a2aZSqbh48SL16tVTtp04cQIHBwc6deoEFE1TWbdu3TLrfpSYPSg5ORlPT0+0Wi1arZaBAwdy7Ngxk9c8ePBgABo0aECfPn34/vvvadOmDYcOHWLz5s1kZWWRn59fbhleXl7K3926dWP16tVcuXKFnj178uqrr5Z5XmXXzAsJCeHo0aPcuHGD7du307hx40qd/yzIzSuolkVXavpiLxUhMTJNYlQ+iY9pEiPTanqMTC3OJUl/Kbp168Z3333Hu+++S+3atcs8rmPHjuzcuZPDhw+TkJDA6tWr2bJlS4njCgsLlbcFS5YswdbWlvDwcMzMzPD19SUnJ6fc9hQWFjJnzpwKJfQV8WBXJbVarSSuGo2mWLJpql2ltdPR0ZFNmzaV2JeamlrsuMp4lJg93K4/6/53+PvvvzN//nxiY2Oxt7fn2LFj+Pv7l3neg78fHx8fXF1dOXz4MPPmzaNnz55Mnjy51POcnJzYvHlzmWMBXn75ZU6fPs0bb7wBoNz4devWzWQXLyGEEELUPE//lB3VYOLEifTo0YMPP/yw3G4nly9fxsrKCjc3N2bMmMGZM2cwGo0AHDt2jAsXLgBFs8V069YNKOqD3qhRI8zMzPjll1/44YcflPL69u3LunXrlCT1flcVV1dXPv/8c6UrTWZmJikpKQB0796duLg4pT1JSUmPfN329vZcvnyZ27dvU1hYyFdffVWsbQkJCVy/fh0Ag8FAbm5usfM7duzIxYsXOXLkiLLt1KlTJZLujh07kpKSwvHjxwEoKCgo1jXqYY8Sswc5OzuTkJBAfn4+2dnZJboblWXHjh1KmYcOHaJr165kZm
ai1WqxtbXFaDSa7Bb1oPPnz9O8eXOGDh3KBx98wOnTp8s81tnZmUaNGhEeHq7EOScnh9WrVwMwevRotm7dyqFDh5RzcnNzld9fZSQUGtn8DPyzatyk0tcmhBBCiCLypL8Mfn5+WFhY4OPjw9q1a7GxsSlxzPfff8/69evRaDQYjUY+/fRTpZuJXq8nKiqKX3/9VRnIC0X9ugMCAti1axfNmzdHr9cr5c2YMYOwsDAGDBiARqOha9euzJo1Cz8/Pz777DMGDx6MSqVCpVIxceJEHBwcCAwMJCAggL1799KyZUt69uz5yNdsZ2fHyJEjGTRoEM2aNaNdu3b8+uuvAHTt2hU/Pz9GjhyJSqVCp9MRHR1d7Py6deuycuVKFi1aRFhYGHl5edjb25c4zsbGhqioKMLDw8nKykKtVjNt2jR69OhRarseJWYPevvttzl79ixubm40atQIvV7P77//bjIejRs35r333iM9PV2ZSQegX79+uLm50aRJE/R6fbGbkPJs2LCB5ORktFotOp2u3EG3KpWKtWvXsnjxYvr370+tWrUAlG5ibdq0YdWqVSxbtoy5c+fywgsvoNVqGTt2LA0bNqxQe+77eMESjMY//zZECCGEEE8vVeHj6PsgiqmqeeDFk+Pq6kp0dDQvvfRSdTelyt24kSlJfzlqeh/RipAYmSYxKp/ExzSJkWk1PUam+vRL9x4hhBBCCCGec9K9pwKCgoI4efJksW0ajUaZv/5hgwYNYtCgQU+iaWWqbJufBoMGDSoxCPXVV18lODi4Surbvn07GzduLLE9PDyc/fv3V0mdD3sWvychhBBCPHuke48QNZx07ylfTX9dXBESI9MkRuWT+JgmMTKtpsdIpuwUQpQrctoUMtLSq7sZ1cKqcRP8Q8KruxlCCCFElZOkXzzTXF1d0el06HQ6jEYj48aNw83N7ZHKuT9w9969e0ycOJGGDRsSEhJCYGAgX331Ff/85z9p0qRo2sjp06fj5OTEsGHDgKKpSZcuXcqlS5eoVasW9erVY9KkScVmGnrYhQsXWLJkCf/7v/9L3bp1KSgowMXFhY8//hiNRgMUTWfap08f2rVrx8qVK0uUkZKSQv/+/ZkxYwY+Pj6Vvm4AD5WaHFXNHN6z+eqV6m6CEEII8UTUzP+nF8+V5cuXs2vXLhYuXMiMGTNKnau/ou7evYuvry8tW7YkLCxMSb5tbW2Jiooq9ZyzZ88yZswYfH19+fe//83u3bsJCQkptx1paWm8//779OnTh/3797Njxw42b97MvXv3iq1/cOjQIRo2bMiPP/6orJHwoNjY2GJrNQghhBBClEae9IvnRtu2bbG0tCQ2NpYdO3YoT/8jIyNxcHDg3LlzhIWFcfPmTfLy8hgxYgTe3t7K+Tdu3CAgIIC+ffvy0UcfFSt76NChbNy4kd9++42//OUvxfatWbOGwYMH06tXL2Xbiy++yIsvvlhmWzdt2kS3bt2KDfi2srIqMXd/XFwcQ4cO5cSJEyQkJDBq1ChlX35+PomJiWzatInRo0dz+vRp2rVrV7mg1XA6rQZbW2uTx1XkmJpOYmSaxKh8Eh/TJEamSYzKJkm/eG4cOXKEnJwcVq1axe7du2ncuDG5ubkUFBSQn5+Pv78/ixYtwsHBgczMTLy9venQoQMODg4AfPzxx7z33nslEn6A2rVrM2bMGJYuXcqKFSuK7fvpp5/o169fpdr6008/mVxILSMjg+TkZMLDw2nVqhWzZ88ulvQfOHBAubnw8vIiLi5Okv5Kys0rMDnoq6YPDKsIiZFpEqPySXxMkxiZVtNjJPP0i+fepEmT8PDwICoqiqioKLp3786MGTPYsGED165do1atWly4cIGUlBSmTJmCh4cH77//Pnl5eZw7d04px8XFhT179pCWllZqPfdX9n14is3HMQHW6tWr8fDwwMXFhWPHjgGQkJBA3759sbKyonPnzhQUFHDixAnlnNjYWLy8vADw9PTk66+/Jicn50
+3RQghhBDPH3nSL555y5cvL7Zybo8ePTh9+jRHjhzhgw8+YO7cuTRp0oR69eqRkJBQZjkffvgh3377LcOHD2fDhg00bNiw2H6tVsv/+3//j8WLFysDegFeeeUVTp06xRtvvFHhNrdt25bTp08rn/38/PDz82PQoEHk5eUBRSs7Z2Rk4OrqChSNN4iLi6NDhw5cv36d//znP/z888/KAN979+6xb98+3N3dK9wOIYQQQtQMkvSL50p+fj5Xrlyhffv2tG/fnkuXLvHf//6Xnj17YmFhwc6dO/H09ASKZr6xs7PDyur/XoWNGTOGwsLCMhN/d3d31q1bR2pqKk5OTkDRzYKPjw/du3enR48eAJw7d47//ve/Zc4k9N577+Hl5VWsPQUFBUrCf+rUKe7evcv//M//oFKpALh27RoDBgxg5syZ7NixgzfffJOIiAilzN27dxMbG1vppD+h0EhGobFS5zwvrBo3MX2QEEII8RyQpF88V4xGI9OnT+fu3buoVCoaN27M1KlTMTMzIzo6mrCwMNatW4fRaKR+/fpERkaWKGPs2LFK4v/ll18W26dWq5k8eTJjx45VtrVp04bo6GiWLl1KUFBQsSk7y2JnZ8fGjRtZsmQJy5cvx8bGBp1OxxtvvMErr7zCokWLcHNzUxL+++e8/PLL/POf/2THjh1MmzatWJmvv/46c+bMITU1lWbNmlU4Zh8vWCKLcwkhhBDPOVmRV4gaTlbkLV9NHxhWERIj0yRG5ZP4mCYxMq2mx0gG8gohhBBCCFHDSfceIarQoEGDKCgoKLbt1VdfJTg4uJpaJIQQQoia6JGT/suXL6NWq2natOnjbI8Qz5X4+PjqboIQQgghRMWT/ilTpjBs2DA6depEXFwcn376KWq1msDAQIYMGVKVbXzqREVFMWbMGHQ6Xan7PTw82Lp1KxYWFo+9bldXV6Kjo4tNUVkZd+7cYevWrYwePVrZFhgYiJeXF126dClx/PTp03FycmLYsGGP3OZnwbVr1/D392fDhg1Aye942bJltG7dmv79+z+2Os+ePUtAQAAAt2/fJjMzU7mJfvvtt/Hw8GDp0qUcOnQIc3NzAAYMGKAMIj516hRLly7l0qVLxQYP6/X6SrUjctoUMtLSH9t1PcusGjfBPyS8upshhBBCPHYVTvqTkpIIDy/6P8PPP/+c9evXU6dOHSZMmFDjkv7PPvsMX1/fEkl/fn4+ZmZm5c4FX93u3LnD2rVriyX9oaGh1diip4OdnZ2S8EPJ77i0VXr/LEdHR+W3Eh8fz4EDB1i+fDlQtODXsGHDcHR05KuvvkKn05Gdnc327duBohuGMWPGsHDhQnr16gXAxYsX+fnnnyvdDg+VmhyVDO8B2Hz1SnU3QQghhKgSFU768/Ly0Ol0XLt2jVu3btG5c2cArl+/XmWNexp9+umnAAwdOlTp3tS4cWMuXLjAzZs3iY+Px9HRkWPHjmFpaYmrqyseHh4cPnyY9PR0fH19lafmDx738Ofjx4+zcOFCDAYDAAEBAbz22mvF2pKWlkZISAhXrlwhJycHNze3Yk+BQ0NDycrKonbt2gQGBtK+fXuCg4O5e/cuHh4e1KpVi5iYGIYPH46vry99+/bl2rVrBAQEcPPmTZo1a1asP3pmZibz58/n7Nmz5OTk0K1bN2bMmIFGoykzXrGxscq0l1qtllWrVtGgQQN27tzJunXrAGjevDnBwcHUr1+f+Ph4du/ejbW1NWfPnsXOzo7Zs2ezcOFCLl68iJOTExEREahUKqZPn46ZmRmpqalcvXoVvV5PUFAQOp2O69evM2fOHC5dugTAqFGj8PT0xGg0EhwczJEjR9DpdNSuXZuYmBhSU1Px9vYmOTm5xHe8YcMGwsLClDceBoOBkJAQZXGtgQMH4ufnB8Dw4cNxcnLixIkTpKWl8dZbb+Hv71/p31lSUhK///47n3/+OVqtFgALCwuGDx8OwJo1axg8eLCS8AO8+OKLvPjii5WuSw
ghhBDPvwon/S+//DKrVq3i999/p0+fPkBRl4gHFzaqCebMmcPmzZuJiYnB0tKS6dOnc/z4cTZu3Ejt2rVLPSc7O5utW7eSmpqKu7s7Xl5eSqJfmlu3bjFx4kSioqLo1KkTBQUFZGZmljhu2rRpjB8/Hr1eT25uLj4+PrRr1w69Xs+kSZMICwujR48eJCUlMWnSJPbt20dQUBDe3t5lvo0ICQlBr9czceJELl++zMCBA5XEcv78+ej1ekJDQzEajfj7+xMXF8fbb79dalnJycmsWrWKzZs3Y2tri8FgwMzMjF9++YWIiAji4+Np2LAhkZGRzJs3T5kz//Tp0yQmJtKoUSPGjBnD1KlT2bhxI7Vq1cLLy4ukpCRlEayTJ08SExODubk5fn5+bNu2jWHDhhESEkLr1q1ZsWIFaWlpDBo0iLZt25Kfn09SUhJff/01arWa27dvm/yOH7Zy5UqMRiOJiYkYDAbeeecdHB0dcXFxAeDq1ats2rQJg8HAG2+8weDBg2nRokWZ33dpzpw5Q9u2bZWE/2E//fQT/fr1q1SZwjSdVoOtrXWJ7aVtE8VJjEyTGJVP4mOaxMg0iVHZKpz0h4aGsmzZMszMzJR+yMePH6/06p/Po379+pWZ8ANKP/BmzZpRp04d/vjjDxwcHMo8/sSJEzg4ONCpUycANBoNdevWLXZMVlYW33//PRkZGco2g8FASkoKDRo0QKvVKomxs7MzWq2W8+fPl3uzAUWJ+qxZswCwt7fH2dlZ2bd//35OnTrF+vXrgaKbGTs7uzLLOnDgAB4eHtja2gIodScnJ+Pi4qKsdjt06FA8PDyU8zp16kSjRo2AopvNpk2bYm1d9B9xmzZtuHjxonJt/fv3V8r19PRk3759DBs2jKSkJKZPnw5Aw4YNcXFxITk5GU9PTwoKCggMDKRbt2707du33HiUJikpiZkzZ6JSqbCyssLNzY2kpCQl6e/Xrx9qtRpra2scHBy4dOlSpZN+U8tnyPIaVSM3r6DEHM81fd7nipAYmSYxKp/ExzSJkWk1PUam5umvcNLfvHlzFi9eXGxbv3795GkjlJvwA8ogTChK4O93mdFoNErylpOToxxTkYTOaDSiUqmIjY0t8TT4559/LraS632lbauMwsJCVq5cib29/Z8up7y2PByvsuJnqtyH61CpVFhbW/PVV1+RnJxMUlISERER7Nix40+3/8HPFW1veZycnNi8ebMyTuRhr7zyCqdOneKNN96odNlCCCGEqHkqnPQXFhayfft2vvrqKzIyMkhMTOTo0aOkp6c/1hlNngWWlpZkZmaafGpuir29PadPn8bZ2ZnExERle8eOHZk1axbHjx+nY8eOSveeB5/2W1lZ0blzZ1avXs2ECROAom4lZmZmtGrVitzcXI4cOUL37t05cuQI+fn5tGjRgqysLLKzs8tMJrt3705cXBzjx4/n8uXLxbrSuLq6snr1aubOnYtGoyEjIwODwVDmTUDfvn0JDAxk6NChNGjQAIPBgFarxdnZmTVr1pCeno6trS3btm1T6qisvXv3MmLECHQ6Hbt27VKe3Ds7O7N161YmTZpEeno6Bw8exMfHh4yMDDQaDb1796Znz54cOHCAy5cvK28d7ivvO+7RowexsbF06tQJg8HAnj17lLdfj4uzszONGjUiPDycgIAAdDodOTk5fPHFF/j5+fHhhx/i4+ND9+7dldidO3eO//73v7i5uVWqroRCIxmFxsfa/meVVeMm1d0EIYQQokpUOOlftmwZhw8fZsSIEcyZMweARo0aMX/+/BqX9Pv6+vLBBx9gYWHxp9YpmDlzJkFBQdja2irjJABsbGyIiooiPDycrKws1Go106ZNK5EYR0REMH/+fKWLlaWlJaGhodja2rJ8+fJiA3mXLVuGTqdDp9Ph7u6Ou7s7devWJSYmpliZgYGBBAQEsHfvXlq2bEnPnj2LtXfRokV4eHigUqnQarXMnDmzzKS/a9eu+Pn5MXLkSF
QqFTqdjujoaFq3bs3UqVPx9fUFim5+HnWxKr1ez4QJE7hy5Qp6vV4ZXzBr1iyCgoKU2Pj7+9O6dWvOnDnD7Nmzyc/Pp6CggN69e9OhQweuXCk+a8uD3/GDs/oAjB8/nnnz5illDxw4kN69ez9S+8uiUqlYu3Ytixcvpn///tSqVQtAqbNNmzZER0ezdOlSgoKCik3ZWVkfL1iC0SjdhYQQQojnmaqwgp2DXVxc2LFjBy+88AJ6vZ6jR49SWFhI165dOXr0aFW3U4gSasoaAlXtxo1MSfrLUdP7iFaExMg0iVH5JD6mSYxMq+kxMtWnv8KTcxcUFChdHe73XzYYDCb7swshhBBCCCGqV4W79/Tu3Zv58+czc+ZMoKiP/7Jlyx5p9hPxfBk7dixXr14ttq1x48ZER0dXab33F4t7FlRXjIQQQgghoBLdezIzM5k2bRqHDh0iPz8fc3NzevbsyYIFC2rcXP1CPE+ke0/5avrr4oqQGJkmMSqfxMc0iZFpNT1Gj2XKzoKCAvbu3cuSJUvIzMzk999/p3Hjxsr860IIIYQQQoinV4WSfo1GQ3h4OIMHD8bc3Jz69etXdbueSlFRUYwZMwadTlfqfg8PD7Zu3YqFhcVjr9vV1ZXo6GheeumlRzr/zp07bN26ldGjRyvbAgMD8fLyokuXLiWOrymDZK9du4a/v78yQ8/D3/GyZcto3bp1lcxQdf36dSIiIjh69ChWVlYYjUb0ej2TJ09WFiMDGDx4MHl5eaWuonz79m1ee+01hg4dSmBg4CO1I3LaFDLS0h/5Op53Oq2G3LzKr7Vwn1XjJviHPDtd0YQQQjyfKtynv2/fvuzfvx9XV9eqbM9T7bPPPsPX17dE0n9/zvvSkrKnxZ07d1i7dm2xpD80NLQaW/R0sLOzKzYl58Pf8UcffVQl9d67d4/3338fT09PQkND0Wg05OTksHbtWm7cuKEk/b/++is3btxAq9Vy5swZXnnllWLlJCYm0qFDB7766is++eSTMm9Iy+OhUpOjqvCY/ponvxD+RHw2X71i+iAhhBCiilU46c/JyWHSpEl07NiRRo0aFVuBdOHChVXSuKfJp59+CsDQoUNRq9U0bdqUxo0bc+HCBW7evEl8fDyOjo4cO3YMS0tLXF1d8fDw4PDhw6Snp+Pr66s8NX/wuIc/Hz9+nIULF2IwGAAICAjgtddeK9aWtLQ0QkJCuHLlCjk5Obi5uTF27FgATp06VWx+/sDAQNq3b09wcDB3797Fw8ODWrVqERMTw/Dhw/H19aVv375cu3aNgIAAbt68SbNmzYqtIpuZmcn8+fM5e/YsOTk5dOvWjRkzZqDRaMqMV2xsLF9++SUAWq2WVatW0aBBA3bu3Mm6deuAolWeg4ODqV+/PvHx8ezevRtra2vOnj2LnZ0ds2fPZuHChVy8eBEnJyciIiJQqVRMnz4dMzMzUlNTuXr1Knq9nqCgIHQ6HdevX2fOnDlcunQJgFGjRuHp6YnRaCQ4OJgjR46g0+moXbs2MTExpKam4u3tTXJyconveMOGDYSFhSlvPAwGAyEhIZw+fRoomp/fz88PgOHDh+Pk5MSJEydIS0vjrbfewt/fv8z47N69GxsbG8aNG6dsMzc3VxZaezCOHh4e6HQ64uLiSiT9cXFxBAQEsGrVKvbv3y8rZAshhBCiVBVO+l966aVH7lryPJgzZw6bN28mJiYGS0tLpk+fzvHjx9m4cWOZ05ZmZ2ezdetWUlNTcXd3x8vLq9xVfG/dusXEiROJioqiU6dOykq8D5s2bRrjx49Hr9eTm5uLj48P7dq1Q6/XM2nSJMLCwujRowdJSUlMmjSJffv2ERQUhLe3d5lvI0JCQtDr9UycOJHLly8zcOBAevXqBcD8+fPR6/WEhoZiNBrx9/cnLi5OWQjrYcnJyaxatYrNmzdja2uLwWDAzMyMX375hYiICOLj42nYsCGRkZHMmzePyMhIAE6fPk1iYiKNGjVizJgxTJ
06lY0bN1KrVi28vLyKrQ588uRJYmJiMDc3x8/Pj23btjFs2DBCQkJo3bo1K1asIC0tjUGDBtG2bVvy8/NJSkri66+/Rq1Wc/v2bZPf8cNWrlyJ0WgkMTERg8HAO++8g6OjIy4uLkDRisibNm3CYDDwxhtvMHjwYFq0aFFqjM6cOUP79u1L3XdfXl4eiYmJxMTEoNVq8fT0ZPr06crT/J9//pnbt2/TvXt30tPTiYuLk6T/KaTTarC1tTZ94DOuJlzjnyUxKp/ExzSJkWkSo7JVOOmfOHFiVbbjmdSvX79y1ym43w+8WbNm1KlThz/++AMHB4cyjz9x4gQODg506tQJKBpLUbdu3WLHZGVl8f3335ORkaFsMxgMpKSk0KBBA7RarZIYOzs7o9VqOX/+fLk3G1CUqM+aNQsoWiHX2dlZ2bd//35OnTrF+vXrgaKbGTs7uzLLOnDgAB4eHspA7/t1Jycn4+LiQsOGDYGiJ+oeHh7KeZ06daJRo0YAvPzyyzRt2lTp5tKmTRsuXryoXFv//v2Vcj09Pdm3bx/Dhg0jKSmJ6dOnA9CwYUNcXFxITk7G09OTgoICAgMD6dat2yNNNZuUlMTMmTNRqVRYWVnh5uZGUlKSkvT369cPtVqNtbU1Dg4OXLp0qcyk/2E7d+5k/fr13L17F39/f/r378+3335Ly5Ytad68OQBt27blX//6F25ubsD/vQVQqVT87W9/IyQkhGvXrpX73YgnLzev4LmfTaKmz5hRERKj8kl8TJMYmVbTY/RYZu+BooSnLA8miDWJqYXJzM3Nlb81Go3SZUaj0XB/ptScnBzlmIrMnmo0GlGpVMTGxqLVaovt+/nnn4t1u7qvtG2VUVhYyMqVK7G3t//T5ZTXlofjVVb8TJX7cB0qlQpra2u++uorkpOTSUpKIiIigh07dvzp9j/4uaLthaIEPj4+Xvns6emJp6cnkyZNIjs7GyjquvPbb78p42iysrKIi4vDzc2N3NxcEhMTMTc3V97e5OXlsWPHDqWrlxBCCCHEfRVO+h+eGeTmzZvk5eVhZ2fHv//978fesKeRpaUlmZmZJp+am2Jvb8/p06dxdnYmMTFR2d6xY0dmzZrF8ePH6dixo9K958Gn/VZWVnTu3JnVq1cr/b+vXr2KmZkZrVq1Ijc3lyNHjtC9e3eOHDlCfn4+LVq0ICsri+zsbGXQ8cO6d+9OXFwc48eP5/Lly8W60ri6urJ69Wrmzp2LRqMhIyMDg8FQ5k1A3759CQwMZOjQoTRo0ACDwYBWq8XZ2Zk1a9aQnp6Ora0t27ZtU+qorL179zJixAh0Oh27du1Sntw7OzuzdetWJk2aRHp6OgcPHsTHx4eMjAw0Gg29e/emZ8+eHDhwgMuXLytvHe4r7zvu0aMHsbGxdOrUCYPBwJ49ewgICHik9g8YMIC1a9eyevVqRo0apdwI3k/409LSOHr0KIcOHVLWwcjJyaFXr15cuXKFEydO0KpVK7Zs2aKUefz4caZNm1bppD+h0EhGofGRrqMmeByz9wghhBDVrcJJ//79+4t9Ligo4O9///ufToCfJb6+vnzwwQdYWFjQtGnTRy5n5syZBAUFYWtrS58+fZTtNjY2REVFER4eTlZWFmq1mmnTppVIjCMiIpg/fz7u7u5AUaIaGhqKra0ty5cvLzaQd9myZeh0OnQ6He7u7ri7u1O3bl1iYmKKlRkYGEhAQAB79+6lZcuW9OzZs1h7Fy1apHQl0Wq1zJw5s8ykv2vXrvj5+TFy5EhUKhU6nY7o6Ghat27N1KlT8fX1BYpufoKDgx8phnq9ngkTJnDlyhX0er0yvmDWrFkEBQUpsfH396d169acOXOG2bNnk5+fT0FBAb1796ZDhw5cuVJ8ZpUHv+MHZ/UBGD9+PPPmzVPKHjhwIL17936k9teuXZuNGzeyePFi/vrXv1KnTh0sLCxwcnKiV69e7Nixg969exdb+M7c3JzXX3+d+Ph4Tpw4obTjvo
4dO2I0Gjl69Ch6vb7Cbfl4wRJZnKscNf11sRBCiOdDhVfkLU1+fj4uLi785z//eZxtEqJcNWUNgSdFVuQtnyT9pkmMTJMYlU/iY5rEyLSaHiNTffr/1OTc//nPf/50f3EhhBBCCCFE1apw9x4XF5diCf69e/fIzc0lKCioShomnn5jx47l6tWrxbY1btyY6OjoKq03PPzZWd20umIkhBBCCPGgCnfv+f7774t9rlWrFi1btizW51gI8eyR7j3lq+mviytCYmSaxKh8Eh/TJEam1fQYPbYpO0+fPs2oUaNKbF+/fj0jR458tNYJIapd5LQpZKSlV3cznlp/dvaeslg1boJ/yLPz1koIIcSzrcJJ/4oVK0pN+v/+97/XmKQ/KiqKMWPGKCuiPszDw4OtW7diYWHx2Ot2dXUlOjr6kVdFvnPnDlu3bmX06NHKtsDAQLy8vOjSpUuJ42vKYNlr167h7++vzNTz8He8bNkyWrdurSy09jhdv36diIgIjh49ipWVFUajEb1ez+TJk5VFyQAGDx5MXl5eidWUT506xdKlS7l06RK1atWiXr16TJo0qVIz9wB4qNTkqP7U8J7nW34hVEF8Nl+9YvogIYQQ4jExmfTfX5TLaDRy5MiRYgtIpaam1qgpOz/77DN8fX1LJP33575/OCl7mty5c4e1a9cWS/pDQ0OrsUVPBzs7u2JTcz78HX/00UdVUu+9e/d4//338fT0JDQ0FI1G8/+1d+9xVVXr4v8/i8tCbqYpICppsc0bmZIkYIqi7UhFQNTIrUlooGZUSoiiaIRgioGy8eBtd05eNhogJpp1OqVdBGrvLDii7q9oKgcSFExZKre1fn/wc26XCAu8C8/79er1knkZY8xnGT5rzjGfQVVVFZs2beLChQtK0v///t//48KFC5iamnLkyBH69+8PwPHjxwkJCWHlypUMGzYMgNOnT3Ps2LF7Ml4hhBBCPNoMJv3XF+Wqqqpi0aJFynaVSoWNjQ2LFy++d6N7iLz//vsABAQEYGRkRLdu3bC3t+e3336joqKCjIwMevfuzc8//4ylpSWenp74+Phw6NAhysrKCAoKUu6a33jczT8fPnyYlStXotFoAAgPD+eFF17QG0tpaSkxMTEUFxdTVVXF2LFjlQWZ8vLy9Or0R0ZGMmDAAKKjo7l8+TI+Pj6Ym5uTmprKtGnTCAoKYuTIkZw7d47w8HAqKiro3r273mqylZWVxMXFcfz4caqqqhgyZAgLFy7E2Ni40XilpaXxySefAGBqasr69evp3LkzmZmZbN68GYAnnniC6OhoOnXqREZGBllZWVhbW3P8+HHs7OxYsmQJK1eu5PTp0zg5OREfH49KpSIiIgITExOKioooKSnBxcWFqKgo1Go158+fZ+nSpZw5cwaAGTNm4Ovri1arJTo6mpycHNRqNRYWFqSmplJUVIS/vz+5ubkNPuMtW7YQGxurPPHQaDTExMSQn58P1NfpDw4OBmDatGk4OTnxyy+/UFpayssvv0xYWFij8cnKyqJDhw7Mnj1b2WZmZqYsuHZjHH18fFCr1aSnpytJ/8aNG5k4caKS8AP06NGDHj16NNqneLioTY2xsbE2fOAjojVdy70iMWqaxMcwiZFhEqPGGUz6ry/KFR4ezsqVK+/5gB5WS5cuZfv27aSmpmJpaUlERASHDx9m69atWFhY3PKca9eusWPHDoqKivD29sbPz6/JJyMXL15k7ty5JCUl4ezsrKzIe7MFCxYwZ84cXFxcqK6uJjAwkGeeeQYXFxdCQ0OJjY3F3d2d7OxsQkND+fLLL4mKisLf37/RpxExMTG4uLgwd+5czp49y/jx45WEMi4uDhcXF5YvX45WqyUsLIz09HRlQayb5ebmsn79erZv346NjQ0ajQYTExP+9a9/ER8fT0ZGBra2tiQmJvLBBx+QmJgI1L83smfPHrp06UJISAjz589n69atmJub4+fnp7dK8K+//kpqaipmZmYEBwezc+
dOpk6dSkxMDL169SI5OZnS0lImTJhAv379qK2tJTs7m88//xwjIyP++OMPg5/xzdatW4dWq2XPnj1oNBpeeeUVevfujYeHB1C/MvK2bdvQaDSMHj2aiRMn0rNnz1vG6MiRIwwYMOCW+66rqalhz549pKamYmpqiq+vLxEREajVagoKCvDy8mryfPFwq66pazUvnLX1l+eaQ2LUNImPYRIjw9p6jO5anf62nPA3xsvLq9GEH1DmgXfv3p327dvz+++/N9neL7/8gqOjI87OzgAYGxvz2GOP6R1z5coVfvzxR2JiYvDx8WHSpEmUlpZSWFjIqVOnMDU1VRJjNzc3TE1NOXXqlMFryc3NZdKkSUD9Srlubm7Kvq+//prNmzfj4+ODn58fR44cabLNAwcO4OPjg42NDVC/YrCZmRm5ubl4eHhga2sL1N9Rvz59DMDZ2ZkuXboA0LdvX5577jmsra0xMTGhT58+nD59Wjl2zJgxWFpaYmJigq+vLzk5OUD9dLSAgAAAbG1t8fDwIDc3FwcHB+rq6oiMjCQzM9NgPG4lOzubSZMmoVKpsLKyYuzYsXrj9/LywsjICGtraxwdHZWnDc2RmZmJj48Pnp6e7Nu3D4BvvvmGJ598kieeeAJ7e3v69evHf//3fwNwB2vqCSGEEKINavaLvJWVlSQlJfHTTz9RUVGhl3QcOHDgXoztoddUwg/10zWuMzY2VqbMGBsbK/GrqqpSjmlOIqfValGpVKSlpWFqaqq379ixY7dcLO1OF1DT6XSsW7cOBweHO26nqbHcHK/G4meo3Zv7UKlUWFtbs3fvXnJzc8nOziY+Pp5du3bd8fhv/Lm54wXo168fGRkZys++vr74+voSGhrKtWvXAEhPT+fEiRN4enoC9V/40tPTGTt2LP379ycvL4/Ro0e36BqEEEII0TY1O+lftmwZ586dY86cObz33nusWrWKzZs389JLL93L8T1ULC0tqaysvOOXlx0cHMjPz8fNzY09e/Yo2wcNGsTixYs5fPgwgwYNUqb33Hi338rKiueee44NGzYo879LSkowMTHhqaeeorq6mpycHFxdXcnJyaG2tpaePXty5coVrl27prx0fDNXV1fS09OZM2cOZ8+e1ZtK4+npyYYNG1i2bBnGxsaUl5ej0Wga/RIwcuRIIiMjCQgIoHPnzmg0GkxNTXFzc2Pjxo2UlZVhY2PDzp07lT5aav/+/UyfPh21Ws1nn33GyJEjgfqnGzt27CA0NJSysjIOHjxIYGAg5eXlGBsbM3z4cIYOHcqBAwc4e/as8tThuqY+Y3d3d9LS0nB2dkaj0bBv3z7Cw8Nva/zjxo1j06ZNbNiwgRkzZihfBK8n/KWlpfz00098++23yloYVVVVDBs2jOLiYmbOnElgYCCurq5KDE+ePMnRo0cZO3Zsi8ayW6elXKe9retoC+5lyU4hhBDifml20v/DDz+wb98+OnbsiLGxMaNHj+aZZ55h1qxZBAYG3sMhPjyCgoJ47bXXaNeuHd26dbvtdhYtWkRUVBQ2NjaMGDFC2d6hQweSkpJYsWIFV65cwcjIiAULFjRIjOPj44mLi8Pb2xuoT1SXL1+OjY0Na9eu1XuRd82aNajVatRqNd7e3nh7e/PYY4+Rmpqq12ZkZCTh4eHs37+fJ598kqFDh+qNd9WqVfj4+KBSqTA1NWXRokWNJv3PP/88wcHBvP7666hUKtRqNSkpKfTq1Yv58+cTFBQE1H/5iY6Ovq0Yuri48Oabb1JcXIyLi4vyfsHixYuJiopSYhMWFkavXr04cuQIS5Ysoba2lrq6OoYPH87AgQMpLtYvm3jjZ3xjVR+AOXPm8MEHHyhtjx8/nuHDh9/W+C0sLNi6dSurV6/mxRdfpH379rRr1w4nJyeGDRvGrl27GD58uN7id2ZmZowaNYqMjAzmzp1LSkoKCQkJREVF6ZXsbKl3PvxIFudqQlufIyqEEKJ1aPaKvEOGDOGHH37AxMSE4c
OHk5WVhZWVFYMHD+bnn3++1+MUQtFW1hC4X2RF3qZJ0m+YxMgwiVHTJD6GSYwMa+sxumsr8vbp04effvoJNzc3Bg8ezPvvv4+lpWWj1UmEEEIIIYQQD4dm3+k/e/YsOp2OJ554gvLyclavXo1Go2Hu3Ln86U9/utfjFA+hWbNmUVJSorfN3t6elJSUBzSih8+jECO509+0tn7nqDkkRoZJjJom8TFMYmRYW4+RoTv9zU76hRCtkyT9TWvr/4g0h8TIMIlR0yQ+hkmMDGvrMbpr03t0Oh2ffvopWVlZVFRUsGfPHn766SfKysqUevRCPCienp7KC8tarZbZs2e3uIrN9XZSUlJ4+umnuXr1KnPnzsXW1paYmBgiIyPZu3cvX3zxBV271ldeufn9gry8PBISEjhz5ozey7UuLi4N+vruu++Ij48H4Pz582i1WqWa0Ny5cxk4cCCrVq3in//8J+bm5piYmDBlyhTlpeXvvvuO5ORkysvLMTExwcHBgXnz5tG7d+8WXXPignmUl5a1OFZtxb2q3nOnrOy7Ehaz4kEPQwghxCOi2Un/mjVrOHToENOnT2fp0qUAdOnShbi4OEn6xUNh7dq1PP300xQUFBAQEICbmxuPP/74bbV1+fJlgoOD6d+/P5GRkUo9fhsbG5KSkoiLi2twzvHjxwkJCWHlypXKasanT5/m2LFjt+xj2LBhynFJSUlcuXKFBQsWAHD16lV8fX2ZMGECK1aswMjIiEuXLikLd33//fdERkaSnJzMM888A0BBQQFlZWUtTvp9VEZUqZq9Tl/bU6uDhzA+20uKDR8khBBC/P+anfTv2rWLXbt28fjjj7Ns2TKgfqXZs2fP3quxCXFb+vXrh6WlJWlpaezatUu5+5+YmIijoyMnT54kNjaWiooKampqmD59Ov7+/sr5Fy5cIDw8nJEjR/L222/rtR0QEMDWrVs5ceJEg3dZNm7cyMSJE5VEHqBHjx706NGjxdeQlZVFhw4dCAkJUba1b99eWW04OTmZOXPmKAn/9esWQgghhLiVZif9dXV1yoJF1+96ajQag6vSCnG/5eTkUFVVxfr168nKysLe3p7q6mrq6uqora0lLCyMVatW4ejoSGVlJf7+/gwcOBBHR0cA3nnnHaZMmdIg4Yf6+vohISEkJCSQnJyst6+goAAvL6+7cg1HjhxhwIABje4vKCggKirqrvQlHk1qU2NsbKwf9DAUD9NYHlYSo6ZJfAyTGBkmMWpcs5N+Dw8P4uLiWLRoEVA/x3/NmjXKSqhCPGihoaGYmZlhZWVFUlIS27dvZ+HChYwaNYoRI0bg4ODAiRMnKCwsZN68ecp5NTU1nDx5Ukn6PTw82LdvH6+++mqDFXsBJk+ezMcff8yvv/6qt/1uvhMv79cLQ6pr6h6aF9ba+stzzSExaprExzCJkWFtPUZ3/CJvWVkZNjY2LFy4kPDwcAYPHkxNTQ2DBg1i6NChfPjhh3d1wELcrutz+q9zd3cnPz+fnJwcXnvtNZYtW0bXrl3p2LEju3fvbrSdmTNn8s033zBt2jS2bNnSIPE3NTXlrbfeYvXq1coLvQD9+/cnLy+P0aNH3/G1ODk5kZ6e3uj+fv36kZeXR9++fe+4LyGEEEK0fgaT/pdeeomff/4ZKysr1q1bxxtvvMFbb72Fvb09NjY292OMQrRYbW0txcXFDBgwgAEDBnDmzBmOHj3K0KFDadeuHZmZmfj6+gJQWFiInZ0dVlb//nYcEhKCTqdrNPH39vZm8+bNFBUV4eTkBNR/WQgMDMTV1RV3d3cATp48ydGjR1tcSWjs2LFs3LiRTZs2MWPGDFQqFZcuXWLXrl1Mnz6d2bNns2TJEpycnOjfvz9QXzmooqICDw+PFvW1W6elXKdt0TltycNcvUcIIYRoLoNJ/83TDH799dcm5xoL8TDQarVERERw+fJlVCoV9vb2zJ8/HxMTE1JSUoiNjWXz5s1otVo6depEYmJigzZmzZqlJP
6ffPKJ3j4jIyPeffddZs2apWzr06cPKSkpJCQkEBUVpVeys6UsLCzYsmULq1atYtSoUVhaWiolOwGGDx9OdHQ00dHRXLx4ERMTE7p37878+fNb3Nc7H34kdfqb0NYfFwshhGgdDC7O5ezszM8//6z8/Pzzz/Pjjz/e84EJIe4PWZyraZL0GyYxMkxi1DSJj2ESI8PaeozueE5/XV0dOTk5yh3/2tpavZ8B3Nzc7sJQhRBCCCGEEPeCwaS/U6dOSsUegA4dOuj9rFKp+J//+Z97MzohWokJEyZQV6c/L/zZZ58lOjr6AY1ICCGEEG2JwaT/66+/vh/jEKJVy8jIeNBDEEIIIUQb9vCtLS+EEEIIIYS4q5q9OJf4t6SkJEJCQlCr1bfc7+Pjw44dO2jXrt1d79vT05OUlBS9evQtcenSJXbs2MEbb7yhbIuMjMTPz4/Bgwc3OD4iIgInJyemTp1622N+FJw7d46wsDC2bNkCNPyM16xZQ69evRgzZsxd77usrIxVq1bxz3/+E3Nzc6VKz4ULF9i/fz8AZ86c4fHHH1fKiqakpFBWVkZCQgJnzpzRqxTk4uLSov4TF8yjvLTsrl9Xa/GwluxsKSv7roTFrHjQwxBCCPGASNJ/G/76178SFBTUIOmvra3FxMSkyYWfHrRLly6xadMmvaR/+fLlD3BEDwc7Ozsl4YeGn/Hbb799T/q9evUqU6dOZcKECaxYsQIjIyMuXbrEvn37mD17NrNnzwZg2rRpBAUFKStgHz9+nJCQEFauXMmwYcMAOH36NMeOHWvxGHxURlSp5KFfo2p10Aris72k+EEPQQghxAMkSX8Lvf/++wAEBARgZGREt27dsLe357fffqOiooKMjAx69+7Nzz//jKWlJZ6envj4+HDo0CHKysoICgpS7prfeNzNPx8+fJiVK1ei0WgACA8P54UXXtAbS2lpKTExMRQXF1NVVcXYsWOVuvF5eXksX76cK1euYGFhQWRkJAMGDCA6OprLly/j4+ODubk5qampegnluXPnCA8Pp6Kigu7du+u9fFpZWUlcXBzHjx+nqqqKIUOGsHDhQoyNjRuNV1pamlLj3tTUlPXr19O5c2cyMzPZvHkzAE888QTR0dF06tSJjIwMsrKysLa25vjx49jZ2bFkyRJWrlzJ6dOncXJyIj4+HpVKRUREBCYmJhQVFVFSUoKLiwtRUVGo1WrOnz/P0qVLOXPmDAAzZszA19cXrVZLdHQ0OTk5qNVqLCwsSE1NpaioCH9/f3Jzcxt8xlu2bCE2NlZ54qHRaIiJiSE/Px+A8ePHExwcDNQn505OTvzyyy+Ulpby8ssvExYW1mh8srKy6NChAyEhIcq29u3bExAQ0PhfQmDjxo1MnDhRSfgBevToQY8ePZo8TwghhBBtkyT9LbR06VK2b99OamoqlpaWREREcPjwYbZu3YqFhcUtz7l27Ro7duygqKgIb29v/Pz8lET/Vi5evMjcuXNJSkrC2dmZuro6KisrGxy3YMEC5syZg4uLC9XV1QQGBvLMM8/g4uJCaGgosbGxuLu7k52dTWhoKF9++SVRUVH4+/s3+jQiJiYGFxcX5s6dy9mzZxk/frySWMbFxeHi4sLy5cvRarWEhYWRnp7O5MmTb9lWbm4u69evZ/v27djY2KDRaDAxMeFf//oX8fHxZGRkYGtrS2JiIh988IGyQFZ+fj579uyhS5cuhISEMH/+fLZu3Yq5uTl+fn5kZ2crK97++uuvpKamYmZmRnBwMDt37mTq1KnExMTQq1cvkpOTKS0tZcKECfTr14/a2lqys7P5/PPPMTIy4o8//jD4Gd9s3bp1aLVa9uzZg0aj4ZVXXqF3797KSrglJSVs27YNjUbD6NGjmThxIj179rxljI4cOXJbi90VFBTg5eXV4vNE26U2NcbGxvqetX8v224tJEZNk/gYJjEyTGLUOEn67wIvL69GE35AmQfevXt32rdvz++//46jo2Ojx//yyy
84Ojri7OwMgLGxMY899pjeMVeuXOHHH3+kvLxc2abRaCgsLKRz586YmpoqibGbmxumpqacOnWqyS8bUJ+oL168GAAHBwe9NRi+/vpr8vLy+Pjjj4H6LzN2dnaNtnXgwAF8fHywsbEBUPrOzc3Fw8MDW1tboP6Ouo+Pj3Kes7MzXbp0AaBv375069YNa+v6/4n79OnD6dOnlWsbM2aM0q6vry9ffvklU6dOJTs7m4iICABsbW3x8PAgNzcXX19f6urqiIyMZMiQIcp0mZbIzs5m0aJFqFQqrKysGDt2LNnZ2UrS7+XlhZGREdbW1jg6OnLmzJlGk34Da+M16nbPE21XdU3dPVu0pq0viNMcEqOmSXwMkxgZ1tZjdMeLcwnDmkr4AczMzJQ/GxsbK1NmjI2NleStqqpKOaY5CZ1Wq0WlUpGWloapqanevmPHjqFSqRqcc6ttLaHT6Vi3bh0ODg533E5TY7k5Xo3Fz1C7N/ehUqmwtrZm79695Obmkp2dTXx8PLt27brj8d/4c3PHC+Dk5ER6enqL+gfo378/eXl5jB49usXnCiGEEKLtkaT/NlhaWlJZWWnwrrkhDg4O5Ofn4+bmxp49e5TtgwYNYvHixRw+fJhBgwYp03tuvNtvZWXFc889x4YNG3jzzTeB+mklJiYmPPXUU1RXV5OTk4Orqys5OTnU1tbSs2dPrly5wrVr15SXjm/m6upKeno6c+bM4ezZs3pTaTw9PdmwYQPLli3D2NiY8vJyNBpNo18CRo4cSWRkJAEBAXTu3BmNRoOpqSlubm5s3LiRsrIybGxs2Llzp9JHS+3fv5/p06ejVqv57LPPlDv3bm5u7Nixg9DQUMrKyjh48CCBgYGUl5djbGzM8OHDGTp0KAcOHODs2bPKU4frmvqM3d3dSUtLw9nZGY1Gw759+wgPD7+t8Y8dO5aNGzeyadMmZsyYgUql4tKlS+zatYvp06c3et7MmTMJDAzE1dVVid3Jkyc5evQoY8eObdEYduu0lOu0tzX+tqA1Ve8RQgjRdknSfxuCgoJ47bXXaNeuHd26dbvtdhYtWkRUVBQ2NjaMGDFC2d6hQweSkpJYsWIFV65cwcjIiAULFjRIjOPj44mLi8Pb2xuoT1SXL1+OjY0Na9eu1XuRd82aNajVatRqNd7e3nh7e/PYY4+Rmpqq12ZkZCTh4eHs37+fJ598kqFDh+qNd9WqVfj4+KBSqTA1NWXRokWNJv3PP/88wcHBvP7666hUKtRqNSkpKfTq1Yv58+cTFBQE1H/5ud2VaV1cXHjzzTcpLi7GxcVFeb9g8eLFREVFKbEJCwujV69eHDlyhCVLllBbW0tdXR3Dhw9n4MCBFBfrVza58TO+saoPwJw5c/jggw+UtsePH8/w4cNva/wWFhZs2bKFVatWMWrUKCwtLZWSnU3p06cPKSkpJCQkEBUVpVeys6Xe+fAjtFqZLtSYtv64WAghROug0snkYPGIaitrCNxrFy5UStLfBEn6DZMYGSYxaprExzCJkWFtPUaG5vQ/+sWnhRBCCCGEEE2S6T3ijs2aNYuSkhK9bfb29qSkpNzTfleseHRWF31QMRJCCCGEAEn6xV0giathEiMhhBBCPEiS9AvRxiUumEd5admDHsZDqzVV7wmLeXSejgkhhLi7JOm/DUlJSYSEhKBWq2+538fHhx07dtCuXbu73renpycpKSk8/fTTt3X+pUuX2LFjB2+88YayLTIyEj8/PwYPHtzg+Lbysuy5c+cICwtTKvXc/BmvWbOGXr16KQut3W0HDx4kODiY5ORkvdr7NTU1rFu3jn379mFiYoJWq8XDw4OhQ4cSHx8PwPnz59FqtUrZ0blz5/Liiy82u28flRFVKnm9p1G1OmgF8dleUmz4ICGEEK2WJP234a9//StBQUENkv7rte937979gEZm2KVLl9i0aZNe0r98+fIHOKKHg52dnV5pzps/47fffvue9p+eno6rqytpaWl6Sf/ChQ
upqqoiPT0dKysrampqyMjIwNnZWfl7lpSUxJUrV1iwYME9HaMQQgghHl2S9LfQ+++/D0BAQABGRkZ069YNe3t7fvvtNyoqKsjIyKB37978/PPPWFpa4unpiY+PD4cOHaKsrIygoCDlrvmNx9388+HDh1m5ciUajQaA8PBwXnjhBb2xlJaWEhMTQ3FxMVVVVYwdO5ZZs2YBkJeXp1enPzIykgEDBhAdHc3ly5fx8fHB3Nyc1NRUpk2bRlBQECNHjuTcuXOEh4dTUVFB9+7d9VaTraysJC4ujuPHj1NVVcWQIUNYuHAhxsbGjcYrLS2NTz75BABTU1PWr19P586dyczMZPPmzQA88cQTREdH06lTJzIyMsjKysLa2prjx49jZ2fHkiVLWLlyJadPn8bJyYn4+HhUKhURERGYmJhQVFRESUkJLi4uREVFoVarOX/+PEuXLuXMmTMAzJgxA19fX7RaLdHR0eTk5KBWq7GwsCA1NZWioiL8/f3Jzc1t8Blv2bKF2NhY5YmHRqMhJiaG/Px8oL5Of3BwMADTpk3DycmJX375hdLSUl5++WXCwsKa/DtVUVFBdnY2n3/+OWPHjlUWLfvtt9/46quvOHjwIFZWVkoMX3nllSbbE+JW1KbG2NhY37P272XbrYXEqGkSH8MkRoZJjBonSX8LLV26lO3bt5OamoqlpSUREREcPnyYrVu3YmFhcctzrl27xo4dOygqKsLb2xs/P78mV/O9ePEic+fOJSkpCWdnZ2VF3pstWLCAOXPm4OLiQnV1NYGBgTzzzDO4uLgQGhpKbGws7u7uZGdnExoaypdffklUVBT+/v6NPo2IiYnBxcWFuXPncvbsWcaPH8+wYcMAiIuLw8XFheXLl6PVagkLCyM9PV1ZEOtmubm5rF+/nu3bt2NjY4NGo8HExIR//etfxMfHk5GRga2tLYmJiXzwwQckJiYCkJ+fz549e+jSpQshISHMnz+frVu3Ym5ujp+fn94qwb/++iupqamYmZkRHBzMzp07mTp1KjExMfTq1Yvk5GRKS0uZMGEC/fr1o7a2VkmwjYyM+OOPPwx+xjdbt24dWq2WPXv2oNFoeOWVV+jduzceHh5A/crI27ZtQ6PRMHr0aCZOnEjPnj0b/bx3797NyJEj6dy5My+++CKZmZm88cYbFBQU0KNHD72VmIW4XdU1dfesfnVbr43dHBKjpkl8DJMYGdbWYyR1+u8DLy+vRhN+QJkH3r17d9q3b8/vv//eZHu//PILjo6OODs7A2BsbNwg8bty5Qo//vgjMTEx+Pj4MGnSJEpLSyksLOTUqVOYmpoqibGbmxumpqacOnXK4LXk5uYyadIkoH6lXDc3N2Xf119/zebNm/Hx8cHPz48jR4402eaBAwfw8fHBxsYGqF8x2MzMjNzcXDw8PJQ56AEBAWRnZyvnOTs706VLFwD69u3Lc889h7W1NSYmJvTp04fTp08rx44ZM0ZZxdbX15ecnBwAsrOzCQgIAMDW1hYPDw9yc3NxcHCgrq6OyMhIMjMzDcbjVrKzs5k0aRIqlQorKyvGjh2rN34vLy+MjIywtrbG0dFRedrQmIyMDPz8/ADw8/MjPT39tsYlhBBCCNEYudN/FzSV8AOYmZkpfzY2NlamzBgbG3N9QeSqqirlmOYskqzValGpVKSlpWFqaqq379ixY6hUqgbn3GpbS+h0OtatW4eDg8Mdt9PUWG6OV2PxM9TuzX2oVCqsra3Zu3cvubm5ZGdnEx8fz65du+54/Df+3NzxQv1TjcLCQiIjI5VtpaWl/Pzzz/Tr14/Tp0/zxx9/yN1+IYQQQtwRSfpvg6WlJZWVlU1O0WkOBwcH8vPzcXNzY8+ePcr2QYMGsXjxYg4fPsygQYOU6T03Jn5WVlY899xzbNiwgTfffBOon1ZiYmLCU089RXV1NTk5Obi6upKTk0NtbS09e/bkypUrXLt2TXnp+Gaurq6kp6czZ84czp49qzeVxtPTkw0bNr
Bs2TKMjY0pLy9Ho9E0+iVg5MiRREZGEhAQQOfOndFoNJiamuLm5sbGjRuVues7d+5U+mip/fv3M336dNRqNZ999hkjR44E6p9u7Nixg9DQUMrKyjh48CCBgYGUl5djbGzM8OHDGTp0KAcOHODs2bPKU4frmvqM3d3dSUtLw9nZGY1Gw759+wgPD7+t8aenpzNz5kzeffddZdv69etJT09n+fLleHp6EhUVxfLly7GysqKuro6tW7cyceLEO/77d91unZZynfautNUataaSnUIIIdouSfpvQ1BQEK+99hrt2rWjW7dut93OokWLiIqKwsbGhhEjRijbO3ToQFJSEitWrODKlSsYGRmxYMGCBolxfHw8cXFxeHt7A/WJ6vLly7GxsWHt2rV6L/KuWbMGtVqNWq3G29sbb29vHnvsMVJTU/XajIyMJDw8nP379/Pkk08ydOhQvfGuWrUKHx8fVCoVpqamLFq0qNGk//nnnyc4OJjXX38dlUqFWq0mJSWFXr16MX/+fIKCgoD6Lz/R0dG3FUMXFxfefPNNiouLcXFxUd4vWLx4MVFRUUpswsLC6NWrF0eOHGHJkiXU1tZSV1fH8OHDGThwIMXF+uUMb/yMb6zqAzBnzhw++OADpe3x48czfPjwFo+9qqqKffv28fe//11v+7hx4xg/fjyRkZGsWLGC5ORk/P39MTU1VUp2NlYu9na88+FHaLWGny61VW19jqgQQojWQaVrzlwSIR5CbWUNgXvtwoVKSfqbIEm/YRIjwyRGTZP4GCYxMqytx0he5BVCCCGEEKKNk+k94o7NmjWLkpISvW329vakpKTc035XrFhxT9u/mx5UjIQQQgghQJJ+cRdI4mqYxEgIIYQQD5Ik/eKR5+npqbykrNVqmT17NmPHjr2tdlJSUnj66ae5evUqc+fOxdbWlpiYGCIjI9m7dy9ffPEFXbvWV0G5+Z2CvLw8EhISOHPmDObm5nTs2JHQ0FBcXFya7Hfbtm1ER0eTmZlJ3759G+x/++23+fHHH/n222/1yrOeOnWK+Ph4jh07hrm5ORYWFgQHBzN69OgWXXfignmUl5a16Jy2pLVU77mRlX1XwmIenSdlQggh7pwk/aJVWLt2LU8//TQFBQUEBATg5ubG448/flttXb58meDgYPr3709kZKRSg9/GxoakpCTi4uIanHP8+HFCQkJYuXKlsoLx6dOnOXbsmMH+0tPTlVKpixcv1tt38eJFsrOzefLJJ/nmm2/485//DNTX8p86dSrvvfceycnJyrZDhw61+Hp9VEZUqeT1nkbV6qCVxWd7SbHhg4QQQrQqretfMtHm9evXD0tLS9LS0nj55Zfx8fHB29ubwsJCAE6ePMnMmTPx9/dn/PjxDVa/vXDhAtOmTcPV1ZXFixfrLboVEBDADz/8wIkTJxr0u3HjRiZOnKgk/AA9evTgpZdeanK8x48fp6KigtjYWLKysqiurtbb/9lnn+Hh4cGUKVP0xrpt2zaGDBmCr6+vss3W1lbvZyGEEEKI6+ROv2hVcnJyqKqqYv369WRlZWFvb091dTV1dXXU1tYSFhbGqlWrcHR0pLKyEn9/fwYOHIijoyMA77zzDlOmTOHtt99u0LaFhQUhISEkJCQod9evKygowMvLq8XjTUtLw9fXl27dutG3b1+++uorxowZo+zPyMhgwYIFDBo0iNjYWM6dO4ednR0FBQV6aygI0RJqU2NsbKzvapt3u73WSGLUNImPYRIjwyRGjZOkX7QKoaGhmJmZYWVlRVJSEtu3b2fhwoWMGjWKESNG4ODgwIkTJygsLGTevHnKeTU1NZw8eVJJ+j08PNi3bx+vvvpqg1V6ASZPnszHH3/Mr7/+qrf9dpa7qK6uJisrix07dgDg5+dHenq6kvQXFBRw6dIlXF1dUalUvPjii+zevZvg4ODb6k+I66pr6u5qLeu2Xhu7OSRGTZP4GCYxMqytx8hQnX5J+kWrcH1O/3Xu7u7k5+eTk5
PDa6+9xrJly+jatSsdO3Zk9+7djbYzc+ZMvvnmG6ZNm8aWLVsaJP6mpqa89dZbrF69WnmhF6B///7k5eW16CXar7/+msrKSgIDAwHQarWcP3+ekpIS7O3tSUtL49KlS4waNQqo/5JgaWmpvG+Qn5/f7L6EEEII0bZJ0i9andraWoqLixkwYAADBgzgzJkzHD16lKFDh9KuXTsyMzOVue+FhYXY2dlhZfXvb8YhISHodLpGE39vb282b95MUVERTk5OQP2XhcDAQFxdXXF3dwfq3x84evRoo5WE0tPTiYqKYtKkScq2qKgodu3axcyZM9m7dy9paWn07NlT2e/l5cU//vEPpkyZgp+fH3v27MHb2xuAc+fOcfDgQSZPntyieO3WaSnXaVt0TlvSWqv3CCGEaFsk6RetjlarJSIigsuXL6NSqbC3t2f+/PmYmJiQkpJCbGwsmzdvRqvV0qlTJxITExu0MWvWLCXx/+STT/T2GRkZ8e677zJr1ixlW58+fUhJSSEhIYGoqCi9kp23cu7cOX766SdWr16tt93b25uFCxfSs2dPnnjiCb2EH2DcuHGkp6cTFxfHli1biI+PJzExEQsLCywsLHjjjTdaHK93PvwIrVamCzWmrT8uFkII0TqodDI5WIg27cKFSkn6myBJv2ESI8MkRk2T+BgmMTKsrcfI0Jx+KdkphBBCCCFEKyfTe4S4xyZMmEBdnf6c8GeffZbo6OgHNCIhhBBCtDWS9Atxj2VkZDzoIQghhBCijZPpPUIIIYQQQrRycqdftAqenp6o1WrUajVarZbZs2c3WirTUDspKSk8/fTTXL16lblz52Jra0tMTAyRkZHs3buXL774QqnRHxERgZOTE1OnTgUgLy+PhIQEzpw5o1fBx8XFpdE+T506RXx8PMeOHcPc3BwLCwuCg4P54Ycf+Pnnn4H60qLdu3fHzMwMqH96cOjQIZKTkykvL8fExAQHBwfmzZtH7969W3TNiQvmUV5a1uJYtRWtsWQn1JftDItZ8aCHIYQQ4j6RpF+0GtcX6CooKCAgIAA3Nzcef/zx22rr8uXLyiJYkZGRqFQqAGxsbEhKSiIuLq7BOcePHyckJISVK1cybNgwAE6fPs2xY8ca7ae0tJSpU6fy3nvvkZycrGw7dOgQS5cuVY7z9PTUW4Ds+++/JzIykuTkZJ555hmgfgXfsrKyFif9PiojqlTy0K9RtTpohfHZXlL8oIcghBDiPpKkX7Q6/fr1w9LSkrS0NHbt2qXc/U9MTMTR0ZGTJ08SGxtLRUUFNTU1TJ8+HX9/f+X8CxcuEB4ezsiRI3n77bf12g4ICGDr1q2cOHGCP/3pT3r7Nm7cyMSJE5WEH6BHjx706NGj0bFu27aNIUOGKIuFAdja2ur9fCvJycnMmTNHSfivX7cQQgghxK1I0i9anZycHKqqqli/fj1ZWVnY29tTXV1NXV0dtbW1hIWFsWrVKhwdHamsrMTf35+BAwfi6OgIwDvvvMOUKVMaJPwAFhYWhISEkJCQoNyZv66goAAvL68WjbWgoIChQ4e2+BoLCgqIiopq8XlCXKc2NcbGxvqutXc322qtJEZNk/gYJjEyTGLUOEn6RasRGhqKmZkZVlZWJCUlsX37dhYuXMioUaMYMWIEDg4OnDhxgsLCQubNm6ecV1NTw8mTJ5Wk38PDg3379vHqq69ia2vboJ/Jkyfz8ccf8+uvv+ptv5117mRtPPGgVNfU3bVFbNr6gjjNITFqmsTHMImRYW09RoYW55KkX7QaN855B3B3dyc/P5+cnBxee+01li1bRteuXenYsSO7d+9utJ2ZM2fyzTffMG3aNLZs2dIg8Tc1NeWtt95i9erVygu9AP379ycvL4/Ro0c3e8z9+/cnPz+/BVdZr1+/fuTl5dG3b98WnyuEEEKItkeSftEq1dbWUlxczIABAxgwYABnzpzh6NGjDB06lHbt2pGZmanMmy8sLMTOzg4rq39/Ow4JCUGn0z
Wa+Ht7e7N582aKiopwcnIC6r8sBAYG4urqiru7OwAnT57k6NGjjVYSmjJlCn5+fuzZswdvb28Azp07x8GDB5k8eXKj1zd79myWLFmCk5MT/fv3B+orB1VUVODh4dGiWO3WaSnXaVt0TlvSmqv3CCGEaDsk6RetklarJSIigsuXL6NSqbC3t2f+/PmYmJiQkpJCbGwsmzdvRqvV0qlTJxITExu0MWvWLCXx/+STT/T2GRkZ8e677zJr1ixlW58+fUhJSSEhIYGoqCi9kp2NsbOzY8uWLcTHx5OYmIiFhQUWFha88cYbTV7f8OHDiY6OJjo6mosXL2JiYkL37t2ZP39+ywIFvPPhR2i1Ms2oMW39cbEQQojWQaWTScVCtGkXLlRK0t8ESfoNkxgZJjFqmsTHMImRYW09Robm9Le+4tNCCCGEEEIIPTK9R4j7YMKECdTV6c8Lf/bZZ4mOjn5AIxJCCCFEWyJJvxD3QUZGxoMeghBCCCHaMEn6hWjjEhfMo7y07EEP46HVWqv33MjKvithMSse9DCEEELcQ5L0t0BSUhIhISGo1epb7vfx8WHHjh20a9furvft6elJSkqKXh36lrh06RI7duzQqwoTGRmJn58fgwcPbnB8REQETk5OTJ069bbH/Cg4d+4cYWFhbNmyBWj4Ga9Zs4ZevXoxZsyYu953WVkZq1at4p///Cfm5uaYmJgwZcoUpVTnd999R3JyMuXl5ZiYmODg4MC8efPo3bs3NTU1pKSkkJWVhYmJCSYmJvTo0YPQ0FD+9Kc/tWgcPiojqlTyek+janXQyuOzvaT4QQ9BCCHEPSZJfwv89a9/JSgoqEHSX1tbi4mJSZMLPj1oly5dYtOmTXpJ//Llyx/giB4O10tmXnfzZ/z222/fk36vXr3K1KlTmTBhAitWrMDIyIhLly6xb98+AL7//nsiIyNJTk7mmWeeAaCgoICysjJ69+7NwoULuXbtGp9++int27dHp9Oxf/9+CgsLW5z0CyGEEKL1k6S/md5//30AAgICMDIyolu3btjb2/Pbb79RUVFBRkYGvXv35ueff8bS0hJPT098fHw4dOgQZWVlBAUFKXfNbzzu5p8PHz7MypUr0Wg0AISHh/PCCy/ojaW0tJSYmBiKi4upqqpi7NixSr34vLw8li9fzpUrV7CwsCAyMpIBAwYQHR3N5cuX8fHxwdzcnNTUVKZNm0ZQUBAjR47k3LlzhIeHU1FRQffu3fVeOq2srCQuLo7jx49TVVXFkCFDWLhwIcbGxo3GKy0tTaltb2pqyvr16+ncuTOZmZls3rwZgCeeeILo6Gg6depERkYGWVlZWFtbc/z4cezs7FiyZAkrV67k9OnTODk5ER8fj0qlIiIiAhMTE4qKiigpKcHFxYWoqCjUajXnz59n6dKlnDlzBoAZM2bg6+uLVqslOjqanJwc1Go1FhYWpKamUlRUhL+/P7m5uQ0+4y1bthAbG6s88dBoNMTExCgr6I4fP57g4GAApk2bhpOTE7/88gulpaW8/PLLhIWFNRqfrKwsOnToQEhIiLKtffv2BAQEAJCcnMycOXOUhB/qV+EF+O233/jqq684ePAg7du3B0ClUvHyyy832p8QTVGbGmNjY31Hbdzp+W2BxKhpEh/DJEaGSYwaJ0l/My1dupTt27eTmpqKpaUlERERHD58mK1bt2JhYXHLc65du8aOHTsoKirC29sbPz8/JdG/lYsXLzJ37lySkpJwdnamrq6OysrKBsctWLCAOXPm4OLiQnV1NYGBgTzzzDO4uLgQGhpKbGws7u7uZGdnExoaypdffklUVBT+/v6NPo2IiYnBxcWFuXPncvbsWcaPH8+wYcMAiIuLw8XFheXLl6PVagkLCyM9Pb3RFWNzc3NZv34927dvx8bGBo1Gg4mJCf/617+Ij48nIyMDW1tbEhMT+eCDD5SFsfLz89mzZw9dunQhJCSE+fPns3XrVszNzfHz8yM7O1tZ6fbXX38lNT
UVMzMzgoOD2blzJ1OnTiUmJoZevXqRnJxMaWkpEyZMoF+/ftTW1pKdnc3nn3+OkZERf/zxh8HP+Gbr1q1Dq9WyZ88eNBoNr7zyCr1791ZWwC0pKWHbtm1oNBpGjx7NxIkT6dmz5y1jdOTIEQYMGHDLfVB/Vz8qKqrRfT169OCxxx5r9HwhWqK6pu6Oalu39drYzSExaprExzCJkWFtPUZSp/8e8vLyajThB5R54N27d6d9+/b8/vvvTbb3yy+/4OjoiLOzMwDGxsYNErsrV67w448/EhMTg4+PD5MmTaK0tJTCwkJOnTqFqampkhi7ublhamrKqVOnDF5Lbm4ukyZNAsDBwQE3Nzdl39dff83mzZvx8fHBz8+PI0eONNnmgQMH8PHxwcbGBgBLS0vMzMzIzc3Fw8MDW1tboP6OenZ2tnKes7MzXbp0AaBv374899xzWFtbY2JiQp8+fTh9+rRy7JgxY7C0tMTExARfX19ycnIAyM7OVu6W29ra4uHhQW5uLg4ODtTV1REZGUlmZqbBeNxKdnY2kyZNQqVSYWVlxdixY/XG7+XlhZGREdbW1jg6OipPG27lbq6Jd+LECXx8fHjppZeIiYm5a+0KIYQQovWQO/13oKmEH8DMzEz5s7GxsTJlxtjYWEn6qqqqlGOakwhqtVpUKhVpaWmYmprq7Tt27BgqlarBObfa1hI6nY5169bh4OBwx+00NZab49VY/Ay1e3MfKpUKa2tr9u7dS25uLtnZ2cTHx7Nr1647Hv+NPzd3vABOTk6kp6c3ur9fv37k5eXRt2/fW+47ffo0ly5don379vzpT39i9+7dbN26lf/93/9tySUJIYQQoo2QpL8FLC0tqaysbHKKTnM4ODiQn5+Pm5sbe/bsUbYPGjSIxYsXc/jwYQYNGqRM77nxbr+VlRXPPfccGzZs4M033wTqp5WYmJjw1FNPUV1dTU5ODq6uruTk5FBbW0vPnj25cuUK165dU146vpmrqyvp6enMmTOHs2fP6k2l8fT0ZMOGDSxbtgxjY2PKy8vRaDSNfgkYOXIkkZGRBAQE0LlzZzQaDaampri5ubFx40bKysqwsbFh586dSh8ttX//fqZPn45areazzz5j5MiRQP3TjR07dhAaGkpZWRkHDx4kMDCQ8vJyjI2NGT58OEOHDuXAgQOcPXtWeepwXVOfsbu7O2lpaTg7O6PRaNi3bx/h4eG3Nf6xY8eyceNGNm3axIwZM1CpVFy6dIldu3Yxffp0Zs+ezZIlS3BycqJ///5A/fsaFRUVeHh4MGrUKBYvXszy5cuxtq6fv3jlypXbGstunZZynfa2zm0L2krJTiGEEK2bJP0tEBQUxGuvvUa7du3o1q3bbbezaNEioqKisLGxYcSIEcr2Dh06kJSUxIoVK7hy5QpGRkYsWLCgQWIcHx9PXFwc3t7eQH2iunz5cmxsbFi7dq3ei7xr1qxBrVajVqvx9vbG29ubxx57jNTUVL02IyMjCQ8PZ//+/Tz55JMMHTpUb7yrVq3Cx8cHlUqFqakpixYtajTpf/755wkODub1119HpVKhVqtJSUmhV69ezJ8/n6CgIKD+y8/trkjr4uLCm2++SXFxMS4uLsr7BYsXLyYqKkqJTVhYGL169eLIkSMsWbKE2tpa6urqGD58OAMHDqS4WL9U4Y2f8Y1VfQDmzJnDBx98oLQ9fvx4hg8fflvjt7CwYMuWLaxatYpRo0YpU5WmTJkCwPDhw4mOjiY6OpqLFy9iYmJC9+7dmT9/PlD/nsW6deuYOHEiJiYmtG/fHltbW+XF4pZ458OP0Grv3nSj1qatzxEVQgjROqh0d3NysRD3QVtZQ+B+uXChUpL+JkjSb5jEyDCJUdMkPoZJjAxr6zGSF3mFEEIIIYRo42R6j7hts2bNoqSkRG+bvb09KSkp97TfFStW3NP276YHFSMhhBBCiBtJ0i9umySuhkmMhBBCCPEwkKRftAqenp7KC8tarZbZs2czdu
zY22onJSWFp59+mqtXrzJ37lxsbW2JiYkhMjKSvXv38sUXX9C1a321k5vfL8jLyyMhIYEzZ85gbm5Ox44dCQ0NxcXFpcl+t23bRnR0NJmZmXplOisrK0lISODbb79VSoKOGzcOGxsbZcXjkpIS2rVrR8eOHQGIjo7m2WefbfY1Jy6YR3lpWfOD1Ma0heo9d+pWMbKy70pYzKPzVE4IIVo7SfpFq7F27VqefvppCgoKCAgIwM3Njccff/y22rp8+TLBwcH079+fyMhIpR6/jY0NSUlJxMXFNTjn+PHjhISEsHLlSmU149OnT3Ps2DGD/aWnpytlUxcvXgzUrwsQEhJC79692bt3L2q1mmvXrvHpp5/i7++Pv78/cOcvNvuojKhSyes9jarVgcSnabeI0faS4kYOFkII8SDIv2Si1enXrx+WlpakpaXx8ssv4+Pjg7e3N4WFhQCcPHmSmTNn4u/vz/jx4xssknXhwgWmTZuGq6srixcv1luAKyAggB9++IETJ0406Hfjxo1MnDhRSfgBevTowUsvvdTkeI8fP05FRQWxsbFkZWVRXV0N1K8A/H//938sXLgQtVoNQLt27Zg2bdrtBUYIIYQQbZbc6RetTk5ODlVVVaxfv56srCzs7e2prq6mrq6O2tpawsLCWLVqFY6OjlRWVuLv78/AgQNxdHQE4J133mHKlCm8/fbbDdq2sLAgJCSEhIQEkpOT9fYVFBTg5eXV4vGmpaXh6+tLt27d6Nu3L1999RVjxozhyJEj9OvXr8HKy0I8CtSmxtjYWD/oYTxUJB5Nk/gYJjEyTGLUOEn6RasRGhqKmZkZVlZWJCUlsX37dhYuXMioUaMYMWIEDg4OnDhxgsLCQubNm6ecV1NTw8mTJ5Wk38PDg3379vHqq682WLEXYPLkyXz88cf8+uuvettvZ8mL6upqsrKy2LFjBwB+fn6kp6czZsyY22pPiIdFdU1dm66XfbO2Xj/cEImPYRIjw9p6jAzV6ZekX7Qa1+f0X+fu7k5+fj45OTm89tprLFu2jK5du9KxY0d2797daDszZ87km2++Ydq0aWzZsqVB4m9qaspbb73F6tWrlRd6Afr3709eXh6jR49u9pi//vprKisrCQwMBECr1XL+/HlKSkpwcnJi+/bt1NbWYmIi/6sKIYQQ4vZJJiFapdraWoqLixkwYAADBgzgzJkzHD16lKFDh9KuXTsyMzPx9fUFoLCwEDs7O6ys/v3tOCQkBJ1O12ji7+3tzebNmykqKsLJyQmo/7IQGBiIq6sr7u7uQP37A0ePHm20klB6ejpRUVFMmjRJ2RYVFcWuXbuYPXs2Xbp0YcWKFYSHh6NWq6mqquK//uu/CA4Ovmux2q3TUq7T3rX2Whup3mNYY9V7hBBCPDwk6RetklarJSIigsuXL6NSqbC3t2f+/PmYmJiQkpJCbGwsmzdvRqvV0qlTJxITExu0MWvWLCXxv14e8zojIyPeffddZs2apWzr06cPKSkpJCQkEBUVpVey81bOnTvHTz/9xOrVq/W2e3t7s3DhQmbPns2mTZtYvXo1Y8aMwdzcXNl/N73z4UdotTKVqDFt/XFxc0iMhBDi4afSycRhIdq0CxcqJelvgiS0hkmMDJMYNU3iY5jEyLC2HiNDc/qlZKcQQgghhBCtnEzvEeI+mDBhAnV1+nOen332WaKjox/QiIQQQgjRlkjSL8R9kJGR8aCHIIQQQog2TKb3CCGEEEII0crJnX7xyPP09EStVqNWq9FqtcyePbvREpmG2klJSeHpp5/m6tWrzJ07F1tbW2JiYoiMjGTv3r188cUXSm3+iIgInJycmDp1KgB5eXkkJCRw5swZvco9Li4uTfa7bds2oqOjyczMpG/fvsr2yspKEhIS+PbbbzEzMwNg3LhxSsWg2+3vZokL5lFeWtaic9oSKdlpWGMxsrLvSljMigcwIiGEEDeTpF+0CtcX5iooKCAgIAA3Nzcef/zx22rr8uXLBA
cH079/fyIjI1GpVADY2NiQlJREXFxcg3OOHz9OSEgIK1euZNiwYQCcPn2aY8eOGewvPT0dV1dX0tPTWbx4MVC/um9ISAi9e/dm7969qNVqrl27xqeffnrH/d3MR2VElUoe+jWqVgcSn6Y1EqPtJcUPYDBCCCFuRf4lE61Kv379sLS0JC0tjZdffhkfHx+8vb0pLCwE6hfLmjlzJv7+/owfP5709HS98y9cuMC0adNwdXVl8eLFSsIPEBAQwA8//MCJEyca9Ltx40YmTpyoJOAAPXr04KWXXmpyvMePH6eiooLY2FiysrKorq4GIDs7m//7v/9j4cKFqNVqANq1a8e0adPuqD8hhBBCtE1yp1+0Kjk5OVRVVbF+/XqysrKwt7enurqauro6amtrCQsLY9WqVTg6OlJZWYm/vz8DBw7E0dERgHfeeYcpU6bw9ttvN2jbwsKCkJAQEhISSE5O1ttXUFCAl5dXi8eblpaGr68v3bp1o2/fvnz11VeMGTOGI0eO0K9fP0xNTW953u32J8T9pDY1xsbG+kEP46EhsWiaxMcwiZFhEqPGSdIvWoXQ0FDMzMywsrIiKSmJ7du3s3DhQkaNGsWIESNwcHDgxIkTFBYWMm/ePOW8mpoaTp48qST9Hh4e7Nu3j1dffRVbW9sG/UyePJmPP/6YX3/9VW/77axxV11dTVZWFjt27ADAz8+P9PR0xowZY7A9WVNPPAqqa+ra9EI5N2rriwYZIvExTGJkWFuPkaHFuSTpF63C9Tn917m7u5Ofn09OTg6vvfYay5Yto2vXrnTs2JHdu3c32s7MmTP55ptvmDZtGlu2bGmQ+JuamvLWW2+xevVq5YVegP79+5OXl8fo0aObPeavv/6ayspKAgMDAdBqtZw/f56SkhKcnJzYvn07tbW1mJg0/N/0dvoTQgghRNslSb9odWpraykuLmbAgAEMGDCAM2fOcPToUYYOHUq7du3IzMzE19cXgMLCQuzs7LCy+vc345CQEHQ6XaOJv7e3N5s3b6aoqAgnJyeg/stCYGAgrq6uuLu7A/XvDxw9erTRSkLp6elERUUxadIkZVtUVBS7du1i9uzZdOnShRUrVhAeHo5araaqqor/+q//Ijg4+Lb6a8xunZZynbZF57QlUr3HsKaq9wghhHg4SNIvWh2tVktERASXL19GpVJhb2/P/PnzMTExISUlhdjYWDZv3oxWq6VTp04kJiY2aGPWrFlK4v/JJ5/o7TMyMuLdd99VSmcC9OnTh5SUFBISEoiKitIroXkr586d46effmL16tV62729vVm4cCGzZ89m06ZNrF69mjFjxmBubq7sv53+mvLOhx+h1cp0oca09cfFzSExEkKIh59KJ5ODhWjTLlyolKS/CZLQGiYxMkxi1DSJj2ESI8PaeowMzemXkp1CCCGEEEK0cjK9R4h7bMKECdTV6c93fvbZZ4mOjn5AIxJCCCFEWyNJvxD3WEZGxoMeghBCCCHaOEn6hWjjEhfMo7y07EEP46El1XsMMxQjK/uuhMWsuI8jEkIIcTNJ+sUjzdPTE7VajVqtRqvVMnv27BaXrLzeTkpKCk8//TRXr15l7ty52NraEhMTQ2RkJHv37uWLL75QavNHRETg5OTE1KlTAcjLyyMhIYEzZ87oVdJxcXFptM9Tp04RHx/PsWPHMDc3x8LCguDgYKX2/meffcbf/vY3rl27hkqlok+fPrz33nt07dqVyspKEhMTOXjwIO3atUOlUtG3b1/effddunTp0qJr91EZUaWS13saVasDiU/TDMRoe0nxfRyMEEKIW5GkXzzyri/MVVBQQEBAAG5ubjz++OO31dbly5cJDg6mf//+REZGolKpALCxsSEpKYm4uLgG5xw/fpyQkBBWrlzJsGHDADh9+jTHjh1rtJ/S0lKmTp3Ke++9R3JysrLt0KFDAHz66ad8/PHHrFu3jp49ewKQm5vL+fPnsbe3JyQkhF69epGVlYWZmRl1dXXs3L
mToqKiFif9QgghhGj9JOkXrUa/fv2wtLQkLS2NXbt2KXf/ExMTcXR05OTJk8TGxlJRUUFNTQ3Tp0/H399fOf/ChQuEh4czcuRI3n77bb22AwIC2Lp1KydOnOBPf/qT3r6NGzcyceJEJeEH6NGjBz169Gh0rNu2bWPIkCHKImEAtra2ys9//etfiYmJURJ+gCFDhgBw6NAh/u///o///M//xNTUFABjY2NeffXVFsVLCCGEEG2HJP2i1cjJyaGqqor169eTlZWFvb091dXV1NXVUVtbS1hYGKtWrcLR0ZHKykr8/f0ZOHAgjo6OALzzzjtMmTKlQcIPYGFhQUhICAkJCcqd+esKCgrw8vJq0VgLCgoYOnToLfdduHCB33//nWefffaW+48cOUK/fv2UhF+Ih53a1BgbG+sHPYwHTmLQNImPYRIjwyRGjZOkXzzyQkNDMTMzw8rKiqSkJLZv387ChQsZNWoUI0aMwMHBgRMnTlBYWMi8efOU82pqajh58qSS9Ht4eLBv3z5effVVbG1tG/QzefJkPv74Y3799Ve97bezvl1T57S0vUOHDvHhhx+i0Wh49dVXmTFjRovHI8S9VF1T16YXzAFZNMgQiY9hEiPD2nqMDC3OJUm/eORdn9N/nbu7O/n5+eTk5PDaa6+xbNkyunbtSseOHdm9e3ej7cycOZNvvvmGadOmsWXLlgaJv6mpKW+99RarV69WXugF6N+/P3l5ecoLuM3Rv39/8vPzb7mvc+fO2NnZkZeXxwsvvNBgf79+/di+fTu1tbWYmJjg7u7O7t27+fDDD7ly5UqzxyCEEEKItkOSftGq1NbWUlxczIABAxgwYABnzpzh6NGjDB06lHbt2pGZmanMmy8sLMTOzg4rq39/Kw4JCUGn0zWa+Ht7e7N582aKiopwcnIC6r8sBAYG4urqiru7OwAnT57k6NGjjVYSmjJlCn5+fuzZswdvb28Azp07x8GDB5k8eTJz5sxhxYoVrFu3jieeeAKA7777jvbt2+Pu7o6dnR1xcXGEh4djZmYGwNWrV7GwsGhxzHbrtJTrtC0+r62Qkp2GNadkpxBCiAdLkn7Rqmi1WiIiIrh8+TIqlQp7e3vmz5+PiYkJKSkpxMbGsnnzZrRaLZ06dSIxMbFBG7NmzVIS/08++URvn5GREe+++y6zZs1StvXp04eUlBQSEhKIiorSK9nZGDs7O7Zs2UJ8fDyJiYlYWFhgYWHBG2+8AdS/ONyuXTtCQ0O5du0aRkZGSslOlUrFpk2bSEhIYOzYsZibm2NpacmTTz7J+PHjWxyzdz78CK225VOU2oq2/ri4OSRGQgjx8FPpbmdCshCi1bhwoVKS/iZIQmuYxMgwiVHTJD6GSYwMa+sxMjSnX1acEUIIIYQQopWT6T1C3EMTJkygrk5/rvOzzz5LdHT0AxqREEIIIdoiSfqFuIcyMjIe9BCEEEIIISTpF6KtS1wwj/LSsgc9jIeWVO8xrDkxsrLvSljMivs0IiGEEDeTpL+FkpKSCAkJQa1W33K/j48PO3bsoF27dne9b09PT1JSUvRq0rfEpUuX2LFjh1IhBiAyMhI/Pz8GDx7c4PiIiAicnJyYOnXqbY/5UXDu3DnCwsLYsmUL0PAzXrNmDb169WLMmDH3pP+DBw8SHBxMcnLyLWv9r1y5kk8++YRvv/2Wxx9/XNleVlbGqlWr+Oc//4m5uTkmJiZMmTKFyZMnt6h/H5URVSp5vadRtTqQ+DStGTHaXlJ8nwYjhBDiVuRfshb661//Sk1NTYPttbW1AOzevfueJPx3w6VLl9i0aZPetuXLl98y4W9LrpfPvO7mz/jtt9++Zwk/QHp6Oq6urqSlpTXYV1tby2effYazs7PewmJXr15l6tSpODo68t///d9kZWXxySefoNVKvX0hhBBCNCR3+lvg/fffB+prqBsZGdGtWzfs7e357bffqKioICMjg969e/Pzzz9jaWmJp6cnPj4+HDp0iLKyMo
KCgpS75jced/PPhw8fZuXKlWg0GgDCw8MbrMxaWlpKTEwMxcXFVFVVMXbsWKV2fF5eHsuXL+fKlStYWFgQGRnJgAEDiI6O5vLly/j4+GBubk5qairTpk0jKCiIkSNHcu7cOcLDw6moqKB79+56L6BWVlYSFxfH8ePHqaqqYsiQISxcuBBjY+NG45WWlqbUuTc1NWX9+vV07tyZzMxMNm/eDMATTzxBdHQ0nTp1IiMjg6ysLKytrTl+/Dh2dnYsWbKElStXcvr0aZycnIiPj0elUhEREYGJiQlFRUWUlJTg4uJCVFQUarWa8+fPs3TpUs6cOQPAjBkz8PX1RavVEh0dTU5ODmq1GgsLC1JTUykqKsLf35/c3NwGn/GWLVuIjY1VnnhoNBpiYmKU1XTHjx9PcHAwANOmTcPJyYlffvmF0tJSXn75ZcLCwpr8O1VRUUF2djaff/45Y8eOpaysDBsbG2X/wYMHeeKJJwgNDWXZsmW8/vrrAGRlZdGhQwdCQkKUY9u3b09AQECT/QnxoKhNjbGxsX7Qw3ig2vr1GyLxMUxiZJjEqHGS9LfA0qVL2b59O6mpqVhaWhIREcHhw4fZunVroyuhXrt2jR07dlBUVIS3tzd+fn5Kon8rFy9eZO7cuSQlJeHs7ExdXR2VlZUNjluwYAFz5szBxcWF6upqAgMDeeaZZ3BxcSE0NJTY2Fjc3d3Jzs4mNDSUL7/8kqioKPz9/fXuGN8oJiYGFxcX5s6dy9mzZxk/fjzDhg0DIC4uDhcXF5YvX45WqyUsLIz09PRGp5Lk5uayfv16tm/fjo2NDRqNBhMTE/71r38RHx9PRkYGtra2JCYm8sEHHyiLZOXn57Nnzx66dOlCSEgI8+fPZ+vWrZibm+Pn50d2dray6u2vv/5KamoqZmZmBAcHs3PnTqZOnUpMTAy9evUiOTmZ0tJSJkyYQL9+/aitrVUSbCMjI/744w+Dn/HN1q1bh1arZc+ePWg0Gl555RV69+6Nh4cHACUlJWzbtg2NRsPo0aOZOHEiPXv2bPTz3r17NyNHjqRz5868+OKLZGZm6k2/Sk9PZ8KECQwePJiamhry8vIYMGAAR44cYcCAAY22K8TDprqmrk3Xz27r9cMNkfgYJjEyrK3HSOr032NeXl6NJvyAMi2ke/futG/fnt9//73J9n755RccHR1xdnYGwNjYmMcee0zvmCtXrvDjjz8SExODj48PkyZNorS0lMLCQk6dOoWpqamSGLu5uWFqasqpU6cMXktubi6TJk0CwMHBATc3N2Xf119/zebNm/Hx8cHPz48jR4402eaBAwfw8fFR7lpbWlpiZmZGbm4uHh4e2NraAvV31LOzs5XznJ2d6dKlCwB9+/blueeew9raGhMTE/r06cPp06eVY8eMGYOlpSUmJib4+vqSk5MDQHZ2tnLH29bWFg8PD3Jzc3FwcKCuro7IyEgyMzMNxuNWsrOzmTRpEiqVCisrK8aOHas3fi8vL4yMjLC2tsbR0VF52tCYjIwM/Pz8APDz8yM9PV3Zd+HCBX788Ue8vLwA8PX1VaYAyZp6QgghhGgJudN/h5pK+AHMzMyUPxsbGytTZoyNjZXEraqqSjmmOcmcVqtFpVKRlpaGqamp3r5jx46hUqkanHOrbS2h0+lYt24dDg4Od9xOU2O5OV6Nxc9Quzf3oVKpsLa2Zu/eveTm5pKdnU18fDy7du264/Hf+HNzxwv1TzUKCwuJjIxUtpWWlvLzzz/j7OxMZmYmdXV1jB8/Hqif33/16lUWLVqEk5OT3heEO7Fbp6VcJ+8CNEaq9xjW3Oo9QgghHhxJ+lvI0tKSysrKJqfoNIeDgwP5+fm4ubmxZ88eZfugQYNYvHgxhw8fZtCgQcr0nhvv9ltZWfHcc8+xYcMG3nzzTaB+WomJiQlPPfUU1dXV5OTk4OrqSk5ODrW1tfTs2ZMrV65w7do1amtrMTFp+NG7urqSnp7OnDlzOHv2rN5UGk9PTz
Zs2MCyZcswNjamvLwcjUbT6JeAkSNHEhkZSUBAAJ07d0aj0WBqaoqbmxsbN25U5q7v3LlT6aOl9u/fz/Tp01Gr1Xz22WeMHDkSqH+6sWPHDkJDQykrK+PgwYMEBgZSXl6OsbExw4cPZ+jQoRw4cICzZ88qTx2ua+ozdnd3Jy0tDWdnZzQaDfv27SM8PPy2xp+ens7MmTN59913lW3r168nPT0dZ2dnMjIySE5O1ovPjBkz+OKLLxg7diwbN25k06ZNzJgxA5VKxaVLl9i1axfTp09v0Tje+fAjtFp5ctCYtv64uDkkRkII8fCTpL+FgoKCeO2112jXrh3dunW77XYWLVpEVFQUNjY2jBgxQtneoUMHkpKSWLFiBVeuXMHIyIgFCxY0SIzj4+OJi4vD29sbqE9Uly9fjo2NDWvXrtV7kXfNmjWo1WrUajXe3t54e3vz2GOPkZqaqtdmZGQk4eHh7N+/nyeffJKhQ4fqjXfVqlX4+PigUqkwNTVl0aJFjSb9zz//PMHBwbz++uuoVCrUajUpKSn06tWL+fPnExQUBNR/+bnd1WldXFx48803KS4uxsXFRXm/YPHixURFRSmxCQsLo1evXhw5coQlS5ZQW1tLXV0dw4cPZ+DAgRQX65cSvPEzvrGqD8CcOXP44IMPlLbHjx/P8OHDWzz2qqoq9u3bx9///ne97ePGjWP8+PH4+Pjwxx9/4Orqqrff29ub9PR0fHx82LJlC6tWrWLUqFHKNKcpU6a0eCxCCCGEaP1UOpkcLB5BbWUNgfvhwoVKudPfBLmLbZjEyDCJUdMkPoZJjAxr6zGSF3mFEEIIIYRo42R6j7gjs2bNoqSkRG+bvb09KSkp97TfFStW3NP276YHFSMhhBBCiOsk6Rd3RBJXwyRGQgghhHjQJOlvoaSkJEJCQlCr1bfc7+Pjw44dO2jXrt1d79vT05OUlBSefvrp2zr/0qVL7NixQ2/xp8jISPz8/Bg8eHCD49vKvPlz584RFhamvLR782e8Zs0aevXqpay5cLcdPHiQ4OBgkpOTGT16tLK9pqaGdevWsW/fPkxMTNBqtXh4eDB//nxl7YX4+HiOHTuGubk5FhYWBAcH67XRHIkL5lFeWna3L6vVkJKdhrUkRlb2XQmLeXSe1AkhRGshSX8L/fWvfyUoKKhB0n+9DGZjq90+DC5dusSmTZv0kv7ly5c/wBE9HOzs7PSq9Nz8Gb/99tv3tP/09HRcXV1JS0vTS9gXLlxIVVUV6enpWFlZUVNTQ0ZGBtXV1VRUVDB16lTee+89kpOTgfoa/4cOHWpx/z4qI6pU8npPo2p1IPFpWgtitL2k2PBBQggh7jpJ+lvg/fffB+pXkTUyMqJbt27Y29vz22+/UVFRQUZGBr179+bnn3/G0tIST09PfHx8OHToEGVlZQQFBSl3zW887uafDx8+zMqVK9FoNACEh4fzwgsv6I2ltLSUmJgYiouLqaqqYuzYscyaNQuAvLw8vZKdkZGRDBgwgOjoaC5fvoyPjw/m5uakpqYybdo0goKCGDlyJOfOnSM8PJyKigq6d++ut7BUZWUlcXFxHD9+nKqqKoYMGcLChQsxNjZuNF5paWl88sknAJiamrJ+/Xo6d+5MZmYmmzdvBuCJJ54gOjqaTp06kZGRQVZWFtbW1hw/fhw7OzuWLFnCypUrOX36NE5OTsTHx6NSqYiIiMDExISioiJKSkpwcXEhKioKtVrN+fPnWbp0qbIa7owZM/D19UWr1RIdHU1OTg5qtRoLCwtSU1MpKirC39+f3NzcBp/xli1biI2NVZ54aDQaYmJiyM/PB+pLdgYHBwMwbdo0nJyc+OWXXygtLeXll18mLCysyb9TFRUVZGdn8/nnnzN27Fhl/YLffvuNr776ioMHD2JlZaXE8JVXXgFgw4YNDBkyBF9fX6UtW1tbvZ+FEEIIIa6TpL8Fli5dyvbt20lNTcXS0pKIiAgOHz7M1q1bG12Z99q1a+zYsY
OioiK8vb3x8/NrcmGvixcvMnfuXJKSknB2dlYW57rZggULmDNnDi4uLlRXVxMYGMgzzzyDi4sLoaGhxMbG4u7uTnZ2NqGhoXz55ZdERUXh7+/f6NOImJgYXFxcmDt3LmfPnmX8+PEMGzYMgLi4OFxcXFi+fDlarZawsDDS09OV2vg3y83NZf369Wzfvh0bGxs0Gg0mJib861//Ij4+noyMDGxtbUlMTOSDDz4gMTERqF+lds+ePXTp0oWQkBDmz5/P1q1bMTc3x8/PT2/BsF9//ZXU1FTMzMwIDg5m586dTJ06lZiYGHr16kVycjKlpaVMmDCBfv36UVtbqyTYRkZG/PHHHwY/45utW7cOrVbLnj170Gg0vPLKK/Tu3RsPDw+gfpG0bdu2odFoGD16NBMnTqRnz56Nft67d+9m5MiRdO7cmRdffJHMzEzeeOMNCgoK6NGjh96ibDcqKCjQW0dBiEeF2tQYGxvrBz2MB6KtXndzSXwMkxgZJjFqnCT9d8jLy6vRhB9Q5oF3796d9u3b8/vvv+Po6Njo8b/88guOjo44OzsDYGxs3CDxu3LlCj/++CPl5eXKNo1GQ2FhIZ07d8bU1FRJjN3c3JT534ZWEc7NzWXx4sVA/aJZbm5uyr6vv/6avLw8Pv74Y6D+y4ydnV2jbR04cAAfHx9sbGwAlL5zc3Px8PBQVsENCAjAx8dHOc/Z2ZkuXboA0LdvX7p164a1df3/wH369OH06dPKtY0ZM0Zp19fXly+//JKpU6eSnZ1NREQEUH/328PDg9zcXHx9famrqyMyMpIhQ4YoK/i2RHZ2NosWLUKlUmFlZcXYsWPJzs5Wkn4vLy+MjIywtrbG0dGRM2fONJn0Z2RksHDhQgD8/PyIjIzUm37VGFleQzyqqmvq2mQd7bZeP9wQiY9hEiPD2nqMDNXpl6T/DjWV8AOYmZkpfzY2NlamzBgbGyuJW1VVlXJMc5I5rVaLSqUiLS0NU1NTvX3Hjh1DpVI1OOdW21pCp9Oxbt26RlfgbUk7TY3l5ng1Fj9D7d7ch0qlwtramr1795Kbm0t2djbx8fHs2rXrjsd/48/NHS/UP9UoLCwkMjJS2VZaWsrPP/9Mv379OH36NH/88cct7/b3799fmWIkhBBCCGGIJP0tZGlpSWVlpcG75oY4ODiQn5+Pm5sbe/bsUbYPGjSIxYsXc/jwYQYNGqRM77kx8bOysuK5555jw4YNvPnmm0D9tBITExOeeuopqqurycnJwdXVlZycHGpra+nZsydXrlzh2rVrykvHN3N1dSU9PZ05c+Zw9uxZvak0np6ebNiwgWXLlmFsbEx5eTkajabRLwEjR44kMjKSgIAAOnfujEajwdTUFDc3NzZu3KjMXd+5c6fSR0vt37+f6dOno1ar+eyzz5Q7925ubuzYsYPQ0FDKyso4ePAggYGBlJeXY2xszPDhwxk6dCgHDhzg7NmzylOH65r6jN3d3UlLS8PZ2RmNRsO+ffsIDw+/rfGnp6czc+ZM3n33XWXb+vXrSU9PZ/ny5Xh6ehIVFcXy5cuxsrKirq6OrVu3MnHiRKZMmYKfnx979uzB29sbqK9CdPDgwUanXDVmt05LuU57W9fQFkj1HsNaWr1HCCHE/SdJfwsFBQXx2muv0a5dO7p163bb7SxatIioqChsbGwYMWKEsr1Dhw4kJSWxYsUKrly5gpGREQsWLGiQGMfHxxMXF6ckfJaWlixfvhwbGxvWrl2r9yLvmjVrUKvVqNVqvL298fb25rHHHiM1NVWvzcjISMLDw9m/fz9PPvmk3pzxRYsWsWrVKnx8fFCpVJiamrJo0aJGk/7nn3+e4OBgXn/9dVQqFWq1mpSUFHr16sX8+fMJCgoC6r/8REdH31YMXVxcePPNNykuLsbFxUVJdhcvXkxUVJQSm7CwMHr16sWRI0dYsmQJtbW11NXVMXz4cAYOHEhxsX41kRs/4xur+gDMmTOHDz74QGl7/PjxDB8+vM
Vjr6qqYt++ffz973/X2z5u3DjGjx9PZGQkK1asIDk5GX9/f0xNTZWSnWq1Wqk4FB8fT2JiIhYWFlhYWDRratDN3vnwI7RamS7UmLb+uLg5JEZCCPHwU+lkcrB4BLWVNQTuhwsXKiXpb4IktIZJjAyTGDVN4mOYxMiwth4jQ3P6pfi0EEIIIYQQrZxM7xF3ZNasWZSUlOhts7e3JyUl5Z72u2LFo7Oi54OKkRBCCCHEdZL0izsiiathEiMhhBBCPGiS9AvRxiUumEd5admDHsZDS6r3GHa3Y2Rl35WwmEfnaZ4QQjwKJOkXjzRPT0+lMpFWq2X27NmMHTv2ttpJSUnh6aef5urVq8ydOxdbW1tiYmKIjIxk7969fPHFF3TtWl9u8OYXifPy8khISODMmTOYm5vTsWNHQkNDcXFxabTP3377jY8++oj//d//5bHHHqOurg4PDw/eeecdjI2NAairq2PEiBE888wzrFu3Tu/87777juTkZMrLyzExMcHBwYF58+bRu3fvFl27j8qIKpW83tOoWh1IfJp2l2O0vaTY8EFCCCFaRJJ+8chbu3YtTz/9NAUFBQQEBODm5sbjjz9+W21dvnyZ4OBg+vfvT2RkpLLwlo2NDUlJScTFxTU45/jx44SEhLBy5UqGDRsGwOnTpzl27Fij/ZSWlvKXv/yF+fPns3btWgAqKytJTEykuroac3NzAL799ltsbW355z//yfnz5+ncuTMA33//PZGRkSQnJ/PMM88AUFBQQFlZWYuTfiGEEEK0fnL7SrQa/fr1w9LSkrS0NF5++WV8fHzw9vamsLAQgJMnTzJz5kz8/f0ZP3486enpeudfuHCBadOm4erqyuLFi/VW2g0ICOCHH37gxIkTDfrduHEjEydOVBJ+gB49evDSSy81OtZt27YxZMgQJkyYoGyzsrJi8eLFSsIP9Qt4BQQEMHr0aHbv3q1sT05OZs6cOUrCf/36X3jhheaESgghhBBtjNzpF61GTk4OVVVVrF+/nqysLOzt7amurqauro7a2lrCwsJYtWoVjo6OVFZW4u/vz8CBA3F0dATgnXfeYcqUKbz99tsN2rawsCAkJISEhASSk5P19hUUFODl5dWisRYUFOgtfnYr5eXl5ObmsmLFCp566imWLFnCjBkzlPOjoqJa1KcQjwq1qTE2NtYPehh3XWu8prtJ4mOYxMgwiVHjJOkXj7zQ0FDMzMywsrIiKSmJ7du3s3DhQkaNGsWIESNwcHDgxIkTFBYWMm/ePOW8mpoaTp48qST9Hh4e7Nu3j1dffRVbW9sG/UyePJmPP/6YX3/9VW/73VjfbsOGDezdu5eLFy+SkJCAs7Mzu3fvZuTIkVhZWfHcc89RV1fHL7/8wsCBA++4PyEeZtU1da1ugZ22vmiQIRIfwyRGhrX1GBlanEuSfvHIuz6n/zp3d3fy8/PJycnhtddeY9myZXTt2pWOHTvqTZG52cyZM/nmm2+YNm0aW7ZsaZD4m5qa8tZbb7F69WrlhV6A/v37k5eXx+jRo5s95n79+pGfn6/8HBwcTHBwMBMmTKCmpgaAjIwMysvL8fT0BOrfN0hPT2fgwIH069ePvLw8+vbt2+w+hRBCCNF2SdIvWpXa2lqKi4sZMGAAAwYM4MyZMxw9epShQ4fSrl07MjMz8fX1BaCwsBA7OzusrP79rTgkJASdTtdo4u/t7c3mzZspKirCyckJqP+yEBgYiKurK+7u7kD9+wNHjx5ttJLQlClT8PPz0xtPXV2dkvDn5eVx+fJlvv/+e+XdgnPnzjFu3DgWLVrE7NmzWbJkCU5OTvTv3185p6KiAg8PjxbFbLdOS7lO26Jz2hIp2WnYvSjZKYQQ4u6SpF+0KlqtloiICC5fvoxKpcLe3p758+djYmJCSkoKsbGxbN68Ga1WS6dOnUhMTGzQxqxZs5TE/5NPPtHbZ2RkxLvvvsusWbOUbX369CElJYWEhASioqL0SnY2xs7Ojq1bt/LRRx
+xdu1aOnTogFqtZvTo0fTv359Vq1YxduxYvZeJ7ezs6Nu3L1988QW+vr5ER0cTHR3NxYsXMTExoXv37syfP7/FMXvnw4/Qau98ilJr1dYfFzeHxEgIIR5+Kt3dmJAshHhkXbhQKUl/EyShNUxiZJjEqGkSH8MkRoa19RgZmtMvJTuFEEIIIYRo5WR6jxD30IQJE6ir05/r/OyzzxIdHf2ARiSEEEKItkiSfiHuoYyMjAc9BCGEEEIISfqFaOsSF8yjvLTsQQ/joSXVewyTGBkmMWqaxMcwidG/Wdl3JSxmxYMexiNHkn7xSPP09EStVqNWq9FqtcyePbvRMpmG2klJSeHpp5/m6tWrzJ07F1tbW2JiYoiMjGTv3r188cUXSn3+iIgInJycmDp1KlBfLjMhIYEzZ87oVe9xcXFptM9Tp04RHx/PsWPHMDc3x8LCguDgYKXef1ZWFn/729+orKzEysoKCwsLZsyYwciRI5U2Dh48SHBwMMnJyS1aJ+BGPiojqlTyek+janUg8WmaxMgwiVHTJD6GSYwU20uKH/QQHkmS9ItH3vXFuQoKCggICMDNzY3HH3/8ttq6fPkywcHB9O/fn8jISKVkpo2NDUlJScTFxTU45/jx44SEhLBy5UqGDRsGwOnTpzl27Fij/ZSWljJ16lTee+89kpOTlW2HDh0C4NNPP+Xjjz8mKSlJWTH4yJEj/PDDD3pJf3p6Oq6urqSlpd120i+EEEKI1k+SftFq9OvXD0tLS9LS0ti1a5dy9z8xMRFHR0dOnjxJbGwsFRUV1NTUMH36dPz9/ZXzL1y4QHh4OCNHjuTtt9/WazsgIICtW7dy4sQJ/vSnP+nt27hxIxMnTlQSfoAePXrQo0ePRse6bds2hgwZoizMBWBra6v8nJSUxPLly5WEH+pX/r2+EBdARUUF2dnZfP7554wdO5aysjJsbGxaFDMhhBDiUaM2NcbGxvqW+xrbLiTpF61ITk4OVVVVrF+/nqysLOzt7amurqauro7a2lrCwsJYtWoVjo6OVFZW4u/vz8CBA5XE+p133mHKlCkNEn4ACwsLQkJCSEhIUO7MX1dQUICXl1eLxlpQUMDQoUNvue/ChQucO3eOZ599tsk2du/ezciRI+ncuTMvvvgimZmZvPHGGy0ahxBCCPGoqa6pu2U9fqnTL3X6RSsXGhqKj48PSUlJJCUl4erqysKFC9myZQvnzp3D3Nyc3377jcLCQubNm4ePjw9/+ctfqKmp4eTJk0o7Hh4e7Nu3j9LS0lv2M3nyZI4fP86vv/6qt/121rdr6TkBAQGMGzdO78lERkYGfn5+APj5+ZGent7icQghhBCibZA7/eKRd31O/3Xu7u7k5+eTk5PDa6+9xrJly+jatSsdO3Zk9+7djbYzc+ZMvvnmG6ZNm8aWLVuwtbXV229qaspbb73F6tWrlRd6oX7aTV5eXovm1Pfv35/8/Pxb7uvUqRN2dnbk5+crTwNSU1P517/+xaxZswDIz8+nsLCQyMhI5bzS0lJ+/vlnnJ2dmz0OgN06LeU6bYvOaUukYoZhEiPDJEZNk/gYJjH6Nyv7roYPEg1I0i9aldraWoqLixkwYAADBgzgzJkzHD16lKFDh9KuXTsyMzOVefOFhYXY2dlhZfXvR2EhISHodLpGE39vb282b95MUVERTk5OQP2XhcDAQFxdXXF3dwfg5MmTHD16tNFKQlOmTMHPz489e/bg7e0NwLlz5zh48CCTJ0/mzTffJC4ujqSkJJ588kkArl69qpyfnp7OzJkzeffdd5Vt69evJz09vcVJ/zsffoRW2/KnFW1FW39c3BwSI8MkRk2T+BgmMRJ3SpJ+0apotVoiIiK4fPkyKpUKe3t75s+fj4mJCSkpKcTGxrJ582a0Wi2dOnUiMTGxQRuzZs1SEv9PPvlEb5+RkRHvvvuucscdoE+fPqSkpJCQkEBUVJReyc7G2NnZsWXLFuLj40lMTMTCwgILCwtlTv4rr7
xCu3btmD9/PpWVlTz++OOYm5sTERFBVVUV+/bt4+9//7tem+PGjWP8+PFERkZiYWFxB1EUQgghRGuj0t3OhGQhRKtx4UKl3OlvgtxdM0xiZJjEqGkSH8MkRoa19RjJi7xCCCGEEEK0cTK9R4h7aMKECdTV6b949eyzzxIdHf2ARiSEEEKItkiSfiHuoYyMjAc9BCGEEEIISfpbIikpiZCQENRq9S33+/j4sGPHDtq1a3fX+/b09CQlJUWvNGVLXLp0iR07dugt3hQZGYmfnx+DBw9ucHxERAROTk5MnTr1tsf8KDh37hxhYWFs2bIFaPgZr1mzhl69ejFmzJi73ndZWRmrVq3in//8J+bm5piYmDBlyhQmT54MwPfff8+6desoLS2lffv2GBsb8+qrrzJhwgSljcLCQsaMGcPChQsJDAy8rXEkLphHeWnZ3bikVknK5BkmMTJMYtQ0iY9hEiPDHoUYWdl3JSxmxQPpW5L+FvjrX/9KUFBQg6S/trYWExOTJmvAP2iXLl1i06ZNekn/8uXLH+CIHg7Xq+hcd/NnfKvVee+Gq1evMnXqVCZMmMCKFSswMjLi0qVL7Nu3D6hP+BctWsTatWsZOHAgAGfOnCEtLU2vnbS0NFxdXUlPT7/tpN9HZUSVSl7vaVStDiQ+TZMYGSYxaprExzCJkWGPQIy2lxQ/sL4l6W+m999/H6hfGdXIyIhu3bphb2/Pb7/9RkVFBRkZGfTu3Zuff/4ZS0tLPD098fHx4dChQ5SVlREUFKTcNb/xuJt/Pnz4MCtXrkSj0QAQHh7OCy+8oDeW0tJSYmJiKC4upqqqirFjxyolJPPy8li+fDlXrlzBwsKCyMhIBgwYQHR0NJcvX8bHxwdzc3NSU1OZNm0aQUFBjBw5knPnzhEeHk5FRQXdu3fXm4deWVlJXFwcx48fp6qqiiFDhrBw4UKMjY0bjVdaWppS7tLU1JT169fTuXNnMjMz2bx5MwBPPPEE0dHRdOrUiYyMDLKysrC2tub48ePY2dmxZMkSVq5cyenTp3FyciI+Ph6VSkVERAQmJiYUFRVRUlKCi4sLUVFRqNVqzp8/z9KlSzlz5gwAM2bMwNfXF61WS3R0NDk5OajVaiwsLEhNTaWoqAh/f39yc3MbfMZbtmwhNjZWeeKh0WiIiYlRFtUaP348wcHBAEybNg0nJyd++eUXSktLefnllwkLC2s0PllZWXTo0IGQkBBlW/v27QkICAAgOTmZOXPmKAn/9XjNmzdP+bm2tpY9e/awbds23njjDfLz83nmmWca7VMIIYQQbZck/c20dOlStm/fTmpqKpaWlkRERHD48GG2bt3aaE30a9eusWPHDoqKivD29sbPz09J9G/l4sWLzJ07l6SkJJydnamrq6OysrLBcQsWLGDOnDm4uLhQXV1NYGAgzzzzDC4uLoSGhhIbG4u7uzvZ2dmEhoby5ZdfEhUVhb+/f6NPI2JiYnBxcWHu3LmcPXuW8ePHM2zYMADi4uJwcXFh+fLlaLVawsLCSE9PV6ah3Cw3N5f169ezfft2bGxs0Gg0mJiY8K9//Yv4+HgyMjKwtbUlMTGRDz74QKmVn5+fz549e+jSpQshISHMnz+frVu3Ym5ujp+fH9nZ2criV7/++iupqamYmZkRHBzMzp07mTp1KjExMfTq1Yvk5GRKS0uZMGEC/fr1o7a2luzsbD7//HOMjIz4448/DH7GN1u3bh1arZY9e/ag0Wh45ZVX6N27Nx4eHgCUlJSwbds2NBoNo0ePZuLEifTs2fOWMTpy5AgDBgy45T6AgoICoqKiGt0PcODAAXr06EGPHj3w8/MjPT1dkn4hhBDiIaY2NcbGxvqB9C1J/x3w8vJqchGk6/PAu3fvTvv27fn9999xdHRs9PhffvkFR0dHZUVVY2NjHnvsMb1jrly5wo8//kh5ebmyTaPRUFhYSOfOnTE1NVUSYzc3N0xNTTl16lSTXzagPlFfvHgxAA4ODri5uS
n7vv76a/Ly8vj444+B+i8zdnZ2jbZ14MABfHx8sLGxAVD6zs3NxcPDQ1nlNiAgAB8fH+U8Z2dnunTpAkDfvn3p1q0b1tb1/2P06dOH06dPK9c2ZswYpV1fX1++/PJLpk6dSnZ2NhEREQDY2tri4eFBbm4uvr6+1NXVERkZyZAhQxg5cmST8biV7OxsFi1ahEqlwsrKirFjx5Kdna0k/V5eXhgZGWFtbY2joyNnzpxpNOlv6fIYoaGh/Pbbb1y4cIEffvgBqH+a4ufnp8TA19eXhQsXYmZm1uJrE0IIIcS9V11Td8/WEjBUp1+S/jtgaNXTG5MvY2NjZcqMsbGxkvRVVVUpxzQnEdRqtahUKtLS0jA1NdXbd+zYMVQqVYNzbrWtJXQ6HevWrcPBweGO22lqLDfHq7H4GWr35j5UKhXW1tbs3buX3NxcsrOziY+PZ9euXXc8/ht/bu54AZycnEhPT290f9++fcnPz6dv374ArF27Fo1Go3whPH/+PD/88APHjh1j3bp1QP17Al9++SXe3t4tui4hhBBCtH6S9LeApaUllZWVBu+aG+Lg4EB+fj5ubm7s2bNH2T5o0CAWL17M4cOHGTRokDK958a7/VZWVjz33HNs2LCBN998E6ifVmJiYsJTTz1FdXU1OTk5uLq6kpOTQ21tLT179uTKlStcu3ZNeen4ZtdfBp0zZw5nz57Vm0rj6enJhg0bWLZsGcbGxpSXl6PRaBr9EjBy5EgiIyMJCAigc+fOaDQaTE1NcXNzY+PGjZSVlWFjY8POnTuVPlpq//79TJ8+HbVazWeffabcuXdzc2PHjh2EhoZSVlbGwYMHCQwMpLy8HGNjY4YPH87QoUM5cOAAZ8+eVZ46XNfUZ+zu7k5aWhrOzs5oNBr27dtHeHj4bY1/7NixbNy4kU2bNjFjxgxUKhWXLl1i165dTJ8+nTlz5rBkyRL69OmjTAO6evWqcv6uXbt46aWXiI+PV7ZlZWWRlpbW4qR/t05LuU57W9fRFjwK1SAeNImRYRKjpkl8DJMYGfYoxMjKvusD61uS/hYICgritddeo127dnTr1u2221m0aBFRUVHY2NgwYsQIZXuHDh1ISkpixYoVXLlyBSMjIxYsWNAgMY6PjycuLk5J7iwtLVm+fDk2NjasXbtW70XeNWvWoFarUavVeHt74+3tzWOPPUZqaqpem5GRkYSHh7N//36efPJJhg4dqjfeVatW4ePjg0qlwtTUlEWLFjWa9D///PMEBwfz+uuvo1KpUKvVpKSk0KtXL+bPn09QUBBQ/+XndhepcnFx4c0336S4uBgXFxfl/YLFixcTFRWlxCYsLIxevXpx5MgRlixZQm1tLXV1dQwfPpyBAwdSXKz/Fv2Nn/GNVX0A5syZwwcffKC0PX78eIYPH35b47ewsGDLli2sWrWKUaNGYWlpqZTsBBg+fDjR0dHExsZSVlZG586dUavVSrx27drFggUL9NocNWoUS5cupaioiO7duzd7LO98+BFabcumG7UlbX1Z9+aQGBkmMWqaxMcwiZFhEqOmqXQtnVwsxAPWVtYQuF8qKjSS9DehUycrLlxo+EK9+DeJkWESo6ZJfAyTGBnW1mNkZKSiY8fGZ6PInX4h2rimfkGIek29GCXqSYwMkxg1TeJjmMTIMIlR4+ROv7hts2bNoqSkRG+bvb09KSkpD2hEDx+JkRBCCCEeBpL0CyGEEEII0co93GsVCyGEEEIIIe6YJP1CCCGEEEK0cpL0CyGEEEII0cpJ0i+EEEIIIUQrJ0m/EEIIIYQQrZwk/UIIIYQQQrRykvQLIYQQQgjRyknSL0Qrc+rUKV555RVeeuklXnnlFX777bcGx9TV1fH+++8zevRoXnzxRT799NNm7Wst7jRGycnJjB07lvHjxzNhwgS+++67+zj6++NOY3TdyZMnefbZZ/nwww/vw6jvr7sRo3379uHt7c24cePw9vbm/Pnz92n098edxujChQsEBwfj7e
2Nl5cXy5Yto7a29j5ewb3XnBh9//33TJgwAScnpwb/L8nv7HpNxagt/M5uFp0QolWZNm2aLjMzU6fT6XSZmZm6adOmNThm165duqCgIF1dXZ3uwoULumHDhunOnj1rcF9rcacx+vbbb3VXrlzR6XQ63dGjR3XPPfec7urVq/fvAu6DO42RTqfT1dbW6qZOnaqbN2+ebsWKFfdt7PfLncYoLy9P9/LLL+tKS0t1Op1Od+nSJd21a9fu3wXcB3cao5iYGOXvTnV1tW7ixIm6vXv33r8LuA+aE6PffvtNd+TIEd1HH33U4P8l+Z1dr6kYtYXf2c0hd/qFaEUuXLhAQUEB48aNA2DcuHEUFBRQXl6ud9y+ffuYNGkSRkZGPP7444wePZr9+/cb3Nca3I0YDRs2DHNzcwB69+6NTqfj4sWL9/U67qW7ESOADRs2MGLECHr27Hk/h39f3I0Y/ed//idBQUHY2NgAYG1tjZmZ2f29kHvobsRIpVKh0WjQarVUV1dTU1ODnZ3dfb+We6W5MerRowf9+vXDxMSkQRvyO7teUzFq7b+zm0uSfiFakZKSEuzs7DA2NgbA2NgYW1tbSkpKGhzXtWtX5Wd7e3t+//13g/tag7sRoxtlZmbyxBNP0KVLl3s78PvobsTo2LFjfP/99wQGBt63cd9PdyNGhYWFnD17lr/85S/4+fmxbt06dDrd/buIe+xuxGjOnDmcOnWKF154Qfnvueeeu38XcY81N0aG2pDf2c3XGn9nN5ck/UIIcZt+/PFH1qxZw+rVqx/0UB4qNTU1LFmyhPfff1/5h1o0VFdXx/Hjx/n444/ZsmUL3377Lbt3737Qw3qo7N+/n969e/P999/z7bff8o9//KNV3cUW91db/50tSb8QrYi9vT3nzp2jrq4OqE8qSktLsbe3b3BccXGx8nNJSYly16Opfa3B3YgRwOHDh3nvvfdITk7mqaeeuj+Dv0/uNEZlZWWcOXOG4OBgPD09+a//+i927tzJkiVL7ut13Et34+9R165d8fLyQq1WY2VlxahRo8jLy7t/F3GP3Y0Ybd26lfHjx2NkZIS1tTWenp7k5ubev4u4x5obI0NtyO9sw1rz7+zmkqRfiFakU6dO9O3bl6ysLACysrLo27cvjz/+uN5xXl5efPrpp2i1WsrLy/nqq6946aWXDO5rDe5GjPLy8nj33XdZu3Yt/fv3v+/XcK/daYy6du1Kbm4uX3/9NV9//TXTp09n8uTJfPDBBw/icu6Ju/H3aNy4cXz//ffodDpqamrIycmhT58+9/1a7pW7EaPu3bvz7bffAlBdXU12dja9evW6vxdyDzU3Rk2R39mGtfbf2c32YN8jFkLcbSdOnNBNnDhR9+c//1k3ceJEXWFhoU6n0+lmzpypy8vL0+l09VVVoqKidKNGjdKNGjVKl5qaqpzf1L7W4k5jNGHCBN2QIUN048ePV/47duzYA7mWe+VOY3SjtWvXtsrqPXcao7q6Ol1sbKzOy8tLN2bMGF1sbKyurq7ugVzLvXKnMTp9+rQuMDBQN27cON3LL7+sW7Zsma6mpuaBXMu90pwY/fTTT7phw4bpBg0apBs4cKBu2LBhum+//Van08nv7ObEqC38zm4OlU7Xit4aEkIIIYQQQjQg03uEEEIIIYRo5STpF0IIIYQQopWTpF8IIYQQQohWTpJ+IYQQQgghWjlJ+oUQQgghhGjlJOkXQgghhBCilZOkXwghRJvh6enJgAEDGDRokPLfuXPn7rjNQ4cO3aUR3r6ioiJ69+5NbW3tgx4KAL179+b06dMPehhCiP+fyYMegBBCCHE/paSk4O7u/qCHoaitrcXEpPX8c9zarkeI1kLu9AshhGjzLl++zKJFi3jhhRcYNmwYCQkJ1NXVAXDmzBlee+01hgwZwpAhQ5g/fz6XLl0C4L333qO4uJhZs2YxaNAgNm7cSG5uLsOHD9dr/8anAUlJSYSGhhIWFoazszO7du1qsv/Tp0
8zdepUnnvuOYYMGcI777zTrGuKiIhg2bJlzJw5k0GDBhEQEEBZWRnLly/HxcUFLy8vCgoK9Ma4fv16xowZg4uLCwsXLqSqqkrZv3PnTl588UWef/55Zs2apfeEpHfv3mzbto0///nP/PnPf+Yvf/kLAD4+PgwaNIh9+/bxxx9/EBISgqurKy4uLoSEhPD7778rbUybNo3ExEQCAgIYNGgQQUFBlJeXK/v/8Y9/EBAQwODBg/Hw8CAjIwOA6upqPvzwQ0aMGIG7uztRUVFcu3atWTESoi2RpF8IIUSbt2DBAkxMTPjyyy/JzMzkhx9+4NNPPwVAp9MREhLCd999x+eff87vv/9OUlISAKtWraJr166kpKRw+PBh3njjjWb19z//8z94eXnxj3/8A29v7yb7X7NmDUOHDuWnn37i22+/ZerUqc2+rs8//5x33nmHnJwc1Go1r7zyCv379ycnJ4eXXnqJuLg4veP37NnD5s2b+e///m9OnTrFunXrAMjOzmb16tUkJiby/fff061bN+bNm6d37ldffcXOnTvZt28f27ZtA2D37t0cPnyYMWPGoNVqmTBhAt988w3ffPMNZmZmREdH67WRlZVFXFwc2dnZ1NTU8Le//Q2A4uJi3njjDaZOnUp2djaZmZn07dtX+QxOnTpFZmYmX375JaWlpSQnJzc7RkK0FZL0CyGEaFPefPNNBg8ezODBg5kzZw7nz5/n22+/ZdGiRVhYWNCpUycCAwPZu3cvAD169GDo0KGo1Woef/xxXn/9dX766ac7GsPAgQMZPXo0RkZGVFZWNtm/iYkJxcXFlJaWYmZmxuDBg5vdz4svvoiTkxNmZma8+OKLmJmZ4evri7GxMWPGjOHo0aN6x//lL3/B3t6eDh06MHv2bGUMe/bswd/fn/79+6NWq5k3bx6//PILRUVFyrnBwcF06NCBdu3a3XIsHTt25KWXXsLc3BwrKytmz57dII4TJkzgySefpF27dnh5eSnj27NnD+7u7owbNw5TU1M6duxI37590el0fPrppyxatIgOHTpgZWVFSEiIMm4hxL/JpDshhBBtSnJyst6c/ry8PGpra3nhhReUbVqtFnt7ewAuXLhATEwM//jHP9BoNOh0Otq3b39HY+jSpYvy5+Li4ib7f++991izZg0TJ07kscce4/XXX2fixInN6qdTp07Kn9u1a0fnzp31fr5y5Yre8df7BOjatSulpaUAlJaW0r9/f2WfpaUlHTp04Ny5c3Tv3r3Bubdy9epV4uLi+O677/jjjz8A0Gg01NXVYWxsDICNjY1yvLm5uTK+kpISnnjiiQZtlpeXc/XqVSZMmKBs0+l0aLXaJsciRFskSb8QQog2rUuXLqjVanJycm75Aurq1atRqVR89tlndOzYka+++qrBtJQbmZub680pr6ur05ubDqBSqZrdv42NDTExMUD9vPbXX38dFxcXevTo0eJrNaSkpET5c3FxMba2tgDY2tryf//3f8q+K1eucPHiRezs7G55Tbfyt7/9jVOnTrFz505sbGw4evQovr6+6HQ6g+Oyt7cnLy+vwfaOHTvSrl079u7dqzcWIURDMr1HCCFEm2Zra8vQoUNZsWIFlZWVaLVazpw5R2OHWQAAAh9JREFUw48//gjU3422sLCgffv2nDt3jk2bNumd37lzZ86ePav8/OSTT1JVVcWBAweoqanhP/7jP6iurr7t/q+/RwDw2GOPoVKpMDK6N/98b9++nd9//52LFy8qL/UCeHt7k5GRwdGjR6muruajjz5iwIAByl3+W7k5LhqNBjMzM9q3b8/Fixf561//2uxxeXt7c+jQIfbt20dtbS0VFRUcPXoUIyMjJk2aRGxsLBcuXADg3LlzfPfdd7cZASFaL0n6hRBCtHkrV66kpqZGqVwTGhpKWVkZAHPnzqWgoIDBgwcTHBzMn//8Z71zg4OD+Y//+A8GDx7M5s2bsba2ZunSpSxevJjhw4djbm6uN52npf
3n5+czadIkBg0axOzZs4mMjMTBweGexGHcuHEEBQUxevRoHBwcmD17NgBubm68/fbbvPXWW7zwwgucPXuWhISEJtuaO3cuERERDB48mH379jF9+nSqqqpwdXXllVdeYdiwYc0eV9euXdm4cSMff/wxzz//PL6+vhw7dgyon/7Uo0cPJk+ejLOzM4GBgZw6der2gyBEK6XSNee5mhBCCCFaNU9PT2JiYh6qNQyEEHeP3OkXQgghhBCilZOkXwghhBBCiFZOpvcIIYQQQgjRysmdfiGEEEIIIVo5SfqFEEIIIYRo5STpF0IIIYQQopWTpF8IIYQQQohWTpJ+IYQQQgghWjlJ+oUQQgghhGjl/j91vJigF36n8AAAAABJRU5ErkJggg==","text/plain":["
"]},"metadata":{},"output_type":"display_data"},{"data":{"text/plain":["
"]},"metadata":{},"output_type":"display_data"}],"source":["ml.features_importances_plot(\n"," classifier=best_rf_model, \n"," top_features=30, \n"," model_name='rf',\n"," show=True, \n"," path_save=None\n",")"]},{"cell_type":"markdown","metadata":{},"source":["Let's try now using SVM."]},{"cell_type":"code","execution_count":14,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["performing gridSearch...\n","GridSearchCV took 0.26 seconds for 1 candidate parameter settings.\n","GridSearchCV(cv=10,\n"," estimator=Pipeline(steps=[('scl', None),\n"," ('clf', SVC(random_state=1))]),\n"," n_jobs=10,\n"," param_grid={'clf__C': [10], 'clf__gamma': [0.001],\n"," 'clf__kernel': ['rbf']},\n"," scoring=make_scorer(matthews_corrcoef))\n","Model with rank: 1\n"," Mean validation score: 0.934 (std: 0.027)\n"," Parameters: {'clf__C': 10, 'clf__gamma': 0.001, 'clf__kernel': 'rbf'}\n"," \n","\n","make_scorer(matthews_corrcoef)\n","10\n","Best score (scorer: make_scorer(matthews_corrcoef)) and parameters from a 10-fold cross validation:\n"," MCC score:\t0.934\n"," Parameters:\t{'clf__C': 10, 'clf__gamma': 0.001, 'clf__kernel': 'rbf'}\n","\n","0.933967 (0.026972) with: {'clf__C': 10, 'clf__gamma': 0.001, 'clf__kernel': 'rbf'}\n"," clf__C clf__gamma clf__kernel means stds\n","0 10 0.001 rbf 0.933967 0.026972\n"]}],"source":["ml = ShallowML(X_train, X_test, y_train, y_test, report_name=None, columns_names=fps_x.columns)\n","\n","param_grid = {'clf__C':[10], 'clf__gamma':[0.001], 'clf__kernel':['rbf']}\n","\n","best_svm_model = ml.train_best_model(\n"," model_name=None,\n"," model='svm',\n"," score=make_scorer(matthews_corrcoef),\n"," param_grid=param_grid,\n",")"]},{"cell_type":"code","execution_count":15,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":[" precision recall f1-score support\n","\n"," 0 0.99 0.92 0.96 253\n"," 1 0.93 0.99 0.96 247\n","\n"," accuracy 0.96 500\n"," macro avg 0.96 0.96 0.96 500\n","weighted avg 0.96 0.96 0.96 
500\n","\n","[[234 19]\n"," [ 2 245]]\n"]},{"data":{"text/plain":["{'Accuracy': 0.958,\n"," 'MCC': 0.9181788424394336,\n"," 'f1 score': 0.9589041095890412,\n"," 'roc_auc': 0.9584020098894241,\n"," 'Precision': array([0.494 , 0.9280303, 1. ]),\n"," 'Recall': array([1. , 0.99190283, 0. ]),\n"," 'fdr': 0.07196969696969698,\n"," 'sn': 0.9919028340080972,\n"," 'sp': 0.924901185770751}"]},"execution_count":15,"metadata":{},"output_type":"execute_result"}],"source":["scores, report, cm, cm2 = ml.score_testset(best_svm_model)\n","print(report)\n","print(cm) \n","scores"]},{"cell_type":"markdown","metadata":{},"source":["The SVM feature importance can only be calculated if the best model has a linear kernel. Since the best model has the RBF kernel, we can't calculate the feature importance, so we will use the linear_svm model instead."]},{"cell_type":"code","execution_count":16,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["performing gridSearch...\n"]},{"name":"stderr","output_type":"stream","text":["/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of 
iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of 
iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n","/home/jna/miniconda3/envs/dna-conda/lib/python3.8/site-packages/sklearn/svm/_base.py:1225: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n"," warnings.warn(\n"]},{"name":"stdout","output_type":"stream","text":["GridSearchCV took 0.49 seconds for 4 candidate parameter settings.\n","GridSearchCV(cv=10,\n"," estimator=Pipeline(steps=[('scl', None),\n"," ('clf', LinearSVC(random_state=1))]),\n"," n_jobs=10, param_grid=[{'clf__C': [0.01, 0.1, 1.0, 10]}],\n"," scoring=make_scorer(matthews_corrcoef))\n","Model with rank: 1\n"," Mean validation score: 0.922 (std: 0.030)\n"," Parameters: {'clf__C': 0.01}\n"," \n","\n","Model with rank: 2\n"," Mean validation score: 0.910 (std: 0.031)\n"," Parameters: {'clf__C': 0.1}\n"," \n","\n","Model with rank: 3\n"," Mean validation score: 0.882 (std: 0.040)\n"," Parameters: {'clf__C': 1.0}\n"," \n","\n","make_scorer(matthews_corrcoef)\n","10\n","Best score (scorer: make_scorer(matthews_corrcoef)) and parameters from a 10-fold cross validation:\n"," MCC score:\t0.922\n"," Parameters:\t{'clf__C': 0.01}\n","\n","0.922102 (0.029691) with: {'clf__C': 0.01}\n","0.909869 (0.031035) 
with: {'clf__C': 0.1}\n","0.881781 (0.040070) with: {'clf__C': 1.0}\n","0.867122 (0.041528) with: {'clf__C': 10}\n"," clf__C means stds\n","0 0.01 0.922102 0.029691\n","1 0.10 0.909869 0.031035\n","2 1.00 0.881781 0.040070\n","3 10.00 0.867122 0.041528\n"]}],"source":["ml = ShallowML(X_train, X_test, y_train, y_test, report_name=None, columns_names=fps_x.columns)\n","\n","best_linear_svm_model = ml.train_best_model(\n"," model_name=None,\n"," model='linear_svm', \n"," score=make_scorer(matthews_corrcoef),\n"," param_grid=None\n",")"]},{"cell_type":"code","execution_count":17,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":[" precision recall f1-score support\n","\n"," 0 0.99 0.94 0.97 253\n"," 1 0.94 0.99 0.97 247\n","\n"," accuracy 0.97 500\n"," macro avg 0.97 0.97 0.97 500\n","weighted avg 0.97 0.97 0.97 500\n","\n","[[238 15]\n"," [ 2 245]]\n"]},{"data":{"text/plain":["{'Accuracy': 0.966,\n"," 'MCC': 0.933294079969078,\n"," 'f1 score': 0.9664694280078896,\n"," 'roc_auc': 0.966307148229345,\n"," 'Precision': array([0.494 , 0.94230769, 1. ]),\n"," 'Recall': array([1. , 0.99190283, 0. 
]),\n"," 'fdr': 0.057692307692307696,\n"," 'sn': 0.9919028340080972,\n"," 'sp': 0.9407114624505929}"]},"execution_count":17,"metadata":{},"output_type":"execute_result"}],"source":["scores, report, cm, cm2 = ml.score_testset(best_linear_svm_model)\n","print(report)\n","print(cm) \n","scores"]},{"cell_type":"code","execution_count":18,"metadata":{},"outputs":[{"data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAApcAAAG/CAYAAAANe/xXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAADr+ElEQVR4nOzdeVhV1f748fcBzmF0FhBnI2dEIUlwQtFbKiEgamTigAVOYSohiqIRIFcxVNJwut5vKqEBDqBZ12taN4EmE34OlDigYeCAMSnTOb8/eNh5BBQRQY7r9Tz3eTz77L32Wvtsbx/32uvzkalUKhWCIAiCIAiCUA+0GrsDgiAIgiAIguYQwaUgCIIgCIJQb0RwKQiCIAiCINQbEVwKgiAIgiAI9UYEl4IgCIIgCEK9EcGlIAiCIAiCUG9EcCkIgiAIT8jf35+IiIh6ay8iIoJBgwYxZMiQemmvZ8+eXL16FYDAwEA2bdpUL+0KQm2I4FIQBOEJOTg4YGlpiZWVlfS/7Ozsp27z1KlT9dTDx4uMjMTX17fBzvco8fHxvPXWW43djSo8PDzo168fVlZWDBo0iPnz55OTk/PE7TwY6FXnxo0b7Ny5kyNHjvD9998/TZerFRQUxLx58+q9XUGoiQguBUEQ6iAqKorTp09L/zM1NW3U/pSVlTXq+evqee93YGAgp0+f5quvviIvL4/Vq1fX+zn++OMPWrZsSZs2bZ742Of1+qlUKpRKZWN3Q2gkIrgUBEGoJ/n5+SxbtoyhQ4cybNgwIiIiKC8vByAzM5Np06YxaNAgBg0axOLFi8nLywPggw8+ICsri9mzZ2NlZcW2bdtISUlh+PDhau0/+HQzMjISHx8ffH19sba2Zv/+/Y88/+P07NmTPXv28Nprr2FlZcX69evJzMzkzTffxNramgULFlBSUgIg9S0qKopBgwbh4ODAoUOH1K6Dn58ftra2jBw5ks2bN0uBRnx8PO7u7oSGhvLqq6+ycOFCVq5cya+//oqVlRUDBw4E4MSJE7i4uGBtbY29vT2RkZFS+9evX6dnz57s37+fESNGMGjQID799FPp+/LycqKiohg9ejRWVlZMmDCBGzduAJCRkcHMmTN59dVXef311zly5Eitrk/Lli15/fXX+f3336v9ft++ffzjH//g1VdfZfbs2dKT7LfffhsAZ2dnrKysqpzv1KlTeHp6kpOTg5WVFf7+/gD897//xdHRkYEDB+Lh4UFGRoZ0jIODA1u3bsXJyYkBAwY8NsB8cAq/8rf717/+hZ2dHUOHDiUuLk7at6SkhH/+85+MGDGCwYMHExgYyP379wH466+/8Pb2xtbWFhsbG7y9vfnzzz+lYz08PIiIiMDd3Z3+/ftz7dq1GvsUHx/PqFGjsLKyku6fkpISBg4cyG+//Sbtd+fOHSwtLbl9+7bU923btkl9P3bsGCdPnuT111/n1VdfJSoq6pHXQmggKkEQBOGJjBw5UvX9999X2T5nzhzVihUrVIWFhapbt26p3NzcVJ9//rlKpVKprly5ovrf//6nKi4uVt2+fVs1ZcoUVXBwcI1tJicnq4YNG1bjeTdu3Kjq06eP6j//+Y+qvLxcde/evUee/2EbN
25ULV68WPrco0cPlbe3tyo/P1/122+/qfr27auaNm2aKjMzU5WXl6caO3asKj4+Xupb7969VaGhoari4mJVSkqKqn///qqMjAyVSqVSffDBB6rZs2er8vPzVdeuXVO99tprqn379qlUKpUqLi5O1bt3b9Vnn32mKi0tVd27d08VFxencnd3V+tfcnKy6sKFC6ry8nLV+fPnVXZ2dqr//Oc/KpVKpbp27ZqqR48eqoCAANW9e/dU58+fV/Xt21d18eJFlUqlUm3btk31xhtvqDIyMlRKpVJ1/vx51Z07d1SFhYWq4cOHq2JjY1WlpaWq//f//p/q1VdfVf3222/VXqOpU6dK/b59+7bKw8ND5evrq1KpVKolS5aoPv74Y5VKpVKdOnVK9eqrr6r+3//7f6ri4mJVUFCQasqUKWrX9sqVK9Weo7rf+tKlS6r+/fur/ve//6lKSkpUW7duVY0ePVpVXFws3Qfjx49XZWVlqe7du1dtmw+e88G+Vv5269evV5WUlKhOnDihsrS0VN29e1elUqlUwcHBKm9vb1Vubq4qPz9f5e3trQoPD1epVCrVnTt3VEePHlUVFRWp8vPzVe+9955qzpw5atfL3t5e9dtvv6lKS0tVJSUl1fatsLBQZWVlJd0v2dnZ0m/g7+8v9VWlUql2796t8vT0VOt7ZGSkqqSkRLV3717VoEGDVIsWLZLuWwsLC1VmZmaN11poGOLJpSAIQh3MmzePgQMHMnDgQObOncutW7f49ttvWbZsGQYGBrRp04YZM2Zw+PBhALp06cKQIUNQKBS0bt2amTNn8uOPPz5VHwYMGMDo0aPR0tKioKDgkeevjXfffRcjIyO6d+9Ojx49GDJkCJ06daJZs2YMHz6cc+fOqe2/YMECFAoFr776Kvb29nz55ZeUl5dz5MgRFi9ejJGRER07dmTmzJlqTzZNTEzw8PBAR0cHPT29avsyaNAgevbsiZaWFr169cLR0ZEffvhBbZ/58+ejp6dHr1696NWrFxcuXADgiy++YMGCBbz00kvIZDJ69epFq1atOHHiBB06dMDNzQ0dHR369u3L66+/zldffVXjNQkODmbgwIE4OztjbGzM0qVLq+yTkJCAm5sbffv2RaFQsGjRIn799VeuX79e62v/oCNHjmBvb8+QIUOQy+XMmjWL+/fvc/r0aWkfDw8PzMzMarx+j6Kjo8O8efOQy+XY29tjYGDA5cuXUalUfPHFFyxbtoyWLVtiZGSEt7e3dA+1atWK119/HX19fYyMjJgzZ06Ve9jV1ZXu3bujo6ODXC6vsQ9aWlr8/vvv3L9/HxMTE7p37w6Ak5MTiYmJ0n4JCQk4OTmp9X3OnDnI5XLGjRtHbm4u06ZNk+7b7t27k56e/sTXRKhfOo3dAUEQhKZo06ZNDB48WPqcmppKWVkZQ4cOlbYplUrMzMwAuH37NsHBwfz0008UFhaiUqlo3rz5U/WhXbt20p+zsrIeef7aaNu2rfRnXV3dKp9v3bolfW7evDkGBgbS5/bt25OTk0Nubi6lpaW0b99e7bsHFzw92O+anDlzhvDwcH7//XdKS0spKSlhzJgxNfZXX1+foqIiAP788086d+5cpc0//viD1NRUaeodKqbQx48fX2M/li9fzqRJkx7Z15ycHPr27St9NjQ0pGXLlmRnZ9OxY8dHD7SG9h68flpaWpiZmaldwyf5XR/WsmVLdHT+/s9/5bW7c+cO9+7dY8KECdJ3qgfenbx37x6rV6/mu+++46+//gKgsLCQ8vJytLW1a90vAwMDIiIi+Ne//kVAQADW1tYsWbIEc3NzbG1tKS4u5syZM7Rt25YLFy4wevRotb5XnqsysH7wXVVdXV0KCwvremmEeiKCS0EQhHrQrl07FAoFycnJav/hrrRu3TpkMhmHDh2iVatWHDt2jKCgoBrb09fXl951g4og6M6dO2r7yGSyWp+/vuXl5VFUVCQFmDdu3KB79+60atUKuVxOVlYWL
7/8svTdgwueHux3dZ8BFi9ezNSpU9m+fTu6urqEhISQm5tbq761a9eOzMxMevToobbdzMwMGxsbdu7c+URjfRwTExP++OMP6XNRURF3796t8yIvExMTtfcOVSrVY69hfWjVqhV6enocPny42r7/61//4vLly+zbtw9jY2POnz+Pi4sLKpXqifs1bNgwhg0bxv3791m/fj0rVqwgOjoaLS0txowZQ2JiIm3btmXEiBEYGRnV2xiFhiGmxQVBEOqBiYkJQ4YMISwsjIKCApRKJZmZmdJUbmFhIQYGBjRv3pzs7Gy2b9+udnzbtm3VFkB069aN4uJiTpw4QWlpKZ9++qm0oKYu538WIiMjKSkp4aeffuLEiROMGTMGbW1txowZQ0REBAUFBfzxxx/s3LnzkU8H27RpQ3Z2ttr4CgsLadGiBbq6uqSmpqpNlT7OpEmT2LBhA1euXEGlUnHhwgVyc3MZMWIEV65c4cCBA5SWllJaWkpqaqraYpm6cHJyIj4+nvPnz1NSUsLHH3+MpaWl9NTy4d/2ccaOHcvJkydJSkqitLSUf/3rXygUCqysrJ6qn4+jpaXFpEmTCA0N5fbt2wBkZ2fz3XffARW/ia6uLs2bN+fu3bt88skndTrPrVu3+O9//0tRUREKhQIDAwPpaSRUXM8vv/yShIQE3njjjacfmNDgRHApCIJQT9asWUNpaSnjxo3DxsYGHx8fbt68CVS8H3ju3DkGDhyIl5cXr732mtqxXl5efPrppwwcOJAdO3bQrFkzVq5cyfLlyxk+fDj6+vqPnU5+1PnrW9u2bWnevDnDhg3D19eXVatWYW5uDsCKFSvQ19dn9OjRTJkyhTfeeAM3N7ca27K1teXll19m6NChDBo0CICVK1eyceNGrKys2LRpE2PHjq1132bOnMnYsWPx9PTE2tqagIAAiouLMTIyYseOHRw5coRhw4YxdOhQwsPDHxm014adnR0LFizgvffeY+jQoVy7dk0twfr8+fPx9/dn4MCBtVqd/tJLL7F27Vo++ugjbG1t+eabb4iKikKhUDxVP2vjgw8+oEuXLkyePBlra2tmzJjB5cuXAZg+fTrFxcXY2try5ptvMmzYsDqdQ6lUsnPnToYNG8arr77Kjz/+yMqVK6Xv+/fvj76+Pjk5OVUyJghNg0z14PNsQRAEQXiMlJQUPvjgA7799tvG7oogCM8h8eRSEARBEARBqDdiQY8gCIIgCPWupndEt23bprZiX9A8YlpcEARBEARBqDdiWlwQBEEQBEGoNyK4FARBEARBEOqNCC4FQRAEQRCEeiMW9AjCCy43txCl8slfvW7TxojbtwueQY8aj6aN6euvDyOXazNy5JjH79zEaNpvBWJMTYkmjqtNm/qrhCSCS0F4wSmVqjoFl5XHahpNGlNBQQFyubZGjelBmjguMaamQ1PHVR9EcCk0aQ4ODigUChQKBUqlkjlz5uDo6FindqKioujRowf37t1j/vz5mJiYEBwcTEBAAIcPH+arr76iffv2APj7+2NhYcHUqVMBSE1NJSIigszMTPT19WnVqhU+Pj7Y2NjUeM4rV67w8ccf8//+3/+jRYsWlJeXY29vz/vvvy+VQisvL2fEiBH069ePzZs3qx3/3XffsWnTJu7cuYOOjg6dOnVi0aJF9OzZ84nHL2iuKxcvsmrWtMbuRr1TyLUpKS1/Jm0bmbXHNzjsmbQtCC8CEVwKTd7GjRvp0aMH586dw93dHTs7O1q3bl2ntvLz8/Hy8qJv374EBAQgk8kAMDY2JjIyktWrV1c5Jj09HW9vb9asWSOVQ7t69SoXLlyo8Tw5OTm8/fbbLF68mI0bNwIVT5nWr19PSUkJ+vr6AHz77beYmJjw888/c+vWLdq2bQvA//73PwICAti0aRP9+vUD4Ny5c9y8eVMEl4Ka8uJipsg08PX6MhU8o3FF38h6Ju0KwotCA/8fR3hR9enTB0NDQ2JjYxk7dizOzs44OTmRkZEBw
KVLl3jnnXdwc3Nj/PjxxMXFqR1/+/ZtPDw8sLW1Zfny5VJgCeDu7s7333/PxYsXq5x327ZtTJw4Ua3ObpcuXXj99ddr7OuePXsYNGgQEyZMkLYZGRmxfPlyKbAEiIuLw93dndGjR3Pw4EFp+6ZNm5g7d64UWFaOf+jQobW5VIIgCILwzIgnl4LGSE5Opri4mC1btpCYmIiZmRklJSWUl5dTVlaGr68va9euxdzcnIKCAtzc3BgwYADm5uYAvP/++0yZMoUFCxZUadvAwABvb28iIiLYtGmT2nfnzp1jzJgnWzBx7tw5hgwZ8sh97ty5Q0pKCmFhYbz00kusWLGCWbNmSccHBgY+0Tlr8jQvcRsbN6uXPjxPNGlMcrl2Y3ehSVLItRvtPtCk+6+SJo4JNHdc9UEEl0KT5+Pjg66uLkZGRkRGRhIdHc3SpUsZNWoUI0aMoFOnTly8eJGMjAwWLVokHVdaWsqlS5ek4NLe3p4jR47w1ltvYWJiUuU8kydPZufOnZw5c0Zte30Uudq6dSuHDx/m7t27REREYG1tzcGDBxk5ciRGRka88sorlJeX8+uvvzJgwICnPt+Dbt8uqNOL6cbGzbh5M79e+9LYNG1Mpc/onURNV1Ja3ij3gabdf6CZYwLNHFd9BssiuBSavMp3LisNHjyYtLQ0kpOTmTZtGqtWraJ9+/a0atVKbWr5Ye+88w7ffPMNHh4e7Nq1q0qAKZfLee+991i3bp20sAegb9++pKamMnr06Fr3uU+fPqSlpUmfvby88PLyYsKECZSWlgIQHx/PnTt3cHBwACreB42Li2PAgAH06dOH1NRUevfuXetzCoIgCEJDEMGloFHKysrIysrC0tISS0tLMjMzOX/+PEOGDEFPT48DBw7g4uICQEZGBqamphgZ/T0t7O3tjUqlqjHAdHJyYseOHVy/fh0LCwugIiidMWMGtra2DB48GKh4v/P8+fM1rlyfMmUKrq6uav0pLy+XAsvU1FTy8/P53//+J737mZ2dzRtvvMGyZcuYM2cOK1aswMLCgr59+0rH5ObmYm9vXz8XU9AI2rq6RKuUjd2NevesV4sLglB3IrgUNIpSqcTf35/8/HxkMhlmZmYsXrwYHR0doqKiCA0NZceOHSiVStq0acP69eurtDF79mwpwPzss8/UvtPS0mLhwoXMnj1b2tarVy+ioqKIiIggMDBQLRVRTUxNTdm9ezcff/wxGzdupGXLligUCkaPHk3fvn1Zu3Ytjo6OaouKTE1N6d27N1999RUuLi4EBQURFBTE3bt30dHRoWPHjixevPjpL6KgUbq+/DLzFixp7G7UO02clhQETSFT1ccLY4IgNFnincu/adqYDhzYh1yujaOjW2N3pd5p2m8FYkxNiSaOqz7fuRSpiARBEARBEIR6I6bFBeEZmjBhAuXl6u+F9e/fn6CgoEbqkSAIgiA8WyK4FIRnKD4+vrG7IAiCIAgNSkyLC4IgCIIgCPVGPLkUmjQHBwcUCgUKhQKlUsmcOXNqTP/zuHaioqLo0aMH9+7dY/78+ZiYmBAcHExAQACHDx/mq6++kvJb+vv7Y2FhwdSpU4GKNEARERFkZmaqrRa3sbGp8ZyXL18mPDycCxcuoK+vj4GBAV5eXlK+zMTERP71r39RUFCAkZERBgYGzJo1i5EjRwJw8+ZN1q5dy88//4y+vj46OjpMmTKFyZMnP/H4m4rw5f4UPMO6z88yvU1jaNnjZeR6uo3dDUEQXjAiuBSavMok6ufOncPd3R07Oztat25dp7by8/Px8vKib9++BAQESKmAjI2NiYyMZPXq1VWOSU9Px9vbmzVr1kj1xa9evcqFCxdqPE9OTg5Tp07lgw8+kMpJ5uTkcOrUKQC++OILdu7cSWRkpFRB6OzZs3z//feMHDmSe/fuMXXqVCZMmEBYWBhaWlrk5eVx5MiROo27qSi4kcUU2TOccClTwbNsv4H9BNwqLm7sbgiC8IIRwaWgMfr06
YOhoSGxsbHs379fepq5fv16zM3NuXTpEqGhoeTm5lJaWsr06dNxc/s7Rcvt27fx8/Nj5MiRVeqLu7u7s3v3bi5evMjLL7+s9t22bduYOHGiFFgCdOnShS5dutTY1z179jBo0CApgTqAiYmJ9DkyMpKQkBApsISKSkCVCdMTExNp2bIl3t7e0vfNmzfH3d299hdMEARBEJ4BEVwKGiM5OZni4mK2bNlCYmIiZmZmlJSUUF5eTllZGb6+vqxduxZzc3MKCgpwc3NjwIABUgD3/vvvM2XKlCqBJYCBgQHe3t5ERERITxornTt3jjFjxjxRX8+dO8eQIUOq/e727dtkZ2fTv3//Go8/e/YslpaWT3TOmrRpY/T4nWpQn3nRakMh1654uig8kYb+nRqKJo5LjKnp0NRx1QcRXApNno+PD7q6uhgZGREZGUl0dDRLly5l1KhRjBgxgk6dOnHx4kUyMjJYtGiRdFxpaSmXLl2Sgkt7e3uOHDnCW2+9VaXsI8DkyZPZuXMnZ86cUdtelzoET3qMu7s7BQUF6OrqEhcXV6dz1qQpJVEvKS3XqGnrhqJpyZ5Bc5NYizE1DZo4rvoMlkVwKTR5le9cVho8eDBpaWkkJyczbdo0Vq1aRfv27WnVqhUHDx6ssZ133nmHb775psa64nK5nPfee49169ZJC3ugYro6NTVVWohTG3379iUtLa3a79q0aYOpqSlpaWnS082YmBh+++03qeykhYUFcXFxtT6fIAiCIDQUEVwKGqWsrIysrCwsLS2xtLQkMzOT8+fPM2TIEPT09Dhw4ID0XmNGRgampqYYGf09Lezt7S3VFa8uwHRycmLHjh1cv34dCwsLoCIonTFjBra2tgwePBiAS5cucf78+RpXrk+ZMgVXV1cSEhJwcnICIDs7m5MnTzJ58mTmzZvH6tWriYyMpFu3bgDcu3dPOt7R0ZFt27axfft2Zs2ahUwmIy8vj/379zN9+vT6uZjPISOz9kSL1eK11hKQ64rV4oIgNCwRXAoaRalU4u/vT35+PjKZDDMzMxYvXoyOjg5RUVGEhoayY8cOlEolbdq0Yf369VXamD17thRgfvbZZ2rfaWlpsXDhQukJIkCvXr2IiooiIiKCwMBAtVRENTE1NWXXrl2Eh4ezfv16DAwMMDAw4N133wXgzTffRE9Pj8WLF1NQUEDr1q3R19fH398fqHgHdNeuXaxdu5ZRo0ZhaGgopSLSZL7BYc+0fU2b6qqsLS4IgtCQZKr6fHlLEIQmpym9c/msadqYKoNLR0e3x+/cxGjabwViTE2JJo6rPt+5FG/GC4IgCIIgCPVGTIsLwjM0YcIEysvV3+Hr378/QUFBjdQjQRAEQXi2RHApCM9QfHx8Y3dBEARBEBqUCC4FQXjuPeua4pU0brW4qC0uCEIjEMHlE4qMjMTb2xuFQlHt987Ozuzduxc9Pb16P7eDgwNRUVFqOR2fRF5eHnv37pVWJAMEBATg6urKwIEDq+zv7++PhYUFU6dOrXOfm4Ls7Gx8fX3ZtWsXUPU33rBhA927d2fcuHH1ds709HT8/PwA+OuvvygoKKBDhw5ARbL2t99+m7S0NNavX8/ly5dp2bIlKpWKN954g1mzZknt/PXXXwwdOhR3d3cCAgLqrX/Pm2deU7ySqC0uCILw1ERw+YQ++eQTPD09qwSXZWVl6OjoPDJJd2PLy8tj+/btasFlSEhII/bo+VCZFqjSw79xdeUgn1bPnj2leyU+Pp4TJ06wceNG6fv09HTeffdd/vnPf2Jvbw/AzZs32bJli1o7CQkJDBgwgMOHD/PBBx/U+I8eQRAEQWgoIrh8Ah9++CFQUYpPS0uLDh06YGZmxpUrV8jNzSU+Pp6ePXvyyy+/YGhoiIODA87Ozpw6dYqbN2/i6ekpPQV8cL+HP58+fZo1a9ZQWFgIgJ+fH0OHDlXrS05ODsHBwWRlZVFcXIyjo6OUezE1NZWQkBCKioowMDAgICAAS0tLg
oKCyM/Px9nZGX19fWJiYvDw8MDT05ORI0eSnZ2Nn58fubm5dOzYUW0hSkFBAatXryY9PZ3i4mIGDRrE0qVL0dauOYdebGyslCdSLpezZcsW2rZty4EDB9ixYwcAnTt3JigoiDZt2hAfH09iYiLNmjUjPT0dU1NTVqxYwZo1a7h69SoWFhaEh4cjk8nw9/dHR0eH69evc+PGDWxsbAgMDEShUHDr1i1WrlxJZmYmALNmzcLFxQWlUklQUBDJyckoFAoMDAyIiYnh+vXruLm5kZKSUuU33rVrF6GhodIT3MLCQoKDg6XqOuPHj8fLywsADw8PLCws+PXXX8nJyWHs2LH4+vrW6V7btm0bkyZNkgJLAGNjY5YvX662X1xcHH5+fmzZsoXjx48/cY1zaBq1xUVN8aejqTWQNXFcYkxNh6aOqz6I4PIJrFy5kujoaGJiYjA0NMTf35/Tp0+ze/duDAwMqj3m/v377N27l+vXr+Pk5ISrq6sUUFbn7t27zJ8/n8jISKytrSkvL6egoKDKfkuWLGHu3LnY2NhQUlLCjBkz6NevHzY2Nvj4+BAaGsrgwYNJSkrCx8eHr7/+msDAQNzc3Gp8uhocHIyNjQ3z58/n2rVrjB8/nmHDhgGwevVqbGxsCAkJQalU4uvrS1xcHJMnT662rZSUFLZs2UJ0dDTGxsYUFhaio6PDb7/9Rnh4OPHx8ZiYmLB+/Xo++ugjKZl5WloaCQkJtGvXDm9vbxYvXszu3bvR19fH1dWVpKQkqQrOmTNniImJQVdXFy8vL/bt28fUqVMJDg6me/fubNq0iZycHCZMmECfPn0oKysjKSmJL7/8Ei0tLf7666/H/sYP27x5M0qlkoSEBAoLC3nzzTfp2bOnFATeuHGDPXv2UFhYyOjRo5k4cSJdu3at8feuyblz5x4bKF64cIG//voLW1tbbt68SVxcXJ2Cy6aQ51LUFH86mpaPDzQ3z6AYU9OgieMSeS6fI2PGjKkxsASk9/Q6duxI8+bN+fPPPx/Z3q+//oq5uTnW1tYAaGtr06JFC7V9ioqK+OGHHwgODsbZ2ZlJkyaRk5NDRkYGly9fRi6XSwGYnZ0dcrmcy5cvP3YsKSkpTJo0CYBOnTphZ2cnfXf8+HF27NiBs7Mzrq6unD179pFtnjhxAmdnZ4yNjQEwNDREV1eXlJQU7O3tpbKK7u7uJCUlScdZW1vTrl07AHr37s0rr7xCs2bN0NHRoVevXly9elXad9y4cVJlGhcXF5KTkwFISkrC3d0dABMTE+zt7UlJSaFTp06Ul5cTEBDAgQMHHns9qpOUlMSkSZOQyWQYGRnh6Oio1v8xY8agpaVFs2bNMDc3l56ePq3K33ro0KHcuHEDqHgy7OzsjEwm47XXXuPMmTNkZ2fXy/kEQRAEoa7Ek8un9KjAEkD3gbq+2tra0lSztrY2lcWRih944b42BZOUSiUymYzY2FjkcrnadxcuXEAmk1U5prptT0KlUrF582Y6der01O08qi8PX6+art/j2n34HDKZjGbNmnH48GFSUlJISkoiPDyc/fv3P3X/H/xc2/4+Tu/evUlLS2P06NEA0nT4oEGDKC8vp6SkhISEBHR1daUn0aWlpezfv1+tNKUgCIIgNDQRXD4hQ0NDCgoKHjm1XRudOnUiLS0NOzs7EhISpO1WVlYsX76c06dPY2VlJU2LP/j00sjIiFdeeYWtW7cyb948oGI6VkdHh5deeomSkhKSk5OxtbUlOTmZsrIyunbtSlFREffv35cWHz3M1taWuLg45s6dy7Vr19SmoB0cHNi6dSurVq1CW1ubO3fuUFhYWGOwOXLkSAICAnB3d6dt27YUFhYil8uxs7Nj27Zt3Lx5E2NjY/bt2yed40kdPXqU6dOno1AoOHToECNHjgQqntbu3bsXHx8fbt68ycmTJ5kxYwZ37txBW1ub4cOHM2TIEE6cOMG1a9ekp6iVHvUbDx48mNjYWKytrSksLOTIkSPSq
u/69O677zJjxgxeeeUVhg8fDkBJSQlKpRKAY8eO8dJLL/H5559Lx5w+fZolS5ZoZHBpZNaeaJGK6Im1BOS6IhWRIAgNSwSXT8jT05Np06ahp6cnpY6pi2XLlhEYGIixsTEjRoyQtrds2ZLIyEjCwsIoKipCS0uLJUuWVAnAwsPDWb16NU5OTkBFQBQSEoKxsTEbN25UW9CzYcMGFAoFCoUCJycnnJycaNGiBTExMWptBgQE4Ofnx9GjR+nWrRtDhgxR6+/atWulaVi5XM6yZctqDC5fffVVvLy8mDlzJjKZDIVCQVRUFN27d2fx4sV4enoCFUF2XavV2NjYMG/ePLKysrCxsZHe/1y+fDmBgYHStfH19aV79+6cPXuWFStWUFZWRnl5OcOHD2fAgAFkZakHLQ/+xg+uIgeYO3cuH330kdT2+PHjpeCvPvXq1YstW7awYcMGVq1aRevWrZHL5cyePRsTExPi4+OlPlSysrJCqVTy448/YmNjU+99aky+wWENch5Ne4+qsra4IAhCQ5KpajMPKwjPmRclB2dDaAoLehqKpo2pMrh0dHRr7K7UO037rUCMqSnRxHGJBT2CIAiCIAjCc0lMiwtPZfbs2dLq5UpmZmZERUU90/OGhTXMNGl9aKxrJAiCIAiNQQSXwlMRAdLjiWskCIIgvEhEcCkIgqDBrly8yKpZ0xq7G/WuoVb2G5m1b7AFZYKgKURwKTR5Dg4O0mp4pVLJnDlzcHR0rFM7UVFR9OjRg3v37jF//nxMTEwIDg4mICCAw4cP89VXX9G+fXug6qKi1NRUIiIiyMzMRF9fn1atWuHj4/PYldt79uwhKCiIAwcO0Lt37yrfL1iwgB9++IFvv/1WLa/p5cuXCQ8P58KFC+jr62NgYICXl5eUG1MQAMqLi5miidWNylQNUrWpIVJgCYKm0cD/xxFeRBs3buTQoUOsWbOGpUuXcufOnTq3lZ+fj6enJ926dSM0NFSqn25sbExkZGS1x6Snp+Pt7Y2npyf//e9/SUxMJDg4uFb9iIuLk3KMPuzu3bskJSXRuXNnvvnmG2l7Tk4OU6dO5R//+Id0vk8++aTaUqGCIAiC0JDEk0tBo/Tp0wdDQ0NiY2PZv3+/9DRz/fr1mJubc+nSJUJDQ8nNzaW0tJTp06fj5vZ3mpbbt2/j5+fHyJEjWbBggVrb7u7u7N69m4sXL/Lyyy+rfbdt2zYmTpwo1WIH6NKlC126dHlkf9PT08nNzSUyMhI3Nzf8/PxQKBTS94cOHcLe3p6hQ4cSFxfHa6+9BlQ87Rw0aBAuLi7SviYmJmqfa6tNG6MnPqZSfaaueF5o0phEjsunp5BrN+g9oUn3XyVNHBNo7rjqgwguBY2SnJxMcXExW7ZsITExETMzM0pKSigvL6esrAxfX1/Wrl2Lubk5BQUFuLm5MWDAAMzNzQF4//33mTJlSpXAEipKfXp7exMREcGmTZvUvjt37hxjxox54v7Gxsbi4uJChw4d6N27N8eOHZPq0QPEx8ezZMkSrKysCA0NJTs7G1NTU86dO6eW5P5piDyXf9O0MZVqULWhxlJSWt5g94Sm3X+gmWMCzRyXyHMpCA/x8fHB2dmZyMhIIiMjsbW1ZenSpezatYvs7Gz09fW5cuUKGRkZLFq0CGdnZ95++21KS0u5dOmS1I69vT1HjhwhJyen2vNMnjyZ9PR0zpw5o7a9LrUISkpKSExMxNXVFQBXV1e1qfFz586Rl5eHra0tenp6/OMf/5DqiIvaB4IgCMLzSjy5FDTCxo0b6dGjh/R58ODBpKWlkZyczLRp01i1ahXt27enVatWUoBWnXfeeYdvvvkGDw8Pdu3aVaXuuFwu57333mPdunXSwh6Avn37kpqa+kSLaY4fP05BQQEzZswAQKlUcuvWLW7cuIGZmRmxsbHk5eUxatQooCIYNTQ0xMvLi759+5KWllbrcwkvLm1dXaJVysbuRr1ryNXigiA8GRFcChqnrKyMr
KwsLC0tsbS0JDMzk/PnzzNkyBD09PQ4cOCA9G5iRkYGpqamGBn9/d6ht7c3KpWqxgDTycmJHTt2cP36dSwsLICKoHTGjBnY2tpKdeAvXbrE+fPna1y5HhcXR2BgIJMmTZK2BQYGsn//ft555x0OHz5MbGwsXbt2lb4fM2YMP/30E1OmTMHV1ZWEhASpxnh2djYnT56UaqwLAkDXl19m3oIljd2NeqeJ05KCoCnEtLigcZRKJf7+/jg5OTF+/Hhu3rzJm2++iY6ODlFRURw5cgQnJyccHR358MMPKSkpqdLG7NmzcXFxwcPDg+zsbLXvtLS0WLhwIX/88Ye0rVevXkRFRbFlyxZGjx6Nk5MTq1atwtTUtNo+Zmdn8+OPP/L666+rbXdyciI+Pp5jx47RuXNntcAS4I033iAuLg5TU1N27drFkSNHGDVqFE5OTvj4+NC6des6XjVBEARBqB8ylXh5SxBeaGJBz980bUwHDuxDLtfG0dHt8Ts3MZr2W4EYU1OiieMSC3oEQRAEQRCE55J451IQnrEJEyZQXq6+8KB///4EBQU1Uo8EQRAE4dkRwaUgPGPx8fGN3QVBEARBaDAiuKxGZGQkRUVFLFny6BWWx44dw8TEBEtLyyc+R3x8PCdOnGDjxo117eYTe7B2dl3k5eWxd+9e3n33XWlbQEAArq6uDBw4sMr+D9feflIRERF89dVXtG7dmujo6Dq10ZRcvnyZwMBAbt68iY6ODv369WPlypXo6elVu/+nn37K0aNHAcjMzKR169bSqveoqCjMzMwarO/PWvhyfwoaoMZzQ6W3aSgte7yMXE+3sbshCMILRgSXT+HYsWNYWFjUKbhsivLy8ti+fbtacBkSEvLMzrdz505OnDhR7Qro8vJyqea3ppDL5SxdupQ+ffqgVCpZtGgRO3bsYN68edXuP2fOHObMmQOAh4cHnp6ejBw5siG73GAKbmQxRdYAr4iXqaAhztNAfgJuFRc3djcEQXjBNGhwuXjxYi5fvkxpaSmdO3cmNDSUFi1aEBsby2effQZU/Ad2y5YttG3blm+++YbIyEjKysrQ0tIiLCwMIyMj3NzcSElJAeD69evS58o/T548me+++4779+8THh5OTEwMZ86cQU9Pj82bN2NsbFzl6WRNTyvT09P58MMPuXfvHsXFxUyePJkZM2bw3Xffcfz4cU6dOsUXX3zBzJkzcXFxYf/+/URHR1NeXo6RkRGrVq3ipZdeoqSkhODgYFJSUjA1NeWll1567PVycHDA2dmZU6dOcfPmTTw9PaWngD179uSXX37B0NCwyufTp0+zZs0aCgsLAfDz82Po0KFqbefk5BAcHExWVhbFxcU4Ojoye/ZsAFJTUwkJCaGoqAgDAwMCAgKwtLQkKCiI/Px8nJ2d0dfXJyYmRi2oyc7Oxs/Pj9zcXDp27Kj2nmFBQQGrV68mPT2d4uJiBg0axNKlS2sMEKdMmUJxcTHTp09n6NChjBgxgtDQUAYOHEhaWhpz5syhS5cuNdYJ//rrr/n4449p2bIlw4cPZ8OGDfzyyy/k5ubWeP8AnDx5kk8//ZSSkhIp2BswYAApKSmEhobSv39/Tp8+jUwmIyIiQiobWd09/Mknn9CpUydmzZoFVFTcWbhwIUePHkUmk1UZc8eOHaU/a2lpYWlpSUZGxmPvE0EQBEF4njRocBkQECA9hYqIiGDbtm0MGzaMLVu2EB0djbGxMYWFhejo6HD58mWWL1/Onj176Nq1KyUlJZSUlHD37t1HnuPu3bu88sorLF68mO3btzNjxgx27dpFcHAwq1atYvfu3SxcuLDWfe7QoQP//ve/USgUFBYWMmnSJIYNG8awYcNwcHBQm/b96aef+PLLL9mzZw8KhYKTJ0+ybNkyYmJi2Lt3L9evXycxMZGysjLefvtttWCiJvfv35eOdXJywtXVVQooaxr//PnziYyMxNramvLycgoKCqrst2TJEubOnYuNjQ0lJSXMmDGDf
v36YWNjg4+PD6GhoQwePJikpCR8fHz4+uuvCQwMxM3NrcYKN8HBwdjY2DB//nyuXbvG+PHjGTZsGACrV6/GxsaGkJAQlEolvr6+xMXF1ZjwOzo6mp49exITE4OhoSEpKSn89ttvrFq1ihUrVlBWVsbkyZOrrRPesmVLVqxYweeff85LL73Etm3bHnudoWJqefPmzezYsQMjIyN+//133n33XU6cOAHAxYsXWb16NUFBQXz66ads3ryZdevWkZKSUu097OHhwezZs/H09EQmk7F7926mTJlSbWD5sPv37xMXF8eiRYtq1fen0aaN0eN3qkF9pq54FIVcu+KpolAnDfU7NTRNHJcYU9OhqeOqDw0aXB48eJCEhARKS0spKiqia9eulJeX4+zsjLGxMYAUOJ06dYrhw4dLSaQVCgUKheKxwaWBgQEjRowAKkrytWvXjt69e0ufT5069UR9vn//PqtWrSI9PR2ZTEZOTg4XLlyQnlg96Pjx41y4cEGquKJSqcjLywMgJSUFFxcX5HI5crmc8ePH88svvzz2/OPGjQMqnmo1b96cP//8s9pzV/r1118xNzfH2toaAG1tbVq0aKG2T1FRET/88AN37tyRthUWFpKRkUHbtm2Ry+VSlRk7OzvkcjmXL19+ZFBbOcbly5cD0KlTJ+zs7NSuTWpqKjt37gQqrmtNCcZr0qVLF6ysrADU6oRXqqwTrqWlRZ8+faSnw2+++Sbh4eGPbf+7774jMzOTt99+W9pWVlbGrVu3AOjWrRt9+vQBYMCAAXzzzTcAnDhxotp72NzcnE6dOvHtt98yYMAAjh8/ztKlSx/bj7KyMhYuXIitra1U+vFZagp5LktKyzVqurqhaVo+PtDcPINiTE2DJo6rPoPlBgsuf/rpJz7//HNiYmJo3bo1CQkJ7Nu3r8b9a8rtrqOjo/Zd8UPvEykUCunPWlpaap+1tbWlqVptbW2Uyr/r7T7cTqWPP/4YY2NjwsLC0NHRwdPTs8Z9VSoVbm5uLFiwoNbjeRxd3b9fxn+4/5VtPtif2pxHqVQik8mIjY1FLperfXfhwoVqn6zV5mnbo6hUKjZv3kynTp3q3IaBgYFaezXVCT927FiNbTzu/hk2bBhr1qypclxGRkaVe6usrOyxffbw8ODzzz8nIyOD1157jWbNHv2Xt7y8HF9fX1q0aCEF6oIgCILQlDRYcJmXl4eRkREtW7akpKSEuLg4AEaOHElAQADu7u60bduWwsJC5HI5Q4cO5dNPP+XKlStq0+Jt27altLSUq1ev0qVLFxITE+vUn86dOxMTE4NSqaSoqIgTJ05IU7gPys/Pp2fPnujo6PDbb7/x008/8cYbbwBgZGREfv7f/3JxcHBgyZIlvPnmm7Rr147y8nLOnz+PhYUFdnZ2HDx4kHHjxlFWVkZiYiLt27evU9+h4slgWloadnZ2JCQkSNutrKxYvnw5p0+fxsrKSpoWf/DppZGREa+88gpbt26VFovcuHEDHR0d6f3Q5ORkbG1tSU5OpqysjK5du1JUVMT9+/cpKytDR6fqrWNra0tcXBxz587l2rVrJCUlSU9AHRwc2Lp1K6tWrUJbW5s7d+5QWFhY52CzW7duNdYJt7KyIiAgQLp3vvjiC+m4R90/Q4YM4ZNPPuH333+ne/fuQMX7p49bsFXTPaxQKLC3tycsLIyzZ88+dnq+smyltrY2ISEhTx3QaxIjs/ZEi9XiT6wlINcVq8UFQWhYDRZcDh8+nEOHDjF27FhMTU2xsLAgLS2NV199FS8vL2bOnIlMJkOhUBAVFUXXrl356KOPWLhwobQyOCwsjJ49exIQEMDMmTPp0KEDgwYNqlN/XnvtNb788kscHR3p0qULffv2rXa/OXPm4Ofnx6FDh+jcuTM2NjbSd+PHj2fp0qUcPXpUWtDz/vvvM2fOHMrLyyktLWXMmDFYWFgwefJk0tPTcXR0pF27dtjY2KjVpn5Sy5YtIzAwEGNjY+k1AICWL
VsSGRlJWFgYRUVFaGlpsWTJEinIqxQeHs7q1atxcnICKqZyQ0JCMDY2ZuPGjWoLejZs2CC9luDk5ISTkxMtWrQgJiZGrc2AgAD8/Pw4evQo3bp1Y8iQIWr9Xbt2Lc7OzshkMuRyOcuWLatzcFlZJzw0NJQdO3agVCpp06YN69evp02bNnz00UfMnj2bli1bMmbMGLXjarp/unbtytq1awkICOD+/fuUlpZibW392OCypnu4bdu2aGlp4eLiwrfffkuvXr0e2c63337LoUOH6NGjBxMmTADA2tqalStX1ukaaRLf4LAGOY+mTXVVln8UBEFoSKK2uPBCeHh1fUOaOXMmkydPZuzYsQ1+7tpoCu9cNhRNG5OoLd60iDE1HZo4LlFbXBCagLS0NEaPHk2zZs14/fXXG7s7giAIgtAgRBL1RvbFF1+we/fuKtvDwsKkVe6abPbs2dy4cUNtm5mZGVFRUfV6nvT09Hptrzb69etX7eKiwMBAzpw5o7ZNW1u7xjKR58+fx9/fv8r2qVOnSpkJBEEQBOF5IabFBeEFJ6bF/6ZpYxLT4k2LGFPToYnjapKpiARBEOpK1BavG1FbXBCExiCCS0FjODg4SKvalUolc+bMwdHRsU7tREVF0aNHD+7du8f8+fMxMTEhODiYgIAADh8+zFdffSWlkvL391er1JSamkpERASZmZno6+vTqlUrfHx81DINVGfPnj0EBQVx4MABevfuzYcffigl2s/IyKBjx45S3tP4+HhOnTrFpk2buHPnDjo6OnTq1IlFixbRs2fPJx7z807UFq8bUVtcEITGIIJLQaNs3LiRHj16cO7cOdzd3bGzs5NKjj6p/Px8vLy86Nu3LwEBAVLeycra9KtXr65yTHp6Ot7e3qxZs0bKm3r16lUuXLjw2PPFxcVJuUKXL1+uloLIwcFBGhvA//73PwICAti0aRP9+vUDKmqX37x5UyODS0EQBKHp0Jx/ogvCA/r06YOhoSGxsbGMHTsWZ2dnnJycyMjIAODSpUu88847uLm5MX78eCmpf6Xbt2/j4eGBra0ty5cvV0to7u7uzvfff8/FixernHfbtm1MnDhRLSF/ly5dHrtaPD09ndzcXEJDQ0lMTKSkpOSR+2/atIm5c+dKgWXlmIcOHfrI4wRBEAThWRNPLgWNlJycTHFxMVu2bCExMREzMzNKSkooLy+nrKwMX19f1q5di7m5OQUFBbi5uTFgwACpbvv777/PlClTqi3laWBggLe3NxEREWzatEntu3Pnzqklba+t2NhYXFxc6NChA7179+bYsWNSXfnqnDt3jsDAwCc+T3XatDGq87H1+QL4oyjk2hVT1kKdNNTv1NA0cVxiTE2Hpo6rPojgUtAoPj4+6OrqYmRkRGRkJNHR0SxdupRRo0YxYsQIOnXqxMWLF8nIyGDRokXScaWlpVy6dEkKLu3t7Tly5AhvvfUWJiYmVc4zefJkdu7cWSWlUF2SL5SUlJCYmMjevXsBcHV1JS4u7pHBZX1qCqvFS0rLNepdyIamaataQXNX64oxNQ2aOC6xWlwQavDge4kAgwcPJi0tjeTkZKZNm8aqVato3749rVq14uDBgzW288477/DNN9/g4eHBrl27qgSYcrmc9957j3Xr1qnViO/bty+pqamMHj261n0+fvw4BQUFzJgxA6ioMX7r1i1u3LiBmZlZtcf06dOH1NTUFyIXqiAIgtC0iOBS0FhlZWVkZWVhaWmJpaUlmZmZnD9/niFDhqCnp8eBAwdwcXEBKlZjm5qaYmT09xSxt7c3KpWqxgDTycmJHTt2cP36dSwsLICKoHTGjBnY2tpK9dwvXbrE+fPna1y5HhcXR2BgoFpC9MDAQPbv38/cuXOrPWbOnDmsWLECCwsL+vbtC1SsUs/NzcXe3r5uF+w5ZmTWnmiRiuiJtQTkuiIVkSAIDUsEl4LGUiqV+Pv7k5+fj0wmw8zMjMWLF6Ojo0NUVBShoaHs2LEDpVJJm
zZtWL9+fZU2Zs+eLQWYn332mdp3WlpaLFy4kNmzZ0vbevXqRVRUFBEREQQGBqqlIqpOdnY2P/74I+vWrVPb7uTkxNKlS5kzZ47aYqJKw4cPJygoiKCgIO7evYuOjg4dO3Zk8eLFdbhSzz/f4LAGOY+mTXVVJlEXBEFoSKJCjyC84JrCO5cNRdPGJCr0NC1iTE2HJo6rPt+5FG/IC4IgCIIgCPVGTIsLQgOZMGEC5eXq7/P179+foKCgRuqRIAiCINQ/EVwKQgOJj49v7C4IgiAIwjMngktBEJ5b4cv9KWiAVeKVNG61eI+XkeuJ1eKCIDQsEVzWUWRkJN7e3igUimq/d3Z2Zu/evejp6dX7uR0cHIiKilLL5/gk8vLy2Lt3L++++660LSAgAFdXVwYOHFhlf39/fywsLJg6dWqd+9wUZGdn4+vry65du4Cqv/GGDRvo3r37M0lufvPmTdauXcvPP/+Mvr4+Ojo6TJkyhdu3b3P06FEAMjMzad26NUZGRiiVSu7du4ehoSFFRUXk5OTQtWtXAEaMGMHChQvrvY+NoeBGFlMaMnl6mUqjkrX/BNwqLm7sbgiC8IIRwWUdffLJJ3h6elYJLsvKytDR0Xlkgu7GlpeXx/bt29WCy5CQkEbs0fPB1NRUCiyh6m9cXSnI+nDv3j2mTp3KhAkTCAsLQ0tLi7y8PI4cOcKcOXOYM2cOAB4eHnh6ejJy5Ei141NSUvjnP/8ppt0FQRCE54IILuvgww8/BMDd3R0tLS06dOiAmZkZV65cITc3l/j4eHr27Mkvv/yCoaEhDg4OODs7c+rUKW7evImnp6f0FPDB/R7+fPr0adasWUNhYSEAfn5+DB06VK0vOTk5BAcHk5WVRXFxMY6OjlLexdTUVEJCQigqKsLAwICAgAAsLS0JCgoiPz8fZ2dn9PX1iYmJUQtcsrOz8fPzIzc3l44dO6otQikoKGD16tWkp6dTXFzMoEGDWLp0KdraNefSi42NlXJEyuVytmzZQtu2bTlw4AA7duwAoHPnzgQFBdGmTRvi4+NJTEykWbNmpKenY2pqyooVK1izZg1Xr17FwsKC8PBwZDIZ/v7+6OjocP36dW7cuIGNjQ2BgYEoFApu3brFypUryczMBGDWrFm4uLigVCoJCgoiOTkZhUKBgYEBMTExXL9+HTc3N1JSUqr8xrt27SI0NFR6gltYWEhwcDBpaWkAjB8/Hi8vL6AiCLSwsODXX38lJyeHsWPH4uvrW+P1SUxMpGXLlnh7e0vbmjdvjru7e803YT16nmuLi5ri9UNTayBr4rjEmJoOTR1XfRDBZR2sXLmS6OhoYmJiMDQ0xN/fn9OnT7N7924MDAyqPeb+/fvs3buX69ev4+TkhKurqxRQVufu3bvMnz+fyMhIrK2tKS8vp6CgoMp+S5YsYe7cudjY2FBSUsKMGTPo168fNjY2+Pj4EBoayuDBg0lKSsLHx4evv/6awMBA3Nzcany6GhwcjI2NDfPnz+fatWuMHz+eYcOGAbB69WpsbGwICQlBqVTi6+tLXFwckydPrratlJQUtmzZQnR0NMbGxhQWFqKjo8Nvv/1GeHg48fHxmJiYsH79ej766CMpkXlaWhoJCQm0a9cOb29vFi9ezO7du9HX18fV1ZWkpCSpAs6ZM2eIiYlBV1cXLy8v9u3bx9SpUwkODqZ79+5s2rSJnJwcJkyYQJ8+fSgrKyMpKYkvv/wSLS0t/vrrr8f+xg/bvHkzSqWShIQECgsLefPNN+nZs6dUHefGjRvs2bOHwsJCRo8ezcSJE6Vp64edPXsWS0vLar9rCM9znktRU7x+aFo+PtDcPINiTE2DJo5L5Ll8Do0ZM6bGwBKQ3tPr2LEjzZs3588//3xke7/++ivm5uZYW1sDoK2tTYsWLdT2KSoq4ocffiA4OBhnZ2cmTZpETk4OGRkZXL58GblcLgVgdnZ2yOVyLl++/NixpKSkSKUIO3XqhJ2dnfTd8ePH2bFjB
87Ozri6unL27NlHtnnixAmcnZ0xNjYGwNDQEF1dXVJSUrC3t5dKKrq7u5OUlCQdZ21tTbt27QDo3bs3r7zyCs2aNUNHR4devXpx9epVad9x48ZhaGiIjo4OLi4uJCcnA5CUlCQ9/TMxMcHe3p6UlBQ6depEeXk5AQEBHDhw4LHXozpJSUlMmjQJmUyGkZERjo6Oav0fM2YMWlpaNGvWDHNzc+npaXVEHQNBEARBk4gnl/XkUYElgO4D9X21tbWlqWZtbW0puCh+4MX72gQcSqUSmUxGbGwscrlc7bsLFy5UWzawum1PQqVSsXnzZjp16vTU7TyqLw9fr5qu3+PaffgcMpmMZs2acfjwYVJSUkhKSiI8PJz9+/c/df8f/Fzb/gJYWFgQFxf3ROd/UTRUTfFKGrdaHFFbXBCEhieCyzoyNDSkoKDgkVPbtdGpUyfS0tKws7MjISFB2m5lZcXy5cs5ffo0VlZW0rT4g08vjYyMeOWVV9i6dSvz5s0DKqZjdXR0eOmllygpKSE5ORlbW1uSk5MpKyuja9euFBUVcf/+fWnx0cNsbW2Ji4tj7ty5XLt2TW0K2sHBga1bt7Jq1Sq0tbW5c+cOhYWFNQabI0eOJCAgAHd3d9q2bUthYSFyuRw7Ozu2bdvGzZs3MTY2Zt++fdI5ntTRo0eZPn06CoWCQ4cOSQte7Ozs2Lt3Lz4+Pty8eZOTJ08yY8YM7ty5g7a2NsOHD2fIkCGcOHGCa9euSU9RKz3qNx48eDCxsbFYW1tTWFjIkSNH8PPzq1P/HR0d2bZtG9u3b2fWrFnIZDLy8vLYv38/06dPr1ObmqKhaopX0rSpLlFbXBCExiCCyzry9PRk2rRp6Onp0aFDhzq3s2zZMgIDAzE2NmbEiBHS9pYtWxIZGUlYWBhFRUVoaWmxZMmSKgFYeHg4q1evxsnJCagIiEJCQjA2Nmbjxo1qC3o2bNiAQqFAoVDg5OSEk5MTLVq0ICYmRq3NgIAA/Pz8OHr0KN26dWPIkCFq/V27di3Ozs7IZDLkcjnLli2rMbh89dVX8fLyYubMmchkMhQKBVFRUXTv3p3Fixfj6ekJVATZda1UY2Njw7x588jKysLGxkZ6/3P58uUEBgZK18bX15fu3btz9uxZVqxYQVlZGeXl5QwfPpwBAwaQlaX+hOzB3/jBVeQAc+fO5aOPPpLaHj9+PMOHD69T/w0MDNi1axdr165l1KhR0hT/lClT6tSeIAiCIDQmmUq88CU0YS9KDs5n6Xle0NPQNG1MlU8uHR3dGrsr9U7TfisQY2pKNHFcYkGPIAiCIAiC8FwS0+JCvZg9ezY3btxQ22ZmZkZUVNQzPW9YWMO+k/c0GusaCYIgCEJDEsGlUC9EgPR44hoJgiAILwIRXApNnoODg7RQSalUMmfOHBwdHevUTmXN9nv37jF//nxMTEwIDg4mICCAw4cP89VXX9G+fXug6vueqampREREkJmZib6+Pq1atcLHxwcbG5saz3n58mXCw8O5cOEC+vr6GBgY4OXlxejRowE4dOgQ//rXv7h//z4ymYxevXrxwQcf0L59ewoKCoiIiODbb7+VUh+98cYbUoWmpiZ8uT8FDZh2qDoal4qox8vI9UQqIkEQGpYILgWNsHHjRnr06MG5c+dwd3fHzs6O1q1b16mt/Px8vLy86Nu3LwEBAVL+SmNjYyIjI1m9enWVY9LT0/H29mbNmjVSNaOrV69y4cKFGs+Tk5PD1KlT+eCDD9i0aZO07dSpUwB88cUX7Ny5k82bN0vVfVJSUrh16xZmZmZ4e3vTs2dPDh8+jEKh4P79+3zxxRd1GvPzoOBGFlMauxpPmUqjKgL9BNx6IH+uIAhCQxDBpaBR+vTpg6GhIbGxsezfv196mrl+/XrMzc25dOkSoaGh5ObmUlpayvTp03Fz+3sl7e3bt/Hz82PkyJEsWLBArW13d3d2797NxYsXefnll9W+27ZtGxMnTpQCS4AuXbrQpUuXG
vu6Z88eBg0ahIuLi7TNxMRE+vzJJ58QHBysVjZy0KBBAJw6dYo//viDf//731ICfT09PTw8PJ7oegmCIAhCfRPBpaBRkpOTKS4uZsuWLSQmJmJmZkZJSQnl5eWUlZXh6+vL2rVrMTc3p6CgADc3NwYMGIC5uTkA77//PlOmTKkSWEJFPkpvb28iIiKkJ42Vzp07x5gxY56or+fOnVPLIfqg27dv8+eff9K/f/9qvz979ix9+vSpUpmpLtq0MarzsfWZukIh1654cijUu/r8nZ4nmjguMaamQ1PHVR9EcCloBB8fH3R1dTEyMiIyMpLo6GiWLl3KqFGjGDFiBJ06deLixYtkZGSwaNEi6bjS0lIuXbokBZf29vYcOXKEt956q0rFHoDJkyezc+dOzpw5o7a9LuliH3XM49qrz/S0z0uey5LSco2akn6eaFo+PtDcPINiTE2DJo6rPoNlEVwKGqHynctKgwcPJi0tjeTkZKZNm8aqVato3749rVq14uDBgzW288477/DNN9/g4eHBrl27qgSYcrmc9957j3Xr1kkLewD69u1LamqqtBCnNvr27UtaWlq137Vt2xZTU1NSU1MZOnRole8tLCyIjo6usYSnIAiCIDQW8V8lQeOUlZWRlZWFpaUllpaWZGZmcv78eYYMGYKenh4HDhyQ3mvMyMjA1NQUI6O/p4a9vb1RqVQ1BphOTk7s2LGD69evY2FhAVQEpTNmzMDW1lYq0Xnp0iXOnz9f48r1KVOm4OrqSkJCglRGMjs7m5MnTzJ58mTmzp1LWFgYmzdvpnPnzgB89913NG/eHDs7O9q1a0dYWBh+fn4oFAqKi4v5v//7P7y8vOr1ejYUI7P2RIvV4vWqJSDXFavFBUFoWCK4FDSOUqnE39+f/Px8ZDIZZmZmLF68GB0dHaKioggNDWXHjh0olUratGnD+vXrq7Qxe/ZsKcD87LPP1L7T0tJi4cKFail/evXqRVRUFBEREQQGBqqlIqqJqakpu3btIjw8nPXr12NgYICBgQHvvvsuULGASE9PDx8fH+7fv4+WlpaUikgmk7F9+3bWrVvHuHHj0NfXB5CC1KbIN7jxE+Jr2lRXZflHQRCEhiRqiwvCC+55eefyeaBpYxK1xZsWMaamQxPHJWqLC4IgCIIgCM8lMS0uCM/YhAkTKC9Xf4+vf//+BAUFNVKPBEEQBOHZEcGlIDxj8fHxjd0FQRAEQWgwIrgUBEHQYFcuXmTVrGmN3Y169zyv7Dcya/9cLFAThMYigkuh3jg4OKBQKKSSi3PmzKkxDU9t27l37x4vv/wy7777LtbW1kDFk8ClS5cSERHBuHHjpG0nTpxg48aNANy8eZO1a9fy888/o6+vj46ODlOmTGHy5MnVnvP69eu4ubmRkpJSx9FXFRkZSVFREUuWLKnXfSudP3+eZcuWoVQqKSsrw9ramhUrVqBQKJ6m24KGKS8ubvya7c/Cc1wHvrFTaglCY3s+/2YKTdbGjRs5dOgQa9asYenSpdy5c+ep2vnPf/6Dq6srXl5ealVxOnTowIYNGygrK6ty7L1795g6dSrm5ub85z//ITExkc8++wylUlnncT2PunXrxt69ezl48CAJCQncvXuXmJiYxu6WIAiC8IITTy6FZ6JPnz4YGhoSGxvL/v37paeZ69evx9zcnEuXLhEaGkpubi6lpaVMnz4dN7fq06W89tprpKamsmPHDunJpIWFBSUlJcTGxuLu7q62f2JiIi1btsTb21va1rx58yr7PcrixYu5fPkypaWldO7cmdDQUFq0aEFKSgohISFYWlpy5swZdHR0WLNmDZ988gm///47ZmZmREZGYmBgAEBWVhbvvvsuWVlZvPTSS4SGhtKsWTPy8/MJCAjg4sWLmJmZ0bp1a9q2bQtAUlIS69evp7i4mPLycmbPnl3tE2A9PT3pz2VlZVIuTEEQBEFoTCK4FJ6J5ORkiouL2bJlC4mJiZiZmVFSUkJ5eTllZWX4+
vqydu1azM3NKSgowM3NjQEDBkg1vh/Wv39/jh8/rrZt4cKFeHl5SdV2Kp09exZLS8un6n9AQACtW7cGICIigm3btuHr6wtUVPX55z//SXBwMB9++CGzZs1i3759tGvXjnfffZfDhw8zadIkAH7++WcOHDhA27ZtWbp0KZs3b2bJkiVs2rQJQ0NDjhw5wp07d5gwYQJjx44FKgLz6OhotLW1uXXrFhMmTGDo0KG0aNGiSj+zs7Px8vIiMzMTe3v7Gqf9H6VNG6PH71SD+syL9rzQpDGJBOqNQyHXrvN9pEn3XyVNHBNo7rjqgwguhXrl4+ODrq4uRkZGREZGEh0dzdKlSxk1ahQjRoygU6dOXLx4kYyMDBYtWiQdV1payqVLl2oMLqvL9d+zZ09sbGzYtWsXbdq0eeS+T6pyqrm0tJSioiK6du0qfdetWzd69+4NVASCWVlZtGvXDqioF3716lVp3xEjRkhPJCdOnEhwcDAAKSkpLF++HIDWrVvzj3/8Qzrmzp07LFu2jKtXr6Ktrc1ff/3F5cuXGTBgQJV+mpqacvDgQYqKivjggw/4z3/+88TvuYok6n/TtDGVPqcLXjRdSWl5ne4jTbv/QDPHBJo5rvoMlkVwKdSrjRs30qNHD+nz4MGDSUtLIzk5mWnTprFq1Srat29Pq1atOHjwYK3bTUtLo3v37lW2L1iwAHd3d6lkIlRMmcfFxdV5DD/99BOff/45MTExtG7dmoSEBPbt2yd9/+CCGW1tbXQfqN2sra1NcXFxte2qVCpkMpn055qsWrUKBwcHPvnkE2QyGa+//nqNbVYyMDBg3LhxJCQk1GkRlSAIgiDUFxFcCs9MWVkZWVlZWFpaYmlpSWZmJufPn2fIkCHo6elx4MABaUo7IyMDU1NTjIyqTtEeO3aMzz//nO3bt1f5rlOnTrz++ut89tlnWFhYAODo6Mi2bdvYvn07s2bNQiaTkZeXx/79+5k+ffpj+52Xl4eRkREtW7akpKTkqQLVEydOcOfOHVq3bs3+/fsZNGgQAHZ2dsTHx/PKK6+Qm5vLsWPHGDNmDAD5+fl06NABmUzG999/r/Yk9EHXrl3D1NQUhUJBSUkJ//3vf9UCe0EA0NbVJVqlWYvZ4PlPRSQILzIRXArPjFKpxN/fn/z8fGQyGWZmZixevBgdHR2ioqIIDQ1lx44dKJVK2rRpw/r166VjfXx8pFRE5ubmbN26tdppYYC5c+eyf/9+6bOBgQG7du1i7dq1jBo1CkNDQykVUW0MHz6cQ4cOMXbsWExNTbGwsCAtLa1O18DOzo5ly5Zx7do1unXrhr+/v9TnZcuWMW7cODp06MCQIUOkYxYvXsyHH37Itm3b6NmzJz179pS+27BhAyYmJrz11lv88ssvbN++HZlMhlKpxMbGhrlz59apn4Lm6vryy8xbUPsUV02FJk5LCoKmkKnq4wU1QRCaLPHO5d80bUwHDuxDLtfG0bH6TAxNmab9ViDG1JRo4rjq851LkbdEEARBEARBqDdiWlx4oQQGBqolY4eKRTii/rcgCIIg1A8RXAovlKCgoMbugiAIgiBoNBFcCoIgaLArFy+yata0xu5GvXveV4v7Boc1djcEodGI4LIOIiMj8fb2Vst3+CBnZ2f27t2rVp6vvjg4OBAVFVXnlDN5eXns3btXLS9kQEAArq6uDBw4sMr+/v7+WFhYMHXq1Dr3uSnIzs7G19eXXbt2AVV/4w0bNtC9e3fGjRv3TM5/8uRJvLy82LRpE6NHj5a2l5aWsnnzZo4cOYKOjg5KpRJ7e3uGDBlCeHg4ALdu3UKpVGJiYgLA/Pnz1ZKyCy+28uJipsg08PX6MhU8p+OKvpHV2F0QhEYlgss6+OSTT/D09KwSXJaVlaGjo/NEycEbWl5eHtu3b1cLLkNCQhqxR88HU1NTKbCEqr/xggULnun54+LisLW1JTY2Vi24XLp0KcXFxcTFxWFkZERpaSnx8fFYW1tL91lkZCRFR
UUsWaJ56WYEQRCEpkcEl0/oww8/BMDd3R0tLS06dOiAmZkZV65cITc3l/j4eHr27Mkvv/yCoaEhDg4OODs7c+rUKW7evImnp6f0FPDB/R7+fPr0adasWUNhYSEAfn5+DB06VK0vOTk5BAcHk5WVRXFxMY6OjsyePRuA1NRUQkJCKCoqwsDAgICAACwtLQkKCiI/Px9nZ2f09fWJiYnBw8MDT09PRo4cSXZ2Nn5+fuTm5tKxY0fKy/+ediooKGD16tWkp6dTXFzMoEGDWLp0KdraNdcvjo2N5bPPPgNALpezZcsW2rZty4EDB9ixYwcAnTt3JigoiDZt2hAfH09iYiLNmjUjPT0dU1NTVqxYwZo1a7h69SoWFhaEh4cjk8nw9/dHR0eH69evc+PGDWxsbAgMDEShUHDr1i1WrlxJZmYmALNmzcLFxQWlUklQUBDJyckoFAoMDAyIiYnh+vXruLm5kZKSUuU33rVrF6GhodIT3MLCQoKDg6Xcl+PHj8fLywsADw8PLCws+PXXX8nJyWHs2LFSTfKa5ObmkpSUxJdffomjoyM3b97E2NiYK1eucOzYMU6ePCkll5fL5bz55puPbO9Jidri6jRpTKK2eOMQtcXVaeKYQHPHVR9EcPmEVq5cSXR0NDExMRgaGuLv78/p06fZvXs3BgYG1R5z//599u7dy/Xr13FycsLV1VUKKKtz9+5d5s+fT2RkJNbW1pSXl1NQUFBlvyVLljB37lxsbGwoKSlhxowZ9OvXDxsbG3x8fAgNDWXw4MEkJSXh4+PD119/TWBgIG5ubjU+XQ0ODsbGxob58+dz7do1xo8fz7BhwwBYvXo1NjY2hISEoFQq8fX1JS4ujsmTJ1fbVkpKClu2bCE6OhpjY2MKCwvR0dHht99+Izw8nPj4eExMTFi/fj0fffSRlEQ9LS2NhIQE2rVrh7e3N4sXL2b37t3o6+vj6upKUlISgwcPBuDMmTPExMSgq6uLl5cX+/btY+rUqQQHB9O9e3c2bdpETk4OEyZMoE+fPpSVlUmBnJaWFn/99ddjf+OHbd68GaVSSUJCAoWFhbz55pv07NkTe3t7AG7cuMGePXsoLCxk9OjRTJw4Ua02+cMOHjzIyJEjadu2Lf/4xz84cOAA7777LufOnaNLly60aNGixmPrg8hz+TdNG5OoLd44RG3xv2nimEAzxyXyXD5nxowZU2NgCUjv6XXs2JHmzZvz559/PrK9X3/9FXNzc6ytrYGKVDkPBxhFRUX88MMPBAcH4+zszKRJk8jJySEjI4PLly8jl8ulAMzOzg65XM7ly5cfO5aUlBQmTZoEVJRWtLOzk747fvw4O3bswNnZGVdXV86ePfvINk+cOIGzszPGxsYAGBoaoqurS0pKCvb29tI7gu7u7iQlJUnHWVtb065dOwB69+7NK6+8QrNmzdDR0aFXr15q5RDHjRsnVeBxcXEhOTkZgKSkJNzd3QEwMTHB3t6elJQUOnXqRHl5OQEBARw4cOCx16M6SUlJTJo0CZlMhpGREY6Ojmr9HzNmDFpaWjRr1gxzc3Pp6WlN4uPjcXV1BcDV1fWpyk0KgiAIQmMTTy7rwaMCSwBdXV3pz9ra2tJUs7a2NpUFkoqLi6V9alM0SalUIpPJiI2NRS6Xq3134cIFZDJZlWOq2/YkVCoVmzdvplOnTk/dzqP68vD1qun6Pa7dh88hk8lo1qwZhw8fJiUlhaSkJMLDw9VKR9a1/w9+rm1/oeIpbUZGBgEBAdK2nJwcfvnlF/r06cPVq1f566+/nvnTS0FzidriDU/UFhdedCK4rANDQ0MKCgoeObVdG506dSItLQ07OzsSEhKk7VZWVixfvpzTp09jZWUlTYs/GGAYGRnxyiuvsHXrVubNmwdUTMfq6Ojw0ksvUVJSQnJyMra2tiQnJ1NWVkbXrl0pKiri/v370uKjh9na2hIXF8fcuXO5du2a2hS0g4MDW7duZdWqVWhra3Pnzh0KCwtrD
DZHjhxJQEAA7u7utG3blsLCQuRyOXZ2dmzbtk16t3Dfvn3SOZ7U0aNHmT59OgqFgkOHDjFy5Eig4mnt3r178fHx4ebNm5w8eZIZM2Zw584dtLW1GT58OEOGDOHEiRNcu3ZNeopa6VG/8eDBg4mNjcXa2prCwkKOHDmCn59fnfofFxfHO++8w8KFC6VtW7ZsIS4ujpCQEBwcHAgMDCQkJAQjIyPKy8vZvXs3EydOfOr7T3gxiNrigiA0NBFc1oGnpyfTpk1DT0+PDh061LmdZcuWERgYiLGxMSNGjJC2t2zZksjISMLCwigqKkJLS4slS5ZUCcDCw8NZvXo1Tk5OQEVAFBISgrGxMRs3blRb0LNhwwYUCgUKhQInJyecnJxo0aIFMTExam0GBATg5+fH0aNH6datG0OGDFHr79q1a3F2dkYmkyGXy1m2bFmNweWrr76Kl5cXM2fORCaToVAoiIqKonv37ixevBhPT0+gIsiua3JzGxsb5s2bR1ZWFjY2NtL7n8uXLycwMFC6Nr6+vnTv3p2zZ8+yYsUKysrKKC8vZ/jw4QwYMICsLPXUIQ/+xg+uIgeYO3cuH330kdT2+PHjGT58+BP3vbi4mCNHjvD555+rbX/jjTcYP348AQEBhIWFsWnTJtzc3JDL5VIqoprSYAmCIAhCY5OpajMHKwjPoRclB+ezJhb0/E3TxnTgwD7kcm0cHd0auyv1TtN+KxBjako0cVxiQY8gCIIgCILwXBLT4sJTmz17Njdu3FDbZmZmRlRU1DM9b1hY0ymv1ljXSBAEQRAamgguhacmAqTHE9dIEARBeFGI4FLQCA4ODtKCJaVSyZw5c3B0dKxTO5W12+/du8f8+fMxMTEhODiYgIAADh8+zFdffUX79hWpRh5+7zM1NZWIiAgyMzPR19enVatW+Pj4YGNj88jz7tmzh6CgIA4cOEDv3r2l7QUFBURERPDtt99KKY7eeOMNjI2NpcpHN27cQE9Pj1atWgEQFBRE//79n3jsgma6cvEiq2ZNa+xu1LvnPRWRb3DTmVkRhPomgktBY2zcuJEePXpw7tw53N3dsbOzo3Xr1nVqKz8/Hy8vL/r27UtAQICUx9LY2JjIyEhWr15d5Zj09HS8vb1Zs2aNVNXo6tWrXLhw4bHnq6wtHhcXx/Lly4GKfJre3t707NmTw4cPo1AouH//Pl988QVubm64uVUs0hALm4RHKS8uZopMA1+vL1PBczqu6BtZj99JEDTY8/k3UxCeQp8+fTA0NCQ2NpaxY8fi7OyMk5MTGRkZAFy6dIl33nkHNzc3xo8fX6Uizu3bt/Hw8MDW1pbly5erJUh3d3fn+++/5+LFi1XOu23bNiZOnCgFlgBdunTh9ddff2R/09PTyc3NJTQ0lMTEREpKSoCKSkB//PEHS5culVIP6enp4eHhUbcLIwiCIAgNQDy5FDROcnIyxcXFbNmyhcTERMzMzCgpKaG8vJyysjJ8fX1Zu3Yt5ubmFBQU4ObmxoABAzA3Nwfg/fffZ8qUKSxYsKBK2wYGBnh7exMREcGmTZvUvjt37hxjxox54v7Gxsbi4uJChw4d6N27N8eOHWPcuHGcPXuWPn36VKnAVN/atDGq87H1mbrieaFJY5LLtRu7Cy8khVy7zveRJt1/lTRxTKC546oPIrgUNIaPjw+6uroYGRkRGRlJdHQ0S5cuZdSoUYwYMYJOnTpx8eJFMjIyWLRokXRcaWkply5dkoJLe3t7jhw5wltvvVWlcg/A5MmT2blzJ2fOnFHbXpeUsSUlJSQmJrJ3717g79ri48aNq1N7dSHyXP5N08ZU+py+k6jpSkrL63Qfadr9B5o5JtDMcdVnsCyCS0FjVL5zWWnw4MGkpaWRnJzMtGnTWLVqFe3bt6dVq1YcPHiwxnbeeecdvvnmGzw8PNi1a1eVAFMul/Pee++xbt06aWEPQN++fUlNTWX06NG17vPx48cpKChgxowZQEXN+Fu3bnHjxg0sL
CyIjo6usVSnIAiCIDyPxH+xBI1UVlZGVlYWlpaWWFpakpmZyfnz5xkyZAh6enocOHAAFxcXADIyMjA1NcXI6O/pYW9vb1QqVY0BppOTEzt27OD69etYWFgAFUHpjBkzsLW1lUp1Xrp0ifPnz9e4cj0uLo7AwEAmTZokbQsMDGT//v3MmTOHdu3aERYWhp+fHwqFguLiYv7v//4PLy+v+rxcggbT1tUlWqVs7G7Uu+d9tbggvMhEcCloJKVSib+/P/n5+chkMszMzFi8eDE6OjpERUURGhrKjh07UCqVtGnThvXr11dpY/bs2VKAWZn2p5KWlhYLFy5k9uzZ0rZevXoRFRVFREQEgYGBaqmIqpOdnc2PP/7IunXr1LY7OTmxdOlS5syZw/bt21m3bh3jxo1DX19f+l4Qaqvryy8zb8GSxu5GvdPEaUlB0BSitrggvODEO5d/07QxidriTYsYU9OhieMStcUFQRAEQRCE55KYFheEBjBhwgTKy9XfD+vfvz9BQUGN1CNBEARBeDZEcCkIDSA+Pr6xuyAIgiAIDUIEl4IgCBpM1BZvPKLGuPCiEsFlHURGRuLt7S2V5HuYs7Mze/fuRU9Pr97P7eDgQFRUlFo+xyeRl5fH3r17effdd6VtAQEBuLq6MnDgwCr7vyh1q7Ozs/H19WXXrl1A1d94w4YNdO/enXHjxtX7uW/evMnatWv5+eef0dfXR0dHhylTpnD79m2OHj0KQGZmJq1bt5bSJUVFRXHz5k0iIiLIzMxUW5luY2NT730Umi5RW7zxiBrjwotKBJd18Mknn+Dp6VkluKxMdv2oBN2NLS8vj+3bt6sFlyEhIY3Yo+eDqampFFhC1d+4ulKQ9eHevXtMnTqVCRMmEBYWhpaWFnl5eRw5coQ5c+YwZ84cADw8PPD09GTkyJFART1yb29v1qxZI9Uyv3r1KhcuXHgm/RQEQRCE2hLB5RP68MMPAXB3d0dLS4sOHTpgZmbGlStXyM3NJT4+np49e/LLL79gaGiIg4MDzs7OnDp1ips3b+Lp6Sk9BXxwv4c/nz59mjVr1lBYWAiAn58fQ4cOVetLTk4OwcHBZGVlUVxcjKOjo5R3MTU1lZCQEIqKijAwMCAgIABLS0uCgoLIz8/H2dkZfX19YmJi1AKX7Oxs/Pz8yM3NpWPHjmqLUAoKCli9ejXp6ekUFxczaNAgli5dirZ2zfWLY2NjpRyRcrmcLVu20LZtWw4cOMCOHTsA6Ny5M0FBQbRp04b4+HgSExNp1qwZ6enpmJqasmLFCtasWcPVq1exsLAgPDwcmUyGv78/Ojo6XL9+nRs3bmBjY0NgYCAKhYJbt26xcuVKMjMzAZg1axYuLi4olUqCgoJITk5GoVBgYGBATEwM169fx83NjZSUlCq/8a5duwgNDZWe4BYWFhIcHExaWhoA48ePl5Kae3h4YGFhwa+//kpOTg5jx47F19e3xuuTmJhIy5Yt8fb2lrY1b94cd3f3mm9CYNu2bUycOFEKLAG6dOlCly5dHnmcIAiCIDxrIrh8QitXriQ6OpqYmBgMDQ3x9/fn9OnT7N69GwMDg2qPuX//Pnv37uX69es4OTnh6uoqBZTVuXv3LvPnzycyMhJra2vKy8spKCiost+SJUuYO3cuNjY2lJSUMGPGDPr164eNjQ0+Pj6EhoYyePBgkpKS8PHx4euvvyYwMBA3N7can64GBwdjY2PD/PnzuXbtGuPHj5cCmNWrV2NjY0NISAhKpRJfX1/i4uKYPHlytW2lpKSwZcsWoqOjMTY2prCwEB0dHX777TfCw8OJj4/HxMSE9evX89FHH0mJzNPS0khISKBdu3Z4e3uzePFidu/ejb6+Pq6uriQlJUkVcM6cOUNMTAy6urp4eXmxb98+pk6dSnBwMN27d2fTpk3k5OQwYcIE+vTpQ1lZGUlJSXz55ZdoaWnx119/PfY3ftjmzZtRKpUkJCRQWFjIm2++Sc+ePbG3twfgxo0b7
Nmzh8LCQkaPHs3EiRPp2rVrtdfo7NmzWFpaVvvdo5w7d44xY8Y88XHVadPG6PE71aA+86I9LzRpTHJ5zf/wE549hVz7ie8nTbr/KmnimEBzx1UfRHBZD8aMGVNjYAlI7+l17NiR5s2b8+eff2Jubl7j/r/++ivm5uZYW1sDoK2tTYsWLdT2KSoq4ocffuDOnTvStsLCQjIyMmjbti1yuVwKwOzs7JDL5Vy+fPmRQS1UBITLly8HoFOnTtjZ2UnfHT9+nNTUVHbu3AlUBM2mpqY1tnXixAmcnZ0xNjYGkM6dkpKCvb29VFLR3d0dZ2dn6Thra2vatWsHQO/evenQoQPNmlX8Je7VqxdXr16VxjZu3DipXRcXF77++mumTp1KUlIS/v7+AJiYmGBvb09KSgouLi6Ul5cTEBDAoEGDpGnmJ5GUlMSyZcuQyWQYGRnh6OhIUlKSFFyOGTMGLS0tmjVrhrm5OZmZmTUGl3WtYVCftQ9EEvW/adqYSp/zBS+arqS0/InuJ027/0AzxwSaOa76DJZFcFkPHhVYAujq6kp/1tbWlqaatbW1pSChuLhY2qc2gYNSqUQmkxEbG4tcLlf77sKFC8hksirHVLftSahUKjZv3kynTp2eup1H9eXh61XT9Xtcuw+fQyaT0axZMw4fPkxKSgpJSUmEh4ezf//+p+7/g59r218ACwsL4uLinuj8AH379iU1NZXRo0c/8bGCIAiC8CyJ4LIODA0NKSgoeOxTwMfp1KkTaWlp2NnZkZCQIG23srJi+fLlnD59GisrK2la/MGnl0ZGRrzyyits3bqVefPmARXTsTo6Orz00kuUlJSQnJyMra0tycnJlJWV0bVrV4qKirh//760+Ohhtra2xMXFMXfuXK5du6Y2Be3g4MDWrVtZtWoV2tra3Llzh8LCwhqDzZEjRxIQEIC7uztt27alsLAQuVyOnZ0d27Zt4+bNmxgbG7Nv3z7pHE/q6NGjTJ8+HYVCwaFDh6QnkXZ2duzduxcfHx9u3rzJyZMnmTFjBnfu3EFbW5vhw4czZMgQTpw4wbVr16SnqJUe9RsPHjyY2NhYrK2tKSws5MiRI/j5+dWp/46Ojmzbto3t27cza9YsZDIZeXl57N+/n+nTp9d43DvvvMOMGTOwtbWVrt2lS5c4f/48jo6OdeqLoJm0dXWJVikbuxv1rqmkIhKEF5EILuvA09OTadOmoaenR4cOHerczrJlywgMDMTY2JgRI0ZI21u2bElkZCRhYWEUFRWhpaXFkiVLqgRg4eHhrF69GicnJ6AiIAoJCcHY2JiNGzeqLejZsGEDCoUChUKBk5MTTk5OtGjRgpiYGLU2AwIC8PPz4+jRo3Tr1o0hQ4ao9Xft2rU4Ozsjk8mQy+UsW7asxuDy1VdfxcvLi5kzZyKTyVAoFERFRdG9e3cWL16Mp6cnUBFk17VSjY2NDfPmzSMrKwsbGxvp/c/ly5cTGBgoXRtfX1+6d+/O2bNnWbFiBWVlZZSXlzN8+HAGDBhAVpZ6ypAHf+MHV5EDzJ07l48++khqe/z48QwfPrxO/TcwMGDXrl2sXbuWUaNGYWhoKKUiepRevXoRFRVFREQEgYGBaqmIBOFBXV9+mXkLljR2N+qdJk5LCoKmkKnq8+UtQWhAL0oOzmdNvHP5N00b04ED+5DLtXF0dGvsrtQ7TfutQIypKdHEcdXnO5fPdwZaQRAEQRAEoUkR0+LCU5s9ezY3btxQ22ZmZkZUVNQzPW9YWNMpq9ZY10gQBEEQGpoILoWnJgKkxxPXSBAEQXhRiOBSEARBg125eJFVs6Y1djfqXVNYLV7JyKw9vsFNZ6ZFEJ6WCC4fEhkZSVFREUuWPHp15bFjxzAxMalTdZX4+HhOnDjBxo0b69rNJ+bg4EBUVBQ9evSo0/F5eXns3btXrSZ5QEAArq6uDBw4sMr+T7vYJiIigq+++orWrVsTHR1dpzaaml9//ZXAwECKi4vp0KEDa9eupU2bN
o89bsGCBfzwww98++23VXKeCkJ5cTFTZBr4en2ZCprIuKJvZD1+J0HQIE3jb+Zz6NixY6SmpjZ2NxpMXl4e27dvV9sWEhJSbWBZH3bu3El0dHS1geWjkpI3VSqVig8++IDAwEC++uorBg4cSHh4+GOPu3v3LklJSXTu3JlvvvmmAXoqCIIgCI/WYE8uFy9ezOXLlyktLaVz586EhobSokULYmNj+eyzzwCQy+Vs2bKFtm3b8s033xAZGUlZWRlaWlqEhYVhZGSEm5sbKSkpAFy/fl36XPnnyZMn891333H//n3Cw8OJiYnhzJkz6OnpsXnzZoyNjas8nazpaWV6ejoffvgh9+7do7i4mMmTJzNjxgy+++47jh8/zqlTp/jiiy+YOXMmLi4u7N+/n+joaMrLyzEyMmLVqlVSQvPg4GBSUlIwNTXlpZdeeuz1cnBwwNnZmVOnTnHz5k08PT2lp4A9e/bkl19+kRJ8P/j59OnTrFmzhsLCQgD8/PwYOnSoWts5OTkEBweTlZVFcXExjo6OzJ49G4DU1FS1/JgBAQFYWloSFBREfn4+zs7O6OvrExMTg4eHB56enowcOZLs7Gz8/PzIzc2lY8eOagFgQUEBq1evJj09neLiYgYNGsTSpUvR1q6+7vGUKVMoLi5m+vTpDB06lBEjRhAaGsrAgQNJS0tjzpw5dOnShdDQUHJzcyktLWX69Om4uVWkW/n666/5+OOPadmyJcOHD2fDhg388ssv5Obm1nj/AJw8eZJPP/2UkpIS5HI5S5cuZcCAAaSkpBAaGkr//v05ffo0MpmMiIgIqYRndffwJ598QqdOnZg1axZQUQt84cKFHD16tNrqRGlpaejq6krBuru7O6NGjWL16tWPvE8OHTqEvb09Q4cOJS4ujtdee+2R+1dH1BZXp0ljErXFnw9PUmNck+6/Spo4JtDccdWHBgsuAwICaN26NVAx5blt2zaGDRvGli1biI6OxtjYmMLCQnR0dLh8+TLLly9nz549dO3alZKSEkpKSrh79+4jz3H37l1eeeUVFi9ezPbt25kxYwa7du0iODiYVatWsXv3bhYuXFjrPnfo0IF///vfKBQKCgsLmTRpEsOGDWPYsGE4ODioTfv+9NNPfPnll+zZsweFQsHJkydZtmwZMTEx7N27l+vXr5OYmEhZWRlvv/02HTt2fOz579+/Lx3r5OSEq6vrI6sC3b17l/nz5xMZGYm1tbVU2edhS5YsYe7cudjY2FBSUsKMGTPo168fNjY2+Pj4EBoayuDBg0lKSsLHx4evv/6awMBA3NzcOHjwYLXnDg4OxsbGhvnz53Pt2jXGjx/PsGHDAFi9ejU2NjaEhISgVCrx9fUlLi5OSnj+sOjoaHr27ElMTAyGhoakpKTw22+/sWrVKikB+uTJk1m7di3m5uYUFBTg5ubGgAEDaNmyJStWrODzzz/npZdeYtu2bY+9zgCZmZls3ryZHTt2YGRkxO+//867777LiRMnALh48SKrV68mKCiITz/9lM2bN7Nu3TpSUlKqvYc9PDyYPXs2np6eyGQydu/ezZQpU2ose3njxg3at/+7mkfr1q1RKpXcvXuXli1b1tjv+Ph4lixZgpWVFaGhoWRnZz+y3nt1RJ7Lv2namERt8edDbWuMa9r9B5o5JtDMcTXJ2uIHDx4kISGB0tJSioqK6Nq1K+Xl5Tg7O2NsbAwgBU6nTp1i+PDhdO3aFUCqLPO44NLAwECqdNO3b1/atWtH7969pc+nTp16oj7fv3+fVatWkZ6ejkwmIycnhwsXLkhPrB50/PhxLly4wKRJk4CKac68vDwAUlJScHFxQS6XI5fLGT9+PL/88stjzz9u3DgAOnbsSPPmzfnzzz+rPXelX3/9FXNzc6ytrYGKutYPlowEKCoq4ocffuDOnTvStsLCQjIyMmjbti1yuVyqBGRnZ4dcLufy5cuPLXWZkpLC8uXLgYqKO3Z2dmrXJjU1lZ07dwIV1/VJA
6AuXbpgZWUFwJUrV8jIyGDRokXS96WlpVy6dAktLS369OkjPR1+8803azW9/N1335GZmcnbb78tbSsrK+PWrVsAdOvWjT59+gAwYMAAaQr6xIkT1d7D5ubmdOrUiW+//ZYBAwZw/Phxli5d+kRjfpxz586Rl5eHra0tMpmMf/zjHxw8eBAvL696PY8gCIIgPIkGCS5/+uknPv/8c2JiYmjdujUJCQns27evxv1rKhqko6Oj9l1xcbHa9wqFQvqzlpaW2mdtbW1pqlZbWxul8u9auw+3U+njjz/G2NiYsLAwdHR08PT0rHFflUqFm5sbCxYsqPV4HkdXV7fG/le2+WB/anMepVKJTCYjNja2yuKPCxcuVPtkraanbbWlUqnYvHlzjWUia8PAwECtvVatWlX7FPXYsWM1tvG4+2fYsGGsWbOmynEZGRlV7q2ysrLH9tnDw4PPP/+cjIwMXnvtNZo1q/lfhWZmZmolKO/cuYNMJnvkU8vY2Fjy8vIYNWoUACUlJRgaGorgUhAEQWhUDRJc5uXlYWRkRMuWLSkpKSEuLg6AkSNHEhAQgLu7O23btqWwsBC5XM7QoUP59NNPuXLlitq0eNu2bSktLeXq1at06dKFxMTEOvWnc+fOxMTEoFQqKSoq4sSJE9IU7oPy8/Pp2bMnOjo6/Pbbb/z000+88cYbABgZGZGf//cjcQcHB5YsWcKbb75Ju3btKC8v5/z581hYWGBnZ8fBgwcZN24cZWVlJCYmqk2BPqlOnTqRlpaGnZ0dCQkJ0nYrKyuWL1/O6dOnsbKykqbFH3x6aWRkxCuvvMLWrVuZN28eUDElq6OjI70fmpycjK2tLcnJyZSVldG1a1eKioq4f/8+ZWVl6OhUvW1sbW2Ji4tj7ty5XLt2jaSkJOkJqIODA1u3bmXVqlVoa2tz584dCgsL6xxsduvWDT09PQ4cOICLiwtQEQCamppiZWVFQECAdO988cUX0nGPun+GDBnCJ598wu+//0737t2BivdPH5cNoKZ7WKFQYG9vT1hYGGfPnn3s9LyFhQX379/np59+YuDAgcTExDB27Nga9y8pKeHw4cPExsZKT/gBxowZI7UhCADaurpEq5SP37GJaWqpiAThRdIgweXw4cM5dOgQY8eOxdTUFAsLC9LS0nj11Vfx8vJi5syZyGQyFAoFUVFRdO3alY8++oiFCxdSXl6OtrY2YWFh9OzZk4CAAGbOnEmHDh0YNGhQnfrz2muv8eWXX+Lo6EiXLl3o27dvtfvNmTMHPz8/Dh06ROfOnbGxsZG+Gz9+PEuXLuXo0aPSgp7333+fOXPmUF5eTmlpKWPGjMHCwoLJkyeTnp6Oo6Mj7dq1w8bGhj/++KNOfQdYtmwZgYGBGBsbS68BALRs2ZLIyEjCwsIoKipCS0uLJUuWSEFepfDwcFavXo2TkxNQMZUbEhKCsbExGzduVFvQs2HDBum1BCcnJ5ycnGjRogUxMTFqbQYEBODn58fRo0fp1q0bQ4YMUevv2rVrcXZ2RiaTIZfLWbZsWZ2DSx0dHaKioggNDWXHjh0olUratGnD+vXradOmDR999BGzZ8+mZcuWjBkzRu24mu6frl27snbtWgICArh//z6lpaVYW1s/Nris6R5u27YtWlpauLi48O2339KrV69HtqOlpcWaNWtYuXKlWiqimhw7dozOnTurBZYAb7zxBnFxcSK4FCRdX36ZeQsenVqtKdLEd94EQVPIVHWdsxWEJuLh1fUNaebMmUyePPmRTyEbm1jQ8zdNG9OBA/uQy7VxdHRr7K7UO037rUCMqSnRxHHV54IekedSEJ6BtLQ0Ro8eTbNmzXj99dcbuzuCIAiC0GBEhZ5G9MUXX7B79+4q28PCwqRV7pps9uzZ3LhxQ22bmZlZvdfhTk9Pr9f2aqNfv37VLi4KDAzkzJkzatu0tbWJj4+vtp3z58/j7+9fZfvUqVOlzASCIAiC8Dyp87T4tWvX0NLSokOHDvXdJ
0EQGpCYFv+bpo1JTIs3LWJMTYcmjqtR8lwuWrSIqVOnYm1tTVxcHB9++CFaWloEBAS8ME9QIiMj8fb2VktL8yBnZ2f27t2Lnp5evZ+7qdUGbyqys7Px9fVl165dQNXfeMOGDXTv3l3KOVqfbt26RXh4OD/++CNGRkYolUpsbGxYuHChWtqiiRMnUlpaWiX1UmpqKhEREWRmZqKvr0+rVq3w8fFRW3gmCFcuXmTVrGmN3Y1619RWi/sGhzV2NwShwdQ6uExKSiIsrOIvx7///W927txJ8+bNmTdv3gsTXH7yySd4enpWCS4r0/PUVL3meVBZG/zB4DIkJKQRe/R8MDU1lQJLqPobV5e3tD7cu3ePt99+GxcXF0JCQtDW1qa4uJjt27dz+/ZtKbj8/fffuX37NnK5nLNnz0qZDdLT0/H29mbNmjVSGq2rV69y4cKFZ9JfoekqLy5mikwDX68vU0ETGVf0jazH7yQIGqTWwWVpaSkKhYLs7GypzCIgVTDRdB9++CFQUfO58nUAMzMzrly5Qm5uLvHx8Wqrkl/k2uBQfb3ttm3bcuDAAXbs2AFU5BsNCgqiTZs2xMfHk5iYSLNmzUhPT8fU1JQVK1awZs0arl69ioWFBeHh4chkMvz9/dHR0eH69evcuHEDGxsbAgMDUSgU3Lp1i5UrV5KZmQnArFmzcHFxQalUEhQURHJyMgqFAgMDA2JiYtTqiz/8G+/atYvQ0FDpCW5hYSHBwcGkpaUBFemoKhOWe3h4YGFhwa+//kpOTg5jx47F19e3xuuTmJhIy5YtmTNnjrRNV1dXyj364HV0dnZGoVAQFxcnBZfbtm1j4sSJavlZu3TpQpcuXWo8pyAIgiA0hFoHl71792bLli388ccfUm7F7OxsjIyMnlXfnisrV64kOjpaqnft7+/P6dOn2b17t1r1mAe9qLXBa6q3/dtvvxEeHk58fDwmJiasX7+ejz76iPXr1wMVK6wTEhJo164d3t7eLF68mN27d6Ovr4+rq6taYvYzZ84QExODrq4uXl5e7Nu3j6lTpxIcHEz37t3ZtGkTOTk5TJgwgT59+lBWVkZSUhJffvklWlpa/PXXX4/9jR+2efNmlEolCQkJFBYW8uabb9KzZ0/s7e2BimT0e/bsobCwkNGjRzNx4sQqeSgrnT179rE5NEtLS0lISCAmJga5XI6Liwv+/v4oFArOnTunlsPzabRpU/e/w/X5js7zQpPGJJfX/A9AoeEo5Nq1vq806f6rpIljAs0dV32odXAZEhLChg0b0NHRwc/PD4DTp09LibhfRGPGjKkxsIQXtzZ4TfW2U1JSsLe3x8TEBKh4Qujs7CwdZ21tTbt27YCKf8x06NBBmh7u1asXV69elcY2btw4qV0XFxe+/vprpk6dSlJSkrS62sTEBHt7e6m2e3l5OQEBAQwaNIiRI0c+8npUJykpiWXLliGTyTAyMsLR0ZGkpCQpuBwzZgxaWlo0a9YMc3NzMjMzawwuH3bgwAF27txJfn4+vr6+jBs3jm+++YZu3brRuXNnAPr06cN//vMfHB0d61xStDpiQc/fNG1MpU3knURNV1JaXqv7StPuP9DMMYFmjqtRFvR07tyZdevWqW0bM2ZMvT09aYoeFViCqA1eXTuP6svD16um6/e4dh8+h0wmo1mzZhw+fJiUlBSSkpIIDw9n//79T93/Bz/Xtr9QESg+mH7IxcUFFxcXfHx8uH//PgBxcXFcvHgRBwcHoOIfFnFxcTg6OtK3b19SU1MZPXr0E41BEARBEJ61WgeXKpWKL774gsOHD3Pnzh0SEhL48ccfuXnz5jNZSfs8MjQ0pKCg4KkrvWh6bfCa6m3b2dmxbds2bt68ibGxMfv27atSmrK2jh49yvTp01EoFBw6dEh6EmlnZ8fevXvx8fHh5s2bnDx5khkzZnDnzh20tbUZPnw4Q4YM4cSJE1y7dk16ilrpUb/x4MGDiY2NxdramsLCQo4cOSI9xX9Sb
7zxBtu3b2fr1q3MmjVL+gdHZWCZk5PDjz/+yLfffiu9elJcXMywYcPIysrinXfeYcaMGdja2krX8NKlS5w/fx5HR8c69UnQTKK2eOMTtcWFF02tg8sNGzZw6tQppk+fzsqVKwFo164dq1evfmGCS09PT6ZNm4aent5T5ffU9NrgNdXb7t69O4sXL8bT0xOoCLKDgoLqdA1tbGyYN28eWVlZ2NjYSO9/Ll++nMDAQOna+Pr60r17d86ePcuKFSsoKyujvLyc4cOHM2DAALKy1FdxPvgbP7iKHGDu3Ll89NFHUtvjx49n+PDhdeq/gYEBu3fvZt26dfzjH/+gefPm6OnpYWFhwbBhw9i/fz/Dhw9Xe6dZV1eXUaNGER8fz/z584mKiiIiIoLAwEC1VESC8CBRW1wQhIZW6yTq9vb27N+/n9atW2NjY8OPP/6ISqXi1Vdf5ccff3zW/RQEyYuSg7OhiHcu/6ZpYxJJ1JsWMaamQxPH1Si1xcvLy6Wpwsr3zAoLCx/73qEgCIIgCILw4qj1tPjw4cNZvXo1y5YtAyrewdywYUOdVt0KmqGhaoM/rDKZf1PQWNdIEARBEBpLrYPLZcuWsWTJEl555RXKysqwsrJiyJAh/POf/3yW/ROeYyJAejxxjQRBEIQXTa2Cy/Lyco4ePcrHH39MQUEBf/zxB2ZmZlIeQ0EQBEEQBEGAWgaX2trahIWFMXHiRHR1dWnTps2z7pcgPDEHBwdpZbxSqWTOnDl1Ssvj4OBAVFQUPXr04N69e8yfPx8TExOCg4MJCAjg8OHDfPXVV7RvX5Fe5OEFRqmpqURERJCZmam2itvGxqbGc16+fJnw8HAuXLiAvr4+BgYGeHl58f333/PLL78AkJGRQceOHdHV1eW3337j5ZdflqoNFRQUSBkMJk+ezNtvv/3E435Wwpf7U9BEais3pfQ2tdGyx8vI9XQfv6MgCEI9qvW0+MiRIzl+/LiU0FkQnkcbN26kR48enDt3Dnd3d+zs7GjdunWd2srPz8fLy4u+ffsSEBAgLWQzNjYmMjKS1atXVzkmPT0db29v1qxZI5XPvHr1KhcuXKjxPDk5OUydOpUPPviATZs2SdtOnTolpf2CiqC3cnwPio+P58SJE2zcuLFO43zWCm5kMUVW67WDjatMBU2lr7XwE3DrgUINgiAIDaHWwWVxcTE+Pj5YWVnRrl07tcoka9aseSadE4S66tOnD4aGhsTGxrJ//37paeb69esxNzfn0qVLhIaGkpubS2lpKdOnT8fN7e90Lbdv38bPz4+RI0eyYMECtbbd3d3ZvXs3Fy9e5OWXX1b7btu2bUycOFEKLAG6dOlCly5dauzrnj17GDRoEC4uLtI2ExMTtc+CIAiC0FTUOrjs0aNHlScmgvC8Sk5Opri4mC1btpCYmIiZmRklJSWUl5dTVlaGr68va9euxdzcnIKCAtzc3BgwYIBU+/39999nypQpVQJLqEiA7u3tTUREhPSksdK5c+eeuCTquXPn1JLWN7Q2bYwev1MNapMXTSHXrngiKDSa+sxf9zzRxHGJMTUdmjqu+lDr4HL+/PnPsh+CUC98fHzQ1dXFyMiIyMhIoqOjWbp0KaNGjWLEiBF06tSJixcvkpGRwaJFi6TjSktLuXTpkhRc2tvbc+TIEd56660qJSKh4r3GnTt3cubMGbXttaxJ8NTH1KdnnUS9pLRco6aamyJNS/YMmpvEWoypadDEcdVnsFzr4DIpKanG7+zs7OqlM4LwtB5+J3Hw4MGkpaWRnJzMtGnTWLVqFe3bt6dVq1YcPHiwxnbeeecdvvnmGzw8PNi1a1eVAFMul/Pee++xbt06aWEPQN++fUlNTWX06NG17nPfvn1JS0t7glEKgiAIwvOr1sFlQECA2ufKd9VMTU3573//W+8dE4SnVVZWRlZWFpaWllhaWpKZmcn58+cZMmQIenp6HDhwQHqvMSMjA1NTU7Va3t7e3qhUqhoDTCcnJ3bs2
MH169exsLAAKoLSGTNmYGtrK9WEv3TpEufPn69x5fqUKVNwdXUlISFBqluenZ3NyZMnpZrpTZmRWXuixWrxRtESkOuK1eKCIDSsWgeXx48fV/tcXl7Op59+KpWEFITnjVKpxN/fn/z8fGQyGWZmZixevBgdHR2ioqIIDQ1lx44dKJVK2rRpw/r166u0MXv2bCnA/Oyzz9S+09LSYuHChcyePVva1qtXL6KiooiIiCAwMFAtFVFNTE1N2bVrF+Hh4axfvx4DAwMMDAx499136+1aNCbf4KZTUUnTproqa4sLgiA0JJnqKV74Kisrw97enu+//74++yQIQgN61u9cNiWaNqbK4NLR0e3xOzcxmvZbgRhTU6KJ46rPdy6f6i3777//Xi0lkSAIgiAIgvBiq/W0uL29vVogee/ePUpKSggMDHwmHRMETTNhwgTKy9Xf5+vfvz9BQUGN1CNBEARBqH+1Di7Xrl2r9llfX59u3bqpLYAQBKFm8fHxjd0FQRAEQXjmah1cpqWlMWvWrCrbd+7cycyZM+u1U4IgCEL9uHLxIqtmTWvsbtS7pray38isfZNa3CYIT6PWweWmTZuqDS4//fRTEVwKL5RJkyZRUlJCaWkpV65coXv37kBFycl33nmHcePGsXTpUmbMmCEdo1Kp+Oyzz9i3bx8qlQqlUsnAgQPx8/OjefPm3Lx5k7Vr1/Lzzz+jr6+Pjo4OU6ZMqVUqoqVLlxIfH88vv/wisjcIVZQXFzed2u5PoonVgW8q6bgEoT48NrisTJ6uVCpJTk5WqyZy/fp18R8z4YXzxRdfABX3v5ubm1oy9n/+85/Y2toSFxenFlyuX7+eH3/8kf/7v/+jbdu2KJVKjh07xl9//YVcLmfq1KlMmDCBsLAwtLS0yMvL48iRI4/ty/Hjx8WiOkEQBOG58tjgsjJ5enFxMcuWLZO2y2QyjI2NWb58+bPrnSA0IWVlZSQkJLBnzx7effdd0tLS6NevH4WFhezcuZMDBw7Qtm1boCJH5muvvQZUBKstW7bE29tbaqt58+a4u7s/8ny5ubl88skn/N///R9xcXF17vezri3e1GjSmESOy+eHQq5dq3tLk+6/Spo4JtDccdWHxwaXlcnT/fz8WLNmzTPvkCA0VSdOnKBLly506dIFV1dX4uLi6NevHxkZGSgUCl566aVqjzt79iyWlpZPfL6goCDee+89mjV7uv+DE3ku/6ZpYyptQu8karqS0vLH3luadv+BZo4JNHNcjZLnUgSWgvBosbGxuLq6AuDi4sKXX35JcXExj6tTUJc6Bl9++SVyuZyRI0fWqa+CIAiC8KzUekFPQUEBkZGR/Pjjj+Tm5qr9B/HEiRPPom+C0GTcunWL77//ngsXLrB582agIhfs119/jYODA8XFxVy+fJlu3bpVOdbCwuKJp7VTUlJITk7GwcFB2vbGG2+wbds2Xn755acbjCAIgiA8hVoHl6tWrSI7O5u5c+fywQcfsHbtWnbs2MHrr7/+LPsnCE3C/v37ef311wkPD5e2JSYmEhsbi5OTEzNmzCAwMJD169fTpk0bVCoVCQkJDBgwAEdHR7Zt28b27duZNWsWMpmMvLw89u/fz/Tp06s936pVq1i1apX0uWfPniQmJooFdkIV2rq6RKuUjd2NetcUUxEJwoui1sHl999/z5EjR2jVqhXa2tqMHj2afv36MXv2bLVVsYLwItq/fz9LlixR2zZq1ChWrlzJ9evXWbRoEf/+97/x8PAAKqbCBw4cyIgRIzAwMGDXrl2sXbuWUaNGYWhoKKUiEoSn1fXll5m3YMnjd2xiNPGdN0HQFLUOLpVKpbRwwMDAgLy8PIyNjbl69eoz65wgPM86duxISkoKQLVpg/T19fn555+lzzNnzqwxJ6ypqanaU88nlZ6eXudjBUEQBKE+1Tq47NWrFz/++CN2dnYMHDiQDz/8EENDQ7p27foMuycIgiAIgiA0JbUOLoODg6VFPMuXL2fdu
nXk5eWJVeSC8AwFBv7/9u49IMf7f/z4867uO5JjkpxCznJq+ihGE9vQUo5rhiWUYW1oRMRaDh/aQmM5fewzp1BhYebja9gmbfuM1cdpk0Msk+ModLrv3x/9XHOrqNxUt9fjL/d1eF/v13Xd5dV1Xe/3K4Rff/1Vb5mpqanUKRdCCFFuFTu5bNiwofLvWrVqMXfu3GfSISHE30JDQ8u6C0IIIUSJFDu51Ol0bN26lZ07d3Lz5k3i4+P56aefuHr1Kv369XuWfRSiwnhc3fFWrVoZtLb4sWPHCAkJISsri/r167No0SKsrKyeV6iigjh/5gxzRo8s624YXEUcLR4YtqCsuyHEc1Hs5HLJkiUcPnyYd955h9mzZwNQt25d5s+fL8mlEP9fUXXHIyIi+OabbwxWW1yn0/Hhhx8yf/58OnfuzPLlywkPD2f+/PnPJU5RceRlZTFMVex6GRVHrg4qUFwbL6eVdReEeG6K/ZO5bds2oqKicHd3R6VSAfmjZS9evPjMOieEMXhQWzwsLKxAbfGGDRuyc+dOpba4iUn+j+STaosnJydjbm5O586dAfD29mbPnj3PPhghhBDiCYp95zIvL0+ZoPlBcpmZmYmFhcWz6ZkQRuJZ1Ba/fPky9er9PSlzrVq10Gq13Lp1ixo1apSoLSsryxJt/zBD1qItL4wpJrXatKy7IP4/jdq0WN8tY/r+PWCMMYHxxmUIxU4uXV1dmT9/PjNmzADyH8stWbJEahsL8QTPora4IV2/noFWW/I+GOMk1sYWU04FeifR2GXn5D3xu2Vs3z8wzpjAOOMyZLL8xMfiV69eBWD69Omkp6fTuXNn7ty5Q6dOnUhLSyMwMNBgnRHCGDVr1kypLV4YBwcHkpOTS9Smra0taWl/v8N148YNVCpVie9aCiGEEIb2xDuXr7/+Or/88guWlpYsX76csWPH8t5772Fra4u1tfXz6KMQFVqVKlUMXlvcwcGB+/fv8/PPP9O5c2eio6Pp27fvc45MVARSW7x8kNri4kXyxOTy0Ud2v/76a4nfDxPiRWfo2uImJiYsXLiQ2bNn601FJMSjpLa4EOJ5e2Jy+WDwjhCi+B6uOw75P0eGri3u6OhIfHz8U/VTCCGEMLQnJpd5eXkcOXJEuYOZm5ur9xnAxcXl2fVQCCGEEEJUGE9MLq2srJQR4gA1atTQ+6xSqfi///u/Z9M7IV5wUltcCCFERfPE5HL//v3Pox9CiEJIbXEhhBAVTcWpnSWEEEIIIcq9Yk+iLkR55ebmhkajQaPRoNVqeffdd3F3dy9VO1FRUbRo0YJ79+4xceJE6tSpQ1hYGMHBwezatYtvvvlGqYwTFBSEg4MDw4cPByApKYmIiAhSU1OpXLkyNWvWJCAgACcnpwLH+u6775QBPNeuXUOr1VKnTh0AJk6cyKuvvsr333/P8uXLSU9Pp1q1apiamvLWW28xcOBApZ2UlBT69evH9OnT8fHxKXHMz0L4zCAyKmgd5Yo2vc2T1GjRDHUl87LuhhDiBSPJpTAKS5cupUWLFpw4cQJvb29cXFyoVatWqdq6c+cOfn5+tG3bluDgYGXGBGtrayIjI5k/f36BfU6fPo2/vz8LFy6ke/fuAFy4cIFTp04Veozu3bsr20VGRnL37l2mTft7upjvv/+eGTNmsHTpUjp27AhAamoqMTExeu3ExMTg7OxMbGxsuUkuMy6nMUxVQR+K5Oqgova9ED8D17KyyrobQogXjPH8FhUCaNOmDVWqVCEmJoa+ffvi6emJh4cHKSkpAJw9e5YxY8YwaNAg+vfvT2xsrN7+169fZ8SIETg7OzNz5ky9qbi8vb354YcfOHPmTIHjrlq1isGDBysJI4CdnR2vv/56qeJYtmwZ48ePVxJLgEaNGjF58mTlc25uLvHx8YSGhpKVlVXiKj9CCCHEsyB3LoVROXLkCFlZWaxYsYKdO3dia2tLdnY2eXl55ObmEhgYyKJFi7C3tycjI4NBgwbRs
WNH7O3tAfjggw8YNmwY77//foG2LSws8Pf3JyIigmXLlumtO3HiBH369DFYHCdOnCAkJOSx2xw4cAA7Ozvs7OwYMGAAsbGxtGvXrsTHsrKyLG03C61Fq1Gb5t8BFOWGIWsGlyfGGJfEVHEYa1yGIMmlMAoBAQGYm5tjaWlJZGQkGzduZPr06fTq1YtXXnmFhg0bcubMGVJSUvTu/uXk5HD27FkluXR1dWX37t289dZbyjuQDxs6dChr164tMD3Qo5WsnkV858+f5/r16/zwww9A/iPxAQMGAODl5YWXlxfTp0/H3Lxk79hdv56BVlvy/hdVISU7J8+oHi0bA2OsZGOMFXokporDGOMyZLIsyaUwCg/euXyga9euJCcnc+TIEUaOHMmcOXOoV68eNWvWZMeOHUW2M2bMGL799ltGjBjBunXrCiSYarWa9957j08++UQZ2APQtm1bkpKS6N27t0Hiad26NcnJybRu3VqJLzMzE0dHRyB/ENAPP/zAqVOnWL58OQD37t1j7969eHh4GKQPQgghRGlIcimMTm5uLmlpabRv35727duTmprKyZMn6datG5UqVWL79u14eXkB+aOtbWxssLT8+9Gwv78/Op2uyATTw8ODNWvWcOnSJRwcHID8pNTHxwdnZ2e6du0K5L/fefLkyVKNXB8/fjyzZs2iVatWtG/fHshPHh/Ytm0br7/+ul7JyJ07dxITE1PmyaWlbT02ymjxcqEGoC7hnWwhhHhaklwKo6PVagkKCuLOnTuoVCpsbW2ZMmUKZmZmREVFMW/ePNasWYNWq8XKyorFixcXaGPcuHFKgvnll1/qrTMxMWHSpEmMGzdOWdaqVSuioqKIiIggJCREbyqi0ujRowehoaHMmzePq1evUrt2bTQajTKp+rZt2/RGlwP06tWL2bNnc+nSJRo0aFCq4xpCYNiCMjv20zK2R13bt29BrTYt624IIV4wKt2zfllMCFGuGfqdy4rM2GJ6kFy6uw8q664YnLFdK5CYKhJjjMuQ71zKW/dCCCGEEMJg5LG4EM/YwIEDycvTf4+vQ4cOUjdcCCGEUZLkUohnLC4urqy7IIQQQjw3klwKIYQRO3/mDHNGjyzrbhhcRR7Zb2lbr0IPfBPiSSS5LIHIyEj8/f3RaDSFrvf09GTz5s1UqlTJ4Md2c3MjKipKby7Hkrh9+zabN29m7NixyrLg4GAGDBhA586dC2wfFBSEg4MDw4cPL3WfK4IrV64QGBjIunXrgILXeMmSJTRv3px+/foZ/NjXrl0jPDycn376CUtLS7RaLU5OTkyaNImqVf9+sXrw4MHk5OQUOj/nX3/9xcsvv4y3tzfBwcEG76Oo+PKysipurffHqcB14CvqVF1CFFfF/MksI5999hk5OTkFlufm5gKwY8eOZ5JYGsLt27dZvXq13rK5c+cWmli+SGxsbJTEEgpe4/fff/+ZJJb37t3j7bffxs7Ojr1797Jjxw5iYmKwsrLi+vXryna///47169f5969exw/frxAO/Hx8XTs2JFdu3aRnZ1t8H4KIYQQJSV3Lovpo48+AsDb2xsTExPq16+Pra0t58+f5+bNm8TFxdGyZUt++eUXqlSpgpubG56enhw+fJirV6/i6+ur3AV8eLtHPx89epSFCxeSmZkJwNSpU3n55Zf1+pKenk5YWBhpaWlkZWXh7u6uzLmYlJTE3LlzuXv3LhYWFgQHB9O+fXtCQ0O5c+cOnp6eVK5cmejoaEaMGIGvry89e/bkypUrTJ06lZs3b9KgQQO9ASgZGRnMnz+f06dPk5WVRZcuXZg+fTqmpkXPnxcTE6PMD6lWq1mxYgW1a9dm+/btrFmzBoBGjRoRGhqKlZUVcXFx7Ny5k6pVq3L69GlsbGyYNWsWCxcu5MKFCzg4OBAeHo5KpSIoKAgzMzMuXbrE5cuXcXJyIiQkBI1Gw7Vr15g9ezapqakAjB49Gi8vL7RaLaGhoRw5cgSNRoOFhQXR0dFcu
nSJQYMGkZiYWOAar1u3jnnz5il3cDMzMwkLCyM5ORmA/v374+fnB8CIESNwcHDg2LFjpKen07dvXwIDA4s8Pzt37qRGjRq8++67yjJzc3MmTJhQ4Dx6enqi0WiIjY2lbdu2eutjY2OZOnUqK1asYP/+/aWqb27o2uIVnTHFJHNclk8atWmR3zNj+v49YIwxgfHGZQiSXBbT7Nmz2bhxI9HR0VSpUoWgoCCOHj3K+vXrsbCwKHSf+/fvs3nzZi5duoSHhwcDBgxQEsrC3Lp1i4kTJxIZGYmjoyN5eXlkZGQU2G7atGmMHz8eJycnsrOz8fHxoV27djg5OREQEMC8efPo2rUrCQkJBAQEsHfvXkJCQhg0aFCRpQ/DwsJwcnJi4sSJXLx4kf79+9O9e3cA5s+fj5OTE3PnzkWr1RIYGEhsbCxDhw4ttK3ExERWrFjBxo0bsba2JjMzEzMzM3777TfCw8OJi4ujTp06LF68mI8//liZxDw5OZn4+Hjq1q2Lv78/U6ZMYf369VSuXJkBAwaQkJCgVL/59ddfiY6OxtzcHD8/P7Zs2cLw4cMJCwujefPmLFu2jPT0dAYOHEibNm3Izc0lISGBr7/+GhMTE/76668nXuNHLV++HK1WS3x8PJmZmbz55pu0bNkSV1dXAC5fvsyGDRvIzMykd+/eDB48mMaNGxd6jo4fP65U3ilKTk4O8fHxREdHo1ar8fLyIigoSHlkf+rUKf766y+cnZ25evUqsbGxpUouZZ7LvxlbTDkV9J1EY5edk1fo98zYvn9gnDGBccYl81yWE3369CkysQSUx6kNGjSgWrVq/Pnnn49t79ixY9jb2yv1o01NTalevbreNnfv3uXHH38kLCwMT09PhgwZQnp6OikpKZw7dw61Wq0kYC4uLqjVas6dO/fEWBITExkyZAgADRs2xMXFRVm3f/9+1qxZg6enJwMGDOD48eOPbfPAgQN4enpibW0NQJUqVTA3NycxMRFXV1elnKK3tzcJCQnKfo6OjtStWxfIr6390ksvUbVqVczMzGjVqhUXLlxQtu3Xrx9VqlTBzMwMLy8vjhw5AkBCQgLe3t4A1KlTB1dXVxITE2nYsCF5eXkEBwezffv2J56PwiQkJDBkyBBUKhWWlpa4u7vr9b9Pnz6YmJhQtWpV7O3tlbunxbF9+3Y8PT1xc3Nj9+7dAHz77bc0adKERo0aYWtrS5s2bfjPf/6j7PPgrqZKpeK1117j119/5cqVK6WKTQghhDAUuXP5FB6XWEL+Y84HTE1NlUfNpqamPCiMlJWVpWxTnGJJWq0WlUpFTEwMarVab92pU6dQqVQF9ilsWUnodDqWL19Ow4YNn7qdx/Xl0fNV1Pl7UruPHkOlUlG1alV27dpFYmIiCQkJhIeHs23btqfu/8Ofi9tfgDZt2uhNUeTl5YWXlxcBAQHcv38fyH/kfebMGdzc3ID8PyxiY2Nxd3cnOzub+Ph4zM3NlbvROTk5bNu2Ta8spRBCCPG8SXJZAlWqVCEjI+Oxj7aLo2HDhiQnJ+Pi4kJ8fLyyvFOnTsycOZOjR4/SqVMn5bH4w3cvLS0teemll1i5cqXyft7ly5cxMzOjadOmZGdnc+TIEZydnTly5Ai5ubk0btyYu3fvcv/+fXJzczEzK3jZnZ2diY2NZfz48Vy8eFHvEbSbmxsrV65kzpw5mJqacuPGDTIzM4tMNnv27ElwcDDe3t7Url2bzMxM1Go1Li4urFq1iqtXr2Jtbc2WLVuUY5TUnj17eOedd9BoNHz11Vf07NkTyL9bu3nzZgICArh69SoHDx7Ex8eHGzduYGpqSo8ePejWrRsHDhzg4sWLyl3UBx53jbt27UpMTAyOjo5kZmaye/dupk6dWqr+v/HGG6xevZqVK1cyevRo5Q+OB4lleno6P/30E4cOHcLSMv+dyKysLLp3705aWhrHjh2jadOmbNq0SWnz6NGjTJs2TZJLocfU3JyNOm1Zd8PgKvpUR
EIYM0kuS8DX15eRI0dSqVIl6tevX+p2ZsyYQUhICNbW1rzyyivK8ho1ahAZGcmCBQu4e/cuJiYmTJs2rUACFh4ezvz58/Hw8ADyE6K5c+dibW3N0qVL9Qb0LFmyBI1Gg0ajwcPDAw8PD6pXr050dLRem8HBwUydOpU9e/bQpEkTunXrptffRYsWKY9g1Wo1M2bMKDK5/Mc//oGfnx+jRo1CpVKh0WiIioqiefPmTJkyBV9fXyA/yS5tlRonJycmTJhAWloaTk5OyvufM2fOJCQkRDk3gYGBNG/enOPHjzNr1ixyc3PJy8ujR48edOzYkbQ0/SlBHr7GD48iBxg/fjwff/yx0nb//v3p0aNHqfpvYWHB+vXr+eSTT3j11VepVq0alSpVwsHBge7du7Nt2zZ69OihJJaQf2e0V69exMXFcezYMaUfD3Tq1AmtVstPP/2Ek5NTqfoljE/jZs2Y8P60su6GwRnjO29CGAuVrjjPYoUoR16UOTifFxnQ8zdji2n79i2o1aa4uw8q664YnLFdK5CYKhJjjEsG9AghhBBCiHJJHouLUhs3bhyXL1/WW2Zra0tUVNQzPe6CBRWnbFpZnSMhhBCirEhyKUpNEqQnk3MkhBDiRSPJpRAG4ubmpgyeunfvHs2aNWPs2LHKvKVJSUlERESQmppK5cqVqVmzJgEBATg5OaHT6fjyyy/ZsmULOp0OrVZL586dmTp1KtWqVSv0eJ9//jm7d+9WRpr7+/s/k1KVomI7f+YMc0aPLOtuGFxFHi0O+SPGA8MqzlMYIUpCkkshDGjp0qW0aNECgL179+Ln58eaNWuoVKkS/v7+LFy4UKl8dOHCBU6dOgXA4sWL+emnn/j3v/9N7dq10Wq17Nu3j7/++qvI5HL48OFK+cgrV67Qt29funXrVmDiffFiy8vKYpjKCF+vz9VBBY5r4+W0J28kRAUlyaUQz8hrr71GUlISa9asQaPRMHjwYCWxBLCzs8POzo7MzEzWrl3L9u3bqV27NgAmJia89tprj22/atW/R/bdvXsXlUqFVmt88xkKIYSoWCS5FOIZ6tChA/v37wcosu53SkoKGo2Gpk2blrj9TZs28e9//5s///yTefPmUbNmzRK3YWVl+eSNimDIqSvKC2OKSa02LesuiCJo1KaFfteM6fv3gDHGBMYblyFIcinEM/RgGtnHTSf7NFPNvvXWW7z11lucPn2awMBAXFxcSpxgyjyXfzO2mHIq8DuJxi47J6/Ad83Yvn9gnDGBccYl81wKUUEkJyfTvHlz2rZtS1JSUqHbNGvWjKysLM6dO1fq47Rs2ZI6derw448/lroNIYQQwhDkzqUQz8i+ffvYtGkTq1evplKlSvj4+ODs7KyU8zx79iwnT57E3d0dHx8fQkJCWLx4MVZWVuh0OuLj4+nYsSONGjUqtP2UlBTs7e0BuHjxIidPnqRZs2bPLT5RMUht8fJJ6osLYybJpRAGFBAQoExFZG9vz8qVK+nYsSOQP+dlREQEISEhelMRAUyePJkvvviCESNGAPmPyjt37qxXe/5RS5cu5cyZM5iZmWFqasrMmTOVZFOIB6S2uBDieZPa4kK84OSdy78ZW0xSW7xikZgqDmOMS965FEIIIYQQ5ZI8FheiHDt48CCffvppgeWTJ0/G1dW1DHokhBBCPJ4kl0KUY66urpJECiGEqFDksbgQQgghhDAYuXMpRAm4ubmh0WiUEeHNmjVj7NixODo6KtsMHjyYnJwcduzYobdvUlISERERpKam6o0Wd3JyQqfT8eWXX7JlyxZ0Oh1arZbOnTszderUImuL+/j4cPPmTQDy8vL4/fff2bFjB61atXp2J0BUOOfPnGHO6JFl3Q2DM4apiALDFpR1N4R4JiS5FKKEli5dSosWLQDYu3cvfn5+rFmzhg4dOvD7779z/fp11Go1x48fp23btgCcPn0af39/Fi5cqNQXv3DhAqdOnQJg8eLF/PTTT/z73/+mdu3aa
LVa9u3bx19//VVkcvnFF18o/963bx+LFy+WxFIUkJeVxTCVET6kytVBBY5r4+W0su6CEM+MJJdCPIXXXnuNpKQk1qxZw9KlS4mJicHT0xONRkNsbKySXK5atYrBgwcriSWAnZ0ddnZ2ZGZmsnbtWrZv307t2rUBMDEx4bXXXit2P2JiYhg0yPimmxFCCFHxSHIpxFPq0KED+/fvJycnh/j4eKKjo1Gr1Xh5eREUFIRGo+HEiRP06dOn0P1TUlLQaDQ0bdq0VMe/du0aCQkJzJs3r1T7W1lZlmo/MOy8aOWFMcWkVpuWdRdEETRq00K/a8b0/XvAGGMC443LECS5FOIpPahD8O2339KkSROlXGObNm34z3/+g7u7O4+rVfC0dQy2bdtG9+7dqVWrVqn2l0nU/2ZsMeVU4HcSjV12Tl6B75qxff/AOGMC44xLJlEXohxJTk6mefPmxMbGcubMGdzc3HBzc+PkyZPExsYC0LZtW5KSkgrdv1mzZmRlZXHu3LlSHT8uLk4eiQshhCg35M6lEE9h3759bNq0iVWrVjF69GgOHTqEpWX+Y+asrCy6d+9OWloaY8aMwcfHB2dnZ7p27QrA2bNnOXnyJO7u7vj4+BASEsLixYuxsrJCp9MRHx9Px44dlTuhhfnll1+4c+cOPXr0eC7xiorH1NycjTptWXfD4IxhtLgQxkqSSyFKKCAgQJmKyN7enpUrV/LTTz/Ro0cPJbEEMDc3p1evXsTFxTFx4kSioqKIiIggJCREbyoiyK+488UXXzBixAgg/1F5586deeWVVx7bl7i4OLy8vDA1lXfrROEaN2vGhPenlXU3DM4YH0sKYSxUuqd94UsIUaHJO5d/M7aYtm/fglptiru78b02YWzXCiSmisQY45J3LoUQQgghRLkkj8WFKMcOHjzIp59+WmD55MmTpea4EEKIckmSSyHKMVdXV0kihRBCVCiSXAohnkr4zCAyjKSUXUUfgfyoGi2aoa5kXtbdEEK8YCS5FKIE3Nzc0Gg0ymjxZs2aMXbsWBwdHZVtBg8eTE5ODjt27NDbNykpiYiICFJTU/VGizs5OaHT6fjyyy/ZsmULOp0OrVZL586dmTp1apG1xT/66CMSEhLQaDRYWFgQHBxMu3btnmn8hcm4nGY8tasreL3qR/0MXMvKKutuCCFeMJJcClFCS5cupUWLFgDs3bsXPz8/1qxZQ4cOHfj999+5fv06arWa48ePK7XFT58+jb+/PwsXLlTqi1+4cIFTp04BsHjxYn766Sf+/e9/U7t2bbRaLfv27eOvv/4qMrns0aMHM2bMQK1W8+233zJp0iT27dv3HM6AEEIIUTRJLoV4Cq+99hpJSUmsWbOGpUuXEhMTg6enJxqNhtjYWCW5XLVqFYMHD1YSSwA7Ozvs7OzIzMxk7dq1bN++ndq1awNgYmLCa6+99thj9+zZU/l3x44d+fPPP9FqtZiYlOzO29PWFteoTfPv+Ilyy1hrIBtjXBJTxWGscRmCJJdCPKUOHTqwf/9+cnJyiI+PJzo6GrVajZeXF0FBQWg0Gk6cOEGfPn0K3T8lJQWNRkPTpk1L3YcNGzbwyiuvlDixhKef5zI7J8+oHiUbI2Objw+Md55BialiMMa4ZJ5LIcqRB3UIvv32W5o0aUKjRo2wtbWlTZs2/Oc//9Hb5nH7l9auXbuIj49nzpw5T9WOEEIIYQiSXArxlJKTk2nevDmxsbGcOXMGNzc33NzcOHnyJLGxsQC0bduWpKSkQvdv1qwZWVlZnDt3rsTH/s9//kNERARr1qxRHqkLIYQQZUkeiwvxFPbt28emTZtYtWoVo0eP5tChQ0p98aysLLp3705aWhpjxozBx8cHZ2dnunbtCsDZs2c5efIk7u7u+Pj4EBISwuLFi7GyskKn0xEfH0/Hjh1p1KhRocf+9ttvmT9/PmvXrqVBgwbPLeZHWdrWY6NMRVQu1QDU5jIVkRDi+ZLa4kKUwKNTEdnb2+Pn5
8fPP//MiRMnWLx4sd7206dPp379+kycOJFjx44RERHBH3/8oTcVUefOndHpdHzxxRds3boVyH9U3rlzZz788MMiR4s7OzujVqupVauWsuyLL76gZs2aJYpJaov/zdhiktriFYvEVHEYY1yGfOdSkkshXnCSXP7N2GKS5LJikZgqDmOMSwb0CCGEEEKIckneuRSiHDt48CCffvppgeWTJ0+WmuNCCCHKJUkuhSjHXF1dJYkUQghRoUhyKYQBPava44XZt28fy5cvJzs7G51Ox6BBg/D19X2m8RUmfGYQGTJavFyq0aIZ6koyWlwI8XxJcimEgT2L2uOFsba25vPPP8fGxoY7d+4wcOBA2rdvT+fOnZ99kA/JuJzGMGOp0JOrM6pqQz8D17KyyrobQogXjPH8FhWiHHrttdfw9vZmzZo1AErtcS8vL2WCdSi69vjrr79eZNsdOnTAxsYGgKpVq2Jvb88ff/zxjCIRQgghikfuXArxjD1t7fHiSElJ4dixY3z00Ucl3tfKyrLUx7W2ropGbZp/x0+UW4acYqQ8Mca4JKaKw1jjMgRJLoV4xgqrPQ4otcfd3d2fqr54eno648ePJyQkRLmTWRJPO89ldk6eUT1KNkbGNh8fGO88gxJTxWCMcck8l0JUIE9be/xxrl+/zqhRoxgzZgz9+vUzdNeFEEKIEpM7l0I8Q4aqPV6YmzdvMmrUKN5++22GDBny3GJ6lNQWL79qILXFhRDPnySXQhhYQECAXu3xlStX8tNPP9GjRw8lsQQwNzenV69exMXFMXHiRKKiooiIiCAkJERvKqKirFy5kvPnz7N582Y2b94MwMiRIxk06PmW+gsMW/Bcj/csGdujrgflH4UQ4nmS2uJCvOCktvjfjC0mqS1esUhMFYcxxiXvXAohhBBCiHJJHosLUc4NHDiQvDz99wA7dOhAaGhoGfVICCGEKJokl0KUc3FxcWXdBSGEEKLY5LG4EEIIIYQwGLlzKSo0Nzc3NBoNGo0GrVbLu+++W+TUPU9qJyoqihYtWnDv3j0mTpxInTp1CAsLIzg4mF27dvHNN99Qr149AIKCgnBwcGD48OEAJCUlERERQWpqqt5IbycnpyKPee7cOcLDwzl16hSVK1fGwsICPz8/evfuDcBXX33Fv/71L+7fv49KpaJVq1Z8+OGH1KtXj4yMDBYvXszBgwepVKkSKpWK1q1bM2nSJOrWrVuKMymM1fkzZ5gzemRZd8PgjG3aKEvbevxzxbKy7oYQBiHJpajwli5dSosWLThx4gTe3t64uLhQq1atUrV1584d/Pz8aNu2LcHBwahUKgCsra2JjIxk/vz5BfY5ffo0/v7+LFy4UKkNfuHCBU6dOlXkcdLT0xk+fDgffvghy5YtU5YdPnwYgK1bt7J27VqWL19O48aNAUhMTOTatWvY2tri7+9P8+bN2blzJ+bm5uTl5bFlyxYuXbokyaXQk5eVxTBjrKCUqzOqylDGMlesECDJpTAibdq0oUqVKsTExLBt2zblbubixYuxt7fn7NmzzJs3j5s3b5KTk8M777yjNyfk9evXmTp1Kj179uT999/Xa9vb25v169dz5swZmjVrprdu1apVDB48WEksAezs7LCzsyuyrxs2bKBLly54eXkpy+rUqaN8/uyzzwgLC1MSS4AuXboAcPjwYf744w+++OIL1Go1AKamprz11lslOl9CCCHEsyDJpTAaR44cISsrixUrVrBz505sbW3Jzs4mLy+P3NxcAgMDWbRoEfb29mRkZDBo0CA6duyIvb09AB988AHDhg0rkFgCWFhY4O/vT0REhHKn8YETJ07Qp0+fEvX1xIkTdOvWrdB1169f588//6RDhw6Frj9+/Dht2rRREsunZWVl+eSNimDIedHKC2OKSSZQrzg0//9aGdP37wFjjAmMNy5DkORSVHgBAQGYm5tjaWlJZGQkGzduZPr06fTq1YtXXnmFhg0bcubMGVJSUpg8ebKyX05OD
mfPnlWSS1dXV3bv3s1bb71FnTp1Chxn6NChrF27ll9//VVveWnqEDxun5K2d/jwYf75z3+SmZnJW2+9xejRo0u0v0yi/jdjiynHiN5JNHYP3h81pu8fGN/P1APGGJchk2VJLkWF9+Cdywe6du1KcnIyR44cYeTIkcyZM4d69epRs2ZNduzYUWQ7Y8aM4dtvv2XEiBGsW7euQIKpVqt57733+OSTT5SBPQBt27YlKSlJGYhTHG3btiU5ObnQdbVr18bGxoakpCRefvnlAuvbtGnDxo0byc3NxczMjK5du7Jjxw7++c9/cvfu3WL3QQghhHgWJLkURiU3N5e0tDTat29P+/btSU1N5eTJk3Tr1o1KlSqxfft25b3GlJQUbGxs9Op9+/v7o9PpikwwPTw8WLNmDZcuXcLBwQHIT0p9fHxwdnama9euAJw9e5aTJ08WOXJ92LBhDBgwgPj4eDw8PAC4cuUKBw8eZOjQoYwfP54FCxawfPlyGjVqBMB3331HtWrV6Nq1KzY2NsyfP5+pU6dibm4OwL1797CwsDDcyRRGwdTcnI06bVl3w+CMcbS4EMZCkkthVLRaLUFBQdy5cweVSoWtrS1TpkzBzMyMqKgo5s2bx5o1a9BqtVhZWbF48eICbYwbN05JML/88ku9dSYmJkyaNIlx48Ypy1q1akVUVBQRERGEhIToTUVUFBsbG9atW0d4eDiLFy/GwsICCwsLxo4dC+QPIKpUqRIBAQHcv38fExMTZSoilUrF6tWriYiIwN3dncqVK1OlShWaNGlC//79DXMihdFo3KwZE96fVtbdMDhjfCwphLFQ6UrzwpgQwmjIO5d/M7aYtm/fglptirv7oCdvXMEY27UCiakiMca4DPnOpfFMEiaEEEIIIcqcPBYX4hkaOHAgeXn674V16NCB0NDQMuqREEII8WxJcinEMxQXF1fWXRBCCCGeK0kuhRDCiElt8Yrj4ZgsbesRGLagjHskROlIcikqNDc3NzQajVLq8d133y1y+p8ntRMVFUWLFi24d+8eEydOpE6dOoSFhREcHMyuXbv45ptvlPktg4KCcHBwYPjw4QAkJSURERFBamqq3mhxJyenIo95/vx5Pv30U/73v/9RvXp18vLycHV15YMPPsDUNL9aR15eHq+88grt2rVj+fLlBdpISUmhX79+TJ8+HR8fnxLHLYyf1BavQB6KSWqNi4rMyH4yxYto6dKlfPXVVyxcuJDp06dz48aNUrd1584dfH19adKkCfPmzVOSPGtrayIjIwvd5/Tp0/j7++Pr68v//d//sXPnTsLCwh7bj/T0dN5++21eeeUV9u/fz7Zt29i4cSP37t0jOztb2e7QoUPUqVOH//73v1y7dq1AOzExMTg7OxMbG1vqmIUQQghDkuRSGI02bdpQpUoVYmJi6Nu3L56ennh4eJCSkgLkT2w+ZswYBg0aRP/+/QskZNevX2fEiBE4Ozszc+ZMVCqVss7b25sffviBM2fOFDjuqlWrGDx4MN27d1eW2dnZ8frrrxfZ1w0bNtClSxcGDhyoLLO0tGTmzJlUrlxZWRYbG4u3tze9e/cuUF0oNzeX+Ph4QkNDycrKKrLijxBCCPE8yWNxYTSOHDlCVlYWK1asYOfOndja2pKdnU1eXh65ubkEBgayaNEi7O3tycjIYNCgQXTs2FGpLf7BBx8wbNgw3n///QJtW1hY4O/vT0REBMuWLdNbd+LECfr06VOivp44cYJu3bo9dpsbN26QmJjIggULaNq0KbNmzdKrG37gwAHs7Oyws7NjwIABxMbG0q5duxL1A8DKyvLJGxXBkPOilRfGFJNabVrWXRClpFGbGs130VjieJSxxmUIklyKCi8gIABzc3MsLS2JjIxk48aNTJ8+nV69evHKK6/QsGFDzpw5Q0pKCpMnT1b2y8nJ4ezZs0py6erqyu7du3nrrbcKlH0EGDp0KGvXruXXX3/VW26IOgQrV65k165d3Lp1i4iIC
BwdHdmxYwc9e/bE0tKSl156iby8PI4dO0bHjh2B/EfiAwYMAMDLywsvLy+mT5+ulIMsLplE/W/GFlOOkQ14eZFk5+QZxXfR2H6mHjDGuAyZLEtyKSq8pUuX0qJFC+Vz165dSU5O5siRI4wcOZI5c+ZQr149atasWeDR8sPGjBnDt99+W2RdcbVazXvvvccnn3yiDOwBaNu2LUlJSfTu3bvYfW7Tpo3eY2w/Pz/8/PwYOHAgOTk5QP40Rjdu3MDNzQ3Ifx80NjaWjh07cu3aNX744QdOnTqlDPS5d+8ee/fuVWqVCyGEEGVBkkthVHJzc0lLS6N9+/a0b9+e1NRUTp48Sbdu3ahUqRLbt2/Hy8sLyB9pbWNjg6Xl34+F/f39lbrihSWYHh4erFmzhkuXLuHg4ADkJ6U+Pj44OzvTtWtXIP/9zpMnTxY5cn3YsGEMGDBArz95eXlKYpmUlMSdO3f4/vvvlXc/r1y5whtvvMGMGTPYtm0br7/+OuHh4UqbO3fuJCYmRpJLocfU3JyNOm1Zd8PgXoSpiISoqCS5FEZFq9USFBTEnTt3UKlU2NraMmXKFMzMzIiKimLevHmsWbMGrVaLlZUVixcvLtDGuHHjlATzyy+/1FtnYmLCpEmTGDdunLKsVatWREVFERERQUhIiN5UREWxsbFh/fr1fPrppyxdupQaNWqg0Wjo3bs3bdu2ZdGiRbi7u+sNKrKxsaF169Z88803bNu2jWnTpum12atXL2bPns2lS5do0KBBKc+gMDaNmzVjwvvTnrxhBWOsjyWNLSbxYlLpDPHCmBCiwpJ3Lv9mbDFt374FtdoUd/dBZd0VgzO2awUSU0VijHEZ8p1LmYpICCGEEEIYjDwWF+IZGjhwIHl5+u+FdejQgdDQ0DLqkRBCCPFsSXIpxDMUFxdX1l0QQgghnitJLoUQwoidP3OGOaNHlnU3DM7YR4sXxtK2HoFhC55jj4QoHUkuHxEZGcndu3cLjMR91L59+6hTpw7t27cv8THi4uI4cOAAS5cuLW03S8zNzY2oqCi9+SBL4vbt22zevJmxY8cqy4KDgxkwYACdO3cusH1QUBAODg4MHz68VMeLiIjgm2++oVatWmzcuLFUbVQk586dIyQkhKtXr2JmZka7du2YPXs2lSpVeux+3333HcuWLePGjRuYmZnRsGFDJk+eTMuWLZ9Tz0V5l5eVxTCVEb5en6sDY4vrCTFtvJz2HDsjROlJcllK+/btw8HBoVTJZUV0+/ZtVq9erZdczp0795kdb+3atRw4cIBatWoVWJeXl4epqXGVtVOr1UyfPp02bdqg1WqZPHkya9asYcKECUXu8/333xMcHMyyZcuUso8nTpzg6tWrklwKIYQoM88tuZwyZQrnzp0jJyeHRo0aMW/ePKpXr05MTIwyl6BarWbFihXUrl2bb7/9lsjISHJzczExMWHBggVYWloyaNAgEhMTAbh06ZLy+cG/hw4dynfffcf9+/cJDw8nOjqaX3/9lUqVKrF8+XKsra0L3J0s6m7l6dOn+eijj7h37x5ZWVkMHToUHx8fvvvuO/bv38/hw4fZunUro0aNwsvLi23btrFx40by8vKwtLRkzpw5NG3alOzsbMLCwkhMTMTGxoamTZs+8Xy5ubnh6enJ4cOHuXr1Kr6+vspdwJYtW/LLL79QpUqVAp+PHj3KwoULyczMBGDq1Km8/PLLem2np6cTFhZGWloaWVlZuLu7K/M2JiUlMXfuXO7evYuFhQXBwcG0b9+e0NBQ7ty5g6enJ5UrVyY6OpoRI0bg6+tLz549uXLlClOnTuXmzZs0aNBAbxBLRkYG8+fP5/Tp02RlZdGlSxemT59eZII4bNgwsrKyeOedd3j55Zd55ZVXmDdvHp07dyY5OZl3330XOzs75s2bx82bN8nJyeGdd95h0KD86Vb27t3Lp59+So0aNejRowdLlizhl19+4ebNm0V+fwAOHjzI559/TnZ2tpLsdezYk
cTERObNm0eHDh04evQoKpWKiIgIpWxkYd/hzz77jIYNGyq1wE+cOMGkSZPYs2eP3tyVDzw8L6WJiQnt27cnJSXlsd+RZcuWMX78eL164m3atHnsPoWR2uL6jCkmqS1uXCpqvfGK2OfiMNa4DOG5JZfBwcHKXaiIiAhWrVpF9+7dWbFiBRs3bsTa2prMzEzMzMw4d+4cM2fOZMOGDTRu3Jjs7Gyys7O5devWY49x69YtXnrpJaZMmcLq1avx8fFh3bp1hIWFMWfOHNavX8+kSZOK3ef69evzxRdfoNFoyMzMZMiQIXTv3p3u3bvj5uam99j3559/5uuvv2bDhg1oNBoOHjzIjBkziI6OZvPmzVy6dImdO3eSm5vL22+/XaxJru/fv6/s6+HhwYABA5SEsqj4J06cSGRkJI6OjuTl5ZGRkVFgu2nTpjF+/HicnJzIzs7Gx8eHdu3a4eTkREBAAPPmzaNr164kJCQQEBDA3r17CQkJYdCgQUWWTwwLC8PJyYmJEydy8eJF+vfvT/fu3QGYP38+Tk5OzJ07F61WS2BgILGxsQwdOrTQtjZu3EjLli2Jjo6mSpUqJCYm8ttvvzFnzhxmzZpFbm4uQ4cOZdGiRdjb25ORkcGgQYPo2LEjNWrUYNasWWzatImmTZuyatWqJ55ngNTUVJYvX86aNWuwtLTk999/Z+zYsRw4cACAM2fOMH/+fEJDQ/n8889Zvnw5n3zyCYmJiYV+h0eMGMG4cePw9fVFpVKxfv16hg0bVmhi+aj79+8TGxurVwe9MCdOnCAkJKRY8T2OzHP5N2OLSWqLG5eKWG/c2H6mHjDGuCpkbfEdO3YQHx9PTk4Od+/epXHjxuTl5eHp6Ym1tTWAkjgdPnyYHj160LhxYwA0Gg0ajeaJyaWFhQWvvPIKkF/vuW7durRu3Vr5fPjw4RL1+f79+8yZM4fTp0+jUqlIT0/n1KlTyh2rh+3fv59Tp04xZMgQAHQ6Hbdv3wYgMTERLy8v1Go1arWa/v3788svvzzx+P369QPy72pVq1aNP//8s9BjP3Ds2DHs7e1xdHQEwNTUlOrVq+ttc/fuXX788Udu3LihLMvMzCQlJYXatWujVquVEoYuLi6o1WrOnTv32KT2QYwzZ84EoGHDhri4uOidm6SkJNauXQvkn1cbG5snxv8wOzs7OnXqBMD58+dJSUnRS75ycnI4e/YsJiYmtGnTRrk7/Oabb+qVSCzKd999R2pqKm+//bayLDc3l2vXrgHQpEkT5a5gx44d+fbbbwE4cOBAod9he3t7GjZsyKFDh+jYsSP79+9n+vTpT+xHbm4ukyZNwtnZmV69ej1xeyGEEKK8eS7J5c8//8ymTZuIjo6mVq1axMfHs2XLliK3L6pokJmZmd66rKwsvfUajUb5t4mJid5nU1NT5VGtqakpWu3ftXYfbeeBTz/9FGtraxYsWICZmRm+vr5FbqvT6Rg0aBDvv/9+seN5EnNz8yL7/6DNh/tTnONotVpUKhUxMTGo1Wq9dadOnSr0zlpx7rY9jk6nY/ny5TRs2LDUbVhYWOi1V7NmzULvou7bt6/INp70/enevTsLFy4ssF9KSkqB71Zubu4T+zxixAg2bdpESkoKr732GlWrPv6vwry8PAIDA6levbqSqD9OmzZtSEpKUv6AEqIwUlu84ijOaHEhKoLnklzevn0bS0tLatSoQXZ2NrGxsQD07NmT4OBgvL29qV27NpmZmajVal5++WU+//xzzp8/r/dYvHbt2uTk5HDhwgXs7OzYuXNnqfrTqFEjoqOj0Wq13L17lwMHDiiPcB92584dWrZsiZmZGb/99hs///wzb7zxBgCWlpbcufP3LXE3NzemTZvGm2++Sd26dcnLy+PkyZM4ODjg4uLCjh076NevH7m5uezcuZN69Ur/S6Jhw4YkJyfj4uJCfHy8srxTp07MnDmTo0eP0qlTJ+Wx+MN3Ly0tLXnppZdYuXKlMljk8uXLmJmZK
e+HHjlyBGdnZ44cOUJubi6NGzfm7t273L9/n9zcXMzMCn5tnJ2diY2NZfz48Vy8eJGEhATlDqibmxsrV65kzpw5mJqacuPGDTIzM0udbDZp0oRKlSqxfft2vLy8gPwE0MbGhk6dOhEcHKx8d7Zu3ars97jvT7du3fjss8/4/fffad68OZD//umTBmwV9R3WaDS4urqyYMECjh8//sTH8w9qopuamjJ37txiJfTvvvsus2bNwsHBgbZt2yp9vnnzJq6urk/cX7wYpLZ4xWGMMYkX03NJLnv06MFXX31F3759sbGxwcHBgeTkZP7xj3/g5+fHqFGjUKlUaDQaoqKiaNy4MR9//DGTJk1SRgYvWLCAli1bEhwczKhRo6hfvz5dunQpVX9ee+01vv76a9zd3bGzs1P+Y37Uu+++y9SpU/nqq69o1KgRTk5Oyrr+/fszffp09uzZowzo+eCDD3j33XfJy8sjJyeHPn364ODgwNChQzl9+jTu7u7UrVsXJycn/vjjj1L1HWDGjBmEhIRgbW2tvAYAUKNGDSIjI1mwYAF3797FxMSEadOmKUneA+Hh4cyfPx8PDw8g/1Hu3Llzsba2ZunSpXoDepYsWaK8luDh4YGHhwfVq1cnOjpar83g4GCmTp3Knj17aNKkCd26ddPr76JFi/D09ESlUqFWq5kxY0apk0szMzOioqKYN28ea9asQavVYmVlxeLFi7GysuLjjz9m3Lhx1KhRgz59+ujtV9T3p3HjxixatIjg4GDu379PTk4Ojo6OT0wui/oO165dGxMTE7y8vDh06BCtWrV6bDuHDh3iq6++okWLFgwcOBAAR0dHZs+eXeQ+PXr0IDQ0lNDQUG7duoWZmRkNGjRgypQpxTmNQgghxDOh0pX2ma0QFcSjo+ufp1GjRjF06FD69u373I9dXDKg52/GFtP27VtQq01xdx9U1l0xOGO7ViAxVSTGGJchB/QY2Qy0QpQPycnJ9O7dm6pVq/L666+XdXeEEEKI50YmUS9DW7duZf369QWWL1iw4IUYpDFu3DguX76st8zW1paoqCiDHuf06dMGba842rVrV+jgopCQEH799Ve9ZaampkXWID958iRBQUEFlg8fPlyZmUAIIYQoT+SxuBAvOHks/jdji0kei1csElPFYYxxVch5Lo1JZGQk/v7+etPTPMzT05PNmzc/sS50aVS0GuEVxZUrVwgMDGTdunVAwWu8ZMkSmjdvrsw9aginT59m6tSpAPz1119kZGRQv359AIYOHYqnpycREREcOnRImZbqjTfe0KumFBERQWpqKpUrV6ZmzZoEBAToDTwT4vyZM8wZPbKsu2FwL+JURI9jaVuPwLAFBu6REKUjyWUpfPbZZ/j6+hZILh9M01NUFZvy4HnXCK8obGxslMQSCl7jwuYvfVotW7ZUvitxcXEcOHCApUuXAvlzeQ4fPpyWLVuya9cuNBoN9+/fV6ZWOn36NP7+/ixcuFCZRuvChQucOnXK4P0UFVteVhbDVEb4en2uDowtrqeIaePlNAN3RojSk+SyhD766CMAvL29MTExoX79+tja2nL+/Hlu3rxJXFyc3ujkF7lGOBRed7t27dps376dNWvWAPnzjoaGhmJlZUVcXBw7d+6katWqnD59GhsbG2bNmsXChQu5cOECDg4OhIeHo1KpCAoKwszMjEuXLnH58mWcnJwICQlBo9Fw7do1Zs+eTWpqKgCjR4/Gy8sLrVZLaGgoR44cQaPRYGFhQXR0tF6d8Uev8bp165g3b55yBzczM5OwsDCSk5OB/Gmp/Pz8gPyJ0x0cHDh27Bjp6en07duXwMDAEn/PEhIS+OOPP/jiiy+Uye4rVarEiBEjAFi1ahWDBw/Wm5/Vzs4OOzu7Eh9LCCGEMCRJLkto9uzZbNy4Ual7HRQUxNGjR1m/fr1eFZmHvag1wouqu/3bb78RHh5OXFwcderUYfHixXz88ccsXrwYyB9pHR8fT926dfH392fKl
CmsX7+eypUrM2DAAL0J2n/99Veio6MxNzfHz8+PLVu2MHz4cMLCwmjevDnLli0jPT2dgQMH0qZNG3Jzc0lISODrr7/GxMSEv/7664nX+FHLly9Hq9USHx9PZmYmb775Ji1btlQmLr98+TIbNmwgMzOT3r17M3jwYKWUaXEdP36cNm3aFKii9MCJEyf05vB8GlZWlqXe15Dv6JQXxhSTWl30H37CuGjUpuX2u1te+/W0jDUuQ5Dk0gD69OlTZGIJL26N8KLqbicmJuLq6kqdOnWA/DuEnp6eyn6Ojo7UrVsXgNatW1O/fn2ldGKrVq24cOGCElu/fv2Udr28vNi7dy/Dhw8nISFBGWVdp04dXF1dlRrveXl5BAcH06VLF3r27PnY81GYhIQEZsyYgUqlwtLSEnd3dxISEpTksk+fPpiYmFC1alXs7e1JTU0tcXL5pHF2hhyHJwN6/mZsMeUY2TuJomjZOXnl8rtrbD9TDxhjXDKgp5x5XGIJUiO8sHYe15dHz1dR5+9J7T56DJVKRdWqVdm1axeJiYkkJCQQHh7Otm3bnrr/D38ubn8fx8HBgY0bNxZZbrNt27YkJSXRu3fvErcthBBCPEuSXJZClSpVyMjIeOqKL8ZeI7youtsuLi6sWrWKq1evYm1tzZYtWwqUqCyuPXv28M4776DRaPjqq6+UO5EuLi5s3ryZgIAArl69ysGDB/Hx8eHGjRuYmprSo0cPunXrxoEDB7h48aJyF/WBx13jrl27EhMTg6OjI5mZmezevVsZ9W0oLi4u1K1blwULFjB16lQ0Gg1ZWVn8+9//xs/PjzFjxuDj44Ozs7Ny7s6ePcvJkydxd3c3aF9ExWZqbs5Gnbasu2FwMlpcn6VtPQP3RojSk+SyFHx9fRk5ciSVKlVSpo4pDWOvEV5U3e3mzZszZcoUfH19gfwkOzQ0tFTn0MnJiQkTJpCWloaTk5Py/ufMmTMJCQlRzk1gYCDNmzfn+PHjzJo1i9zcXPLy8ujRowcdO3YkLU1/pOXD1/jhUeQA48eP5+OPP1ba7t+/Pz169ChV/4uiUqlYvXo1n3zyCf369aNy5coAyjFbtWpFVFQUERERhISE6E1FJMTDGjdrxoT3p5V1NwzOWB9LGltM4sUkk6iLCutFmYPzWZN3Lv9mbDHJJOoVi8RUcRhjXFJbXAghhBBClEvyWFw8tedVI/xRCxZUnGoUZXWOhBBCiOdNkkvx1CRBejI5R0IIIV4UklwKIYQRk9riFcfTxiT1xUV5IcllCURGRuLv71+gpvgDnp6ebN68mUqVKhn82G5ubkRFRdGiRYtS7X/79m02b96sV1M8ODiYAQMG0Llz5wLbvyiDZa5cuUJgYKAyIvzRa7xkyRKaN2+uTIRvSFevXmXRokX897//pXLlypiZmTFs2DBlxPt3333HsmXLuHHjBmZmZjRs2JDJkyfTsmVLcnJyiIqKYufOnZiZmWFmZoadnR0BAQE0a9bM4H0VFZfUFq9AnjImqS8uygtJLkvgs88+w9fXt0By+WDOyKJKKpYHt2/fZvXq1XrJ5dy5c8uwR+WDjY2N3lRDj17j999//5kc9969ewwfPpyBAweyYMECTExMuH37Nrt37wbg+++/Jzg4mGXLltGuXTsgv+Tj1atXadmyJdOnT+f+/fts3bqVatWqodPp2LNnDykpKZJcCiGEKFOSXBbTRx99BOSXKjQxMaF+/frY2tpy/vx5bt68SVxcHC1btuSXX36hSpUquLm54enpyeHDh7l69Sq+vr7KXcCHt3v089GjR1m4cCGZmZkATJ06lZdfflmvL+np6YSFhZGWlkZWVhbu7u6MGzcOgKSkJL35LYODg2nfvj2hoaHcuXMHT09PKleuTHR0NCNGjMDX15eePXty5coVpk6dys2bN2nQoIFeVZmMjAzmz5/P6dOnycrKokuXLkyfPh1T06LrFsfExPDll18CoFarWbFiBbVr12b79u2sWbMGgEaNG
hEaGoqVlRVxcXHs3LmTqlWrcvr0aWxsbJg1axYLFy7kwoULODg4EB4ejkqlIigoCDMzMy5dusTly5dxcnIiJCQEjUbDtWvXmD17NqmpqQCMHj0aLy8vtFotoaGhHDlyBI1Gg4WFBdHR0Vy6dIlBgwaRmJhY4BqvW7eOefPmKXdwMzMzCQsLIzk5Gcif39LPzw+AESNG4ODgwLFjx0hPT6dv374EBgYWeX527txJjRo18Pf3V5ZVq1YNb29vAJYtW8b48eOVxBKgTZs2AJw/f559+/Zx8OBBqlWrBuTPi9m3b98ijyeEEEI8L5JcFtPs2bPZuHEj0dHRVKlShaCgII4ePcr69euLLP94//59Nm/ezKVLl/Dw8GDAgAGPrepz69YtJk6cSGRkJI6OjkplnkdNmzaN8ePH4+TkRHZ2Nj4+PrRr1w4nJycCAgKYN28eXbt2JSEhgYCAAPbu3UtISAiDBg0q8u5qWFgYTk5OTJw4kYsXL9K/f3+6d+8OwPz583FycmLu3LlotVoCAwOJjY1VHt8+KjExkRUrVrBx40asra3JzMzEzMyM3377jfDwcOLi4qhTpw6LFy/m448/ZvHixQAkJycTHx9P3bp18ff3Z8qUKaxfv57KlSszYMAAvWpBv/76K9HR0Zibm+Pn58eWLVsYPnw4YWFhNG/enGXLlpGens7AgQNp06YNubm5JCQk8PXXX2NiYsJff/31xGv8qOXLl6PVaomPjyczM5M333yTli1bKjXFL1++zIYNG8jMzKR3794MHjy4yJrix48fp3379oWug/y7lCEhIUWus7OzK1BvvrSsrCxLva8h50UrL4wpJrW66D8AhfHRqE3L5fe3PPbJEIw1LkOQ5PIp9OnT57F1xR+8p9egQQOqVavGn3/+ib29fZHbHzt2DHt7exwdHYH8utSPJhB3797lxx9/5MaNG8qyzMxMUlJSqF27Nmq1WknAXFxcUKvVnDt37omlKhMTE5k5cyaQXzHHxcVFWbd//36SkpJYu3YtkJ8029jYFNnWgQMH8PT0xNraGkA5dmJiIq6urkqpRW9vbzw9PZX9HB0dqVu3LgCtW7emfv36VK2a/8PbqlUrLly4oMTWr18/pV0vLy/27t3L8OHDSUhIICgoCIA6derg6upKYmIiXl5e5OXlERwcTJcuXZQykSWRkJDAjBkzUKlUWFpa4u7uTkJCgpJc9unTBxMTE6pWrYq9vT2pqalFJpeGrF1w5swZpkyZwv379+nevbtyHYtLJlH/m7HFlGNkA17E42Xn5JW776+x/Uw9YIxxGTJZluTyKTwusQQwNzdX/m1qaqo8ajY1NVWSi6ysLGWb4iQcWq0WlUpFTEwMarVab92pU6dQqVQF9ilsWUnodDqWL19eZJnHkrTzuL48er6KOn9PavfRY6hUKqpWrcquXbtITEwkISGB8PBwtm3b9tT9f/hzcfsL4ODgQGxsbJHr27RpQ1JSEq1bty503YULF7h9+zbVqlWjWbNm7Nixg/Xr1/O///2vJCEJIYQQBifJZQlUqVKFjIyMJ94FfJKGDRuSnJyMi4sL8fHxyvJOnToxc+ZMjh49SqdOnZTH4g/fvbS0tOSll15i5cqVTJgwAch/HGtmZkbTpk3Jzs7myJEjODs7c+TIEXJzc2ncuDF3797l/v37yuCjRzk7OxMbG8v48eO5ePGi3iNoNzc3Vq5cyZw5czA1NeXGjRtkZmYWmWz27NmT4OBgvL29qV27NpmZmajValxcXFi1ahVXr17F2tqaLVu2FKiXXlx79uzhnXfeQaPR8NVXXyl3Il1cXNi8eTMBAQFcvXqVgwcP4uPjw40bNzA1NaVHjx5069aNAwcOcPHiReUu6gOPu8Zdu3YlJiYGR0dHMjMz2b17N1OnTi1V/93d3Vm1ahWrV69m9OjRqFQqbt++zbZt23jnnXd49913mTVrFg4ODrRt2xbIf5/25s2buLq60qtXL2bOnMncuXOVu7t37
94tVV+EcTM1N2ejTlvW3TA4mYqoIEvbegbsjRClJ8llCfj6+jJy5EgqVapE/fr1S93OjBkzCAkJwdramldeeUVZXqNGDSIjI1mwYAF3797FxMSEadOmFUjAwsPDmT9/Ph4eHkB+QjR37lysra1ZunSp3oCeJUuWoNFo0Gg0eHh44OHhQfXq1YmOjtZrMzg4mKlTp7Jnzx6aNGlCt27d9Pq7aNEiPD09UalUqNVqZsyYUWRy+Y9//AM/Pz9GjRqFSqVCo9EQFRVF8+bNmTJlCr6+vkB+kh0aGlqqc+jk5MSECRNIS0vDyclJef9z5syZhISEKOcmMDCQ5s2bc/z4cWbNmkVubi55eXn06NGDjh07kpamP3XHw9f44VHkAOPHj+fjjz9W2u7fvz89evQoVf8tLCxYt24dixYtolevXlSpUkWZigigR48ehIaGEhoayq1btzAzM6NBgwZMmTIFyH8Pdvny5QwePBgzMzOqVatGnTp1lAFGQjzQuFkzJrw/ray7YXDG+ljS2GISLyaVzpAvfwnxHLwoc3A+L/LO5d+MLabt27egVpvi7j6orLticMZ2rUBiqkiMMS5DvnNpZDPQCiGEEEKIsiSPxUWpjRs3jsuXL+sts7W1feZ1tBcsqDjlzcrqHAkhhBBlRZJLUWqSID2ZnCMhhBAvGkkuhRDCiJ0/c4Y5o0eWdTcMTkaLVwzGGBOUXVyWtvUIDCv/T+8kuRQVmpubmzIaXqvV8u677+Lu7l6qdqKiomjRogX37t1j4sSJ1KlTh7CwMIKDg9m1axfffPMN9erlT/Xx6KCipKQkIiIiSE1NpXLlytSsWZOAgACcnJyKPOa5c+cIDw/n1KlTVK5cGQsLC/z8/OjduzeQXyLyX//6FxkZGVhaWmJhYcHo0aP1JoA/ePAgfn5+LFu2TNlPiIflZWUxTGWEr9fn6sDY4pKYKo4yimvj5bQnb1QOSHIpKrylS5fSokULTpw4gbe3Ny4uLtSqVatUbd25cwc/Pz/atm1LcHCwMkm6tbU1kZGRzJ8/v8A+p0+fxt/fn4ULFyolMy9cuMCpU6eKPE56ejrDhw/nww8/ZNmyZcqyw4cPA7B161bWrl1LZGSkUtXp+PHj/PDDD3rJZWxsLM7OzsTExEhyKYQQolyQ5FIYjTZt2lClShViYmLYtm2bcjdz8eLF2Nvbc/bsWebNm8fNmzfJycnhnXfeYdCgv6douX79OlOnTqVnz568//77em17e3uzfv16zpw5Q7NmzfTWrVq1isGDByuJJYCdnR12dnZF9nXDhg106dIFLy8vZVmdOnWUz5GRkcydO1evXGjbtm2VCdUBbt68qdRLd3d3VyanLympLa7PmGKS2uJCGJfyWj/+UZJcCqNx5MgRsrKyWLFiBTt37sTW1pbs7Gzy8vLIzc0lMDCQRYsWYW9vT0ZGBoMGDaJjx45KAvfBBx8wbNiwAokl5E967u/vT0REhHKn8YETJ07Qp0+fEvX1xIkTehPVP+z69etcuXKFDh06PLaNHTt20LNnT2rXrs2rr77K9u3bGTt2bIn6kX88mefyAWOLSWqLC2FcnmX9eJnnUoiHBAQE4OnpSWRkJJGRkTg7OzN9+nTWrVvHlStXqFy5MufPnyclJYXJkyfj6enJ22+/TU5ODmfPnlXacXV1Zffu3aSnpxd6nKFDh3L69Gl+/fVXveWlqUNQ0n28vb1544039O60xsXFMWDAAAAGDBjw2FrlQgghxPMidy5FhffgncsHunbtSnJyMkeOHGHkyJHMmTOHevXqUbNmTXbs2FFkO2PGjOHbb79lxIgRrFu3rkDdcbVazXvvvccnn3yiDOyB/MfVSUlJJXrnsW3btiQnJxe6zsrKChsbG5KTk5W7m9HR0fz222+MGzcOgOTkZFJSUggODlb2S09P55dffsHR0bHY/RDGT2qLVxwSU8VRlqPFKwJJLoVRyc3NJS0tjfbt29O+fXtSU1M5efIk3bp1o1KlS
mzfvl15rzElJQUbGxssLf9+59Df3x+dTldkgunh4cGaNWu4dOkSDg4OQH5S6uPjg7Ozs1IH/uzZs5w8ebLIkevDhg1jwIABxMfHK7XKr1y5wsGDBxk6dCgTJkxg/vz5REZG0qRJEwDu3bun7B8bG8uYMWOYNGmSsmzFihXExsZKcin0SG3xikNiqjiMNS5DkeRSGBWtVktQUBB37txBpVJha2vLlClTMDMzIyoqinnz5rFmzRq0Wi1WVlYsXry4QBvjxo1TEswvv/xSb52JiQmTJk1S7iACtGrViqioKCIiIggJCdGbiqgoNjY2rFu3jvDwcBYvXoyFhQUWFhbKO5NvvvkmlSpVYsqUKWRkZFCrVi0qV65MUFAQWVlZ7N69m02bNum1+cYbb9C/f3+Cg4OxsLB4irMohBBClJ5KV5oXxoQQRkMG9PzN2GLavn0LarUp7u6DnrxxBWNs1wokporEGOOSAT1CCCGEEKJcksfiQjxDAwcOJC9P/6XvDh06EBoaWkY9EkIIIZ4tSS6FeIbi4uLKugtCCCHEcyXJZQlERkbi7++PRqMpdL2npyebN2+mUqVKBj/2w7WvS+P27dts3rxZb5Lt4OBgBgwYQOfOnQts/2jtbGN15coVAgMDWbduHVDwGi9ZsoTmzZvTr18/gx/76tWrLFq0iP/+979UrlwZMzMzhg0bxtChQwH4/vvvWb58Oenp6VSrVg1TU1PeeustBg4cqLSRkpJCv379mD59Oj4+Pgbvo6j4zp85w5zRI8u6GwZnjFPcSEwVR3mLy9K2HoFhC8q6GwpJLkvgs88+w9fXt0BymZubi5mZ2WPnUCxrt2/fZvXq1XrJ5dy5c8uwR+XDg1HbDzx6jQur1mMI9+7dY/jw4QwcOJAFCxZgYmLC7du32b17N5CfWM6YMYOlS5fSsWNHAFJTU4mJidFrJyYmBmdnZ2JjYyW5FIXKy8pimMoIX6/P1YGxxSUxVRzlLK6Nl9PKugt6JLkspo8++gjIr5RiYmJC/fr1sbW15fz589y8eZO4uDhatmzJL7/8QpUqVXBzc8PT05PDhw9z9epVfH19lbuAD2/36OejR4+ycOFCMjMzAZg6dSovv/yyXl/S09MJCwsjLS2NrKws3N3dlalxkpKSmDt3Lnfv3sXCwoLg4GDat29PaGgod+7cwdPTk8qVKxMdHc2IESPw9fWlZ8+eXLlyhalTp3Lz5k0aNGig955gRkYG8+fP5/Tp02RlZdGlSxemT5+OqWnRdYtjYmKUaXzUajUrVqygdu3abN++nTVr1gDQqFEjQkNDsbKyIi4ujp07d1K1alVOnz6NjY0Ns2bNYuHChVy4cAEHBwfCw8NRqVQEBQVhZmbGpUuXuHz5Mk5OToSEhKDRaLh27RqzZ88mNTUVgNGjR+Pl5YVWqyU0NJQjR46g0WiwsLAgOjqaS5cuMWjQIBITEwtc43Xr1jFv3jzlDm5mZiZhYWHK5Of9+/fHz88PgBEjRuDg4MCxY8dIT0+nb9++BAYGFnl+du7cSY0aNfD391eWVatWDW9vbwCWLVvG+PHjlcTywfmaPHmy8jk3N5f4+Hg2bNjA2LFjSU5Opl27dkUeUwghhHgeJLksptmzZ7Nx40aio6OpUqUKQUFBHD16lPXr1xc5p+D9+/fZvHkzly5dwsPDgwEDBigJZWFu3brFxIkTiYyMxNHRkby8PDIyMgpsN23aNMaPH4+TkxPZ2dn4+PjQrl07nJycCAgIYN68eXTt2pWEhAQCAgLYu3cvISEhDBo0qMi7q2FhYTg5OTFx4kQuXrxI//796d69OwDz58/HycmJuXPnotVqCQwMJDY2Vnl8+6jExERWrFjBxo0bsba2JjMzEzMzM3777TfCw8OJi4ujTp06LF68mI8//liZazI5OZn4+Hjq1q2Lv78/U6ZMYf369VSuXJkBAwaQkJCgTFL+66+/Eh0djbm5OX5+fmzZsoXhw4cTFhZG8+bNWbZsGenp6
QwcOJA2bdqQm5tLQkICX3/9NSYmJvz1119PvMaPWr58OVqtlvj4eDIzM3nzzTdp2bIlrq6uAFy+fJkNGzaQmZlJ7969GTx4MI0bNy70HB0/fpz27dsXug7ya4+HhIQUuR7gwIED2NnZYWdnp5R/LE1yaWVl+eSNimDIqSvKC2OKSa0u+g9AIYTx0KhNy9XvLkkun0KfPn0eO1n1g/f0GjRoQLVq1fjzzz+xt7cvcvtjx45hb2+vVFgxNTWlevXqetvcvXuXH3/8kRs3bijLMjMzSUlJoXbt2qjVaiUBc3FxQa1Wc+7cuccmtZCfEM6cOROAhg0b4uLioqzbv38/SUlJrF27FshPmm1sbIps68CBA3h6emJtbQ2gHDsxMRFXV1el6o23tzeenp7Kfo6OjtStWxeA1q1bU79+fapWzf9hadWqFRcuXFBi69evn9Kul5cXe/fuZfjw4SQkJBAUFARAnTp1cHV1JTExES8vL/Ly8ggODqZLly707NnzseejMAkJCcyYMQOVSoWlpSXu7u4kJCQoyWWfPn0wMTGhatWq2Nvbk5qaWmRyWdLpZQMCAjh//jzXr1/nhx9+APLvDj+oLe7l5YWXlxfTp0/H3Ny8RG3LPJd/M7aYcsrRO2FCiGcnOyfvqX93GTI5leTyKTypCsrD/8mbmpoqj5pNTU2V5CIrK0vZpjgJh1arRaVSERMTg1qt1lt36tQpVCpVgX0KW1YSOp2O5cuX07Bhw6du53F9efR8FXX+ntTuo8dQqVRUrVqVXbt2kZiYSEJCAuHh4Wzbtu2p+//w5+L2F8DBwYHY2Ngi17du3Zrk5GRat24N5NdPz8zMVP7wuHbtGj/88AOnTp1i+fLlQP57nHv37lXKSQohhBBlQZLLEqhSpQoZGRlPvAv4JA0bNiQ5ORkXFxfi4+OV5Z06dWLmzJkcPXqUTp06KY/FH757aWlpyUsvvcTKlSuZMGECkP841szMjKZNm5Kdnc2RI0dwdnbmyJEj5Obm0rhxY+7evcv9+/eVwUePejAoZPz48Vy8eFHvEbSbmxsrV65kzpw5mJqacuPGDTIzM4tMNnv27ElwcDDe3t7Url2bzMxM1Go1Li4urFq1iqtXr2Jtbc2WLVuUY5TUnj17eOedd9BoNHz11VfKnUgXFxc2b95MQEAAV69e5eDBg/j4+HDjxg1MTU3p0aMH3bp148CBA1y8eLFA7fDHXeOuXbsSExODo6MjmZmZ7N69m6lTp5aq/+7u7qxatYrVq1czevRoVCoVt2/fZtu2bbzzzjuMHz+eWbNm0apVK+Xx+cO1xbdt28brr79OeHi4smznzp3ExMRIcin0mJqbs1GnLetuGFx5G61rCBJTxVHe4rK0rVfWXdAjyWUJ+Pr6MnLkSCpVqkT9+vVL3c6MGTMICQnB2tqaV155RVleo0YNIiMjWbBgAXfv3sXExIRp06YVSMDCw8OZP3++kkRUqVKFuXPnYm1tzdKlS/UG9CxZsgSNRoNGo8HDwwMPDw+qV69OdHS0XpvBwcFMnTqVPXv20KRJE7p166bX30WLFuHp6YlKpUKtVjNjxowik8t//OMf+Pn5MWrUKFQqFRqNhqioKJo3b86UKVPw9fUF8pPs0k4m7uTkxIQJE0hLS8PJyUl5/3PmzJmEhIQo5yYwMJDmzZtz/PhxZs2aRW5uLnl5efTo0YOOHTuSlqY/wu7ha/zwKHKA8ePH8/HHHytt9+/fnx49epSq/xYWFqxbt45FixbRq1cvqlSpokxFBNCjRw9CQ0OZN28eV69epXbt2mg0GuV8bdu2jWnTpum12atXL2bPns2lS5do0KBBqfoljE/jZs2Y8P60J29YwRjbKwwgMVUkxhqXoUhtcVHhvChzcD4vN29mluqdSysrS65fLzjgrCIztpj27t2FWm1Kz559yrorBmds1wokporEGON6msGdj5I7l0K84GrWLP1rHob8ZVReGFNMb731Zll34Zkypmv1gMRUc
RhrXIYgdy5FqY0bN47Lly/rLbO1tSUqKqqMelT+yDkSQgjxopHkUgghhBBCGEz5qV0khBBCCCEqPEkuhRBCCCGEwUhyKYQQQgghDEaSSyGEEEIIYTCSXAohhBBCCIOR5FIIIYQQQhiMJJdCCCGEEMJgJLkUQhTq3r17fPDBB7z66qv06dOHb7/9ttDtrly5wogRI3jppZcYOHBggfVbtmzh1VdfpXfv3oSGhqLVap9114tU3Jig6H4nJibSoUMHPD098fT0ZMiQIc+r+3rOnTvHm2++yeuvv86bb77J+fPnC2yTl5fHRx99RO/evXn11VfZunVrsdaVlaeNKTIyEhcXF+XafPTRR8+x90UrTlzff/89AwcOxMHBgX/+85966yrqtXpcTOXxWhUnpmXLluHu7k7//v0ZOHAg3333nbKuPF4nePq4SnWtdEIIUYjIyEjdjBkzdDqdTnfu3Dld165ddRkZGQW2u337tu7HH3/U7d+/XzdgwAC9dampqbru3bvrrl+/rsvLy9P5+vrqtm3b9jy6X6jixvS4fh85cqRAnGVhxIgRuu3bt+t0Op1u+/btuhEjRhTYZtu2bTpfX19dXl6e7vr167ru3bvrLl68+MR1ZeVpY1q6dKluwYIFz7XPxVGcuM6fP687fvy47tNPPy0QQ0W9Vo+LqTxeq+LEdOjQId3du3d1Op1Od/LkSd1LL72ku3fvnk6nK5/XSad7+rhKc63kzqUQolBff/013t7eADRu3BgHBwcOHTpUYLuqVavi5OSEhYVFgXXffPMNvXv3platWpiYmDBkyBB27979zPtelOLGVN76/ajr169z4sQJ3njjDQDeeOMNTpw4wY0bN/S22717N0OGDMHExIRatWrRu3dv9uzZ88R1ZcEQMZVHxY3Lzs6ONm3aYGZmVqCN8hazIWIqb4obU/fu3alcuTIALVu2RKfTcevWLaD8XScwTFylIcmlEKJQaWlp1K9fX/lsa2vLn3/+WaI2Ll++TL169ZTP9erVK1Br/XkqbkxP6vf58+cZMGAAQ4YMYdu2bc+204W4fPkyNjY2mJqaAmBqakqdOnUKnNtH43g43setKwuGiAlg165deHh44Ovry9GjR59P5x+juHE9qY2KeK2epDxdq9LEtH37dho1akTdunWVNsrTdQLDxAUlv1bl/88JIcQzMWDAANLS0gpdd/jw4efcG8N4HjG1bduWgwcPUrVqVS5evMioUaOwsbGha9euBmlflJ63tzfjxo1DrVbzww8/MH78eHbv3k3NmjXLumviERX9Wv34448sWbKEf/3rX2XdFYMqLK7SXCtJLoV4QT3pjlu9evX4448/qFWrFpD/F3CXLl1KdAxbW1u9ZC8tLQ1bW9uSd7aYDBXT4/ptaWmpLG/YsCG9e/fml19+ea7Jpa2tLVeuXCEvLw9TU1Py8vJIT08vcG4fxNG+fXtA/87K49aVBUPEZG1trWzXrVs3bG1t+f333/nHP/7x/AJ5RHHjelIbFfFaPU55u1Ylieno0aN8+OGHLF++nKZNm+q1UZ6u04M+PW1cpblW8lhcCFGoPn36sHnzZiD/MXBycjLdu3cvURuvv/46+/bt48aNG2i1WrZu3Urfvn2fRXeLpbgxPa7f6enp6HQ6AG7dusUPP/xAq1atnl8QgJWVFa1bt2bnzp0A7Ny5k9atWytJ8wN9+vRh69ataLVabty4wb59+3j99defuK4sGCKmK1euKNudPHmSP/74gyZNmjy/IApR3Lgep6Jeq8cpb9equDElJSUxadIkli5dStu2bfXWlbfrBIaJqzTXSqV78FtSCCEecvfuXYKCgjh58iQmJiZ8+OGH9O7dG4AlS5ZQp04d3nrrLfLy8ujZsyfZ2dlkZGRQq1YthgwZwnvvvQdAdHQ0q1evBvL/6g0JCVHe/ymvMT2u3+vXr2fTpk2YmZmRl5eHp6cnY8eOfe6xpKSkEBQUxO3bt6lWrRr//Oc/adq0KWPHj
iUgIIB27dqRl5dHaGgoP/zwAwBjx47lzTffBHjsurLytDFNmzaN48ePY2JiglqtJiAgAFdX17IMCSheXD///DOTJ08mIyMDnU5H1apVmTt3Lt27d6+w1+pxMZXHa1WcmAYNGsQff/yBjY2Nst/ChQtp2bJlubxO8PRxleZaSXIphBBCCCEMRh6LCyGEEEIIg5HkUgghhBBCGIwkl0IIIYQQwmAkuRRCCCGEEAYjyaUQQgghhDAYSS6FEEKIp/Sf//wHV1dXOnXqxIkTJzh79ixeXl506tSJL7/8kpCQEJYtW/bEdsaMGVMmJUWFMCSZikgIIQQAbm5uXLt2TW8e0j179ujNfVeaNsPCwspFeczs7GxWrFhBfHw86enp1KpViy5dujBhwgQaNGjwVG337t2boKAgZd7UGTNmYGlpyYwZMwzR9RKLi4tj69atbNq0qUyOL15sUv5RCCGEIioqqlwkgg/k5uZiZmaY/6oCAgK4cuUK4eHhtGnThnv37vHVV1+RkJDAkCFDnqrttLQ0mjdvrvfZ3d39abssRIUkj8WFEEI81p07d5gxYwYvv/wy3bt3JyIigry8PABSU1MZOXIkXbp0oUuXLkyZMoXbt28D8OGHH5KWlsa4cePo1KkTq1atIjExkR49eui17+bmxuHDhwGIjIwkICCAwMBAHB0d2bZt22OPf+HCBYYPH85LL71Ely5d+OCDDwqN4fDhwxw+fJjly5fTvn17zMzMqFq1Km+//baSWF65coVx48bxj3/8g1dffZUtW7Yo+2u1WlauXEnv3r3p0qUL77//Prdu3SI7O5tOnTop1Zp69+7NyJEjSUxMJDQ0lE6dOnHu3DmCgoKIiIhQ2tu3bx+enp44OjrSu3dvDh06BMCIESPYunWrsl1MTAx9+/bFycmJ0aNH88cffyjrWrZsyaZNm3jttddwcnLio48+QqfTkZKSwuzZszl27BidOnWic+fOABw8eJB+/frRqVMnunfvzpo1a0r+ZRCiGCS5FEII8VjTpk3DzMyMvXv3sn37dn744QclAdLpdPj7+/Pdd9/x9ddf8+effxIZGQnAokWLqFevHlFRURw9erTYZTL/7//+jz59+vDzzz/j4eHx2OMvWbKEbt268dNPP3Ho0CGGDx9eaJuHDx+mffv22NraFnncKVOmULduXb777juWLl3Kp59+SkJCAgBffvkl+/btY/369Xz33XdUr16d0NBQNBoNR48eBWDHjh3s27ePL7/8ks6dOxMSEsLRo0cL1GFOSkpi2rRpTJ06lZ9//pkNGzZQv379Av3Zt28fK1as4LPPPiMhIYGXXnqJKVOm6G1z4MABYmJi2LFjB19//TXfffcd9vb2fPTRR3Ts2JGjR4/y888/AxAcHExoaChHjx5l586dODs7F+t6CFFSklwKIYRQTJgwgc6dO9O5c2fGjx/PtWvXOHToEDNmzMDCwgIrKyt8fHzYtWsXAHZ2dnTr1g2NRkOtWrUYNWoUP/3001P1oWPHjvTu3RsTExMyMjIee3wzMzPS0tJIT0/H3NxcuUv3qFu3bmFtbV3kMS9fvsx///tfAgMDMTc3p3Xr1gwZMoQdO3YAsHnzZiZNmkTdunXRaDRMnDiRb775htzc3BLHFxMTw6BBg+jWrRsmJibY2Nhgb29fYLvo6Gj8/Pywt7fHzMyMcePGcfLkSb27l2PHjqVatWrUq1ePLl26cOrUqSKPa2ZmxpkzZ8jIyKB69eq0bdu2xH0XojjknUshhBCKZcuW6b1zmZSURG5uLi+//LKyTKvVKncAr1+/TlhYGD///DOZmZnodDqqVav2VH2oW7eu8u+0tLTHHv/DDz9kyZIlDB48mOrVqzNq1CgGDx5coM0aNWpw/vz5Io+Znp5O9erVsbS0VJbVq1eP//3vf0o/JkyYgInJ3/dkTExMuH79eokHPF2+fBlXV9cnbpeWlsa8efP45z//qSzT6XRcuXJFudP5cMJcuXJlMjMzi
2xv6dKlfP7553zyySe0bNmSKVOm0KlTpxL1XYjikORSCCFEkR7cqTty5EihA2s++eQTVCoVX331FTVr1mTfvn2EhoYW2V7lypW5f/++8jkvL48bN27obaNSqYp9fGtra8LCwgD4+eefGTVqFE5OTtjZ2elt17VrV7788kv+/PNPveT1gTp16vDXX3+RkZGhJJiXL19WEse6desyb948XnrppSJjKy5bW1tSU1OLtd24cePo379/iY/x8Dl8oH379nz++efk5OSwYcMGPvjgAw4ePFjitoV4EnksLoQQokh16tShW7duLFiwgIyMDLRaLampqfz4448AZGZmYmFhQbVq1bhy5QqrV6/W27927dpcvHhR+dykSROysrI4cOAAOTk5fP7552RnZ5f6+A/e8wSoXr06KpVK7+7iA127dqVr165MmDCB//3vf+Tm5pKRkcGmTZuIiYnB1taWTp068emnn5KVlcWpU6eIiYnBw8MDgLfeeovFixcrj6Rv3LjBvn37SnVOBw8eTFxcHAkJCWi1Wq5cuUJKSkqB7by9vVm5ciW///47kD+w6uuvvy7WMaysrLhy5YpybrOzs/nqq6+4c+cOarWaKlWq6E05JYQhSXIphBDisRYuXEhOTg79+vXDycmJgIAArl69CsDEiRM5ceIEnTt3xs/Pj9dee01vXz8/Pz7//HM6d+7MmjVrqFq1KrNnz2bmzJn06NGDypUrF3onsbjHT05OZsiQIXTq1Il3332X4OBgGjZsWGg7S5cuxdXVlUmTJtG5c2c8PDz43//+p7wG8Omnn/LHH3/QvXt3Jk6cyHvvvUe3bt0AGDlyJG5ubvj6+tKpUyeGDh1KUlJSqc5n+/btmT9/vnIndPjw4aSlpRXY7tVXX2XMmDFMnjwZR0dH3njjDWVU+ZM4OzvTrFkzXn75Zbp06QLkDzhyc3PD0dGR6OhoFi5cWKr+C/EkMom6EEIIIYQwGLlzKYQQQgghDEaSSyGEEEIIYTCSXAohhBBCCIOR5FIIIYQQQhiMJJdCCCGEEMJgJLkUQgghhBAGI8mlEEIIIYQwGEkuhRBCCCGEwUhyKYQQQgghDOb/AX6WQJf/Q8lMAAAAAElFTkSuQmCC","text/plain":["
"]},"metadata":{},"output_type":"display_data"},{"data":{"text/plain":["
"]},"metadata":{},"output_type":"display_data"}],"source":["ml.features_importances_plot(\n"," classifier=best_linear_svm_model, \n"," model_name='linear_svm',\n"," column_to_plot=0,\n"," top_features=30, \n"," show=True, \n"," path_save=None\n",")"]},{"cell_type":"markdown","metadata":{},"source":["We've reached the end of the machine learning pipeline. As we could see, the implemented descriptors were used as features for all the models, obtaining great performances for all of them."]}],"metadata":{"kernelspec":{"display_name":"Python 3.8.13 ('dna-conda')","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.8.13"},"orig_nbformat":4,"vscode":{"interpreter":{"hash":"c0bcc559ef33487cab7f2a2558a52d71f1a0ee6852f4c5d070acd011fceb9d44"}}},"nbformat":4,"nbformat_minor":2} diff --git a/src/propythia/DNA/outputs/essential_genes/tmp b/src/propythia/DNA/outputs/essential_genes/tmp new file mode 100644 index 0000000..e69de29 diff --git a/src/propythia/DNA/outputs/h3/tmp b/src/propythia/DNA/outputs/h3/tmp new file mode 100644 index 0000000..e69de29 diff --git a/src/propythia/DNA/outputs/primer/tmp b/src/propythia/DNA/outputs/primer/tmp new file mode 100644 index 0000000..e69de29 diff --git a/src/propythia/DNA/read_sequence.py b/src/propythia/DNA/read_sequence.py new file mode 100644 index 0000000..6cbcf85 --- /dev/null +++ b/src/propythia/DNA/read_sequence.py @@ -0,0 +1,137 @@ +""" +######################################################################################### + +A class used for reading sequences. +The main objective is to create dataframes with valid sequences to calculate descriptors. 
+ +Authors: João Nuno Abreu +Date: 03/2022 +Email: + +######################################################################################### +""" + +from utils import checker, checker_cut +import pandas as pd +import os +from Bio.SeqIO.FastaIO import SimpleFastaParser + +class ReadDNA: + def __init__(self): + pass + + def read_sequence(self, sequence: str) -> str: + """ + Reads a sequence, checks if it's valid and returns a dataframe with the sequence. + """ + if checker(sequence): + return sequence.strip().upper() + else: + raise ValueError("Error! Invalid character in sequence:", sequence) + + def read_fasta(self, filename: str, with_labels: bool = False) -> pd.DataFrame: + """ + Reads the input file in fasta format. + It always reads sequences, and labels if the user wants. + If the user wants the labels, he must specify the with_labels parameter as True and the FASTA format must be the following: + >sequence_id1,label1 + ACTGACTG... + >sequence_id2,label2 + ACTGACTG... + """ + labels = [] + sequences = [] + + if not os.path.isfile(filename): + raise ValueError("Error! File does not exist:", filename) + + if 'fasta' not in filename: + raise ValueError("Error! File must be in fasta format:", filename) + + with open(filename) as handle: + for key, sequence in SimpleFastaParser(handle): + # get label and check if it's valid + if with_labels: + label = int(key.split(',')[1]) + if(label not in [0,1]): + raise ValueError("Error! Label must be either 0 or 1 and it is:", label) + else: + labels.append(label) + + # get sequence and check if it's valid + sequence = sequence.strip().upper() + if checker(sequence): + sequences.append(sequence) + else: + raise ValueError("Error! 
Invalid character in sequence:", key) + + # add labels to result if the user wants + if with_labels: + return pd.DataFrame(list(zip(sequences, labels)), columns=['sequence', 'label']) + else: + return pd.DataFrame(sequences, columns=['sequence']) + + + def read_csv(self, filename: str, with_labels: bool = False) -> pd.DataFrame: + """ + Reads the input file in csv format. + It always reads sequences, and labels if the user wants. + There must be a column with the sequence. + If the user wants the labels, he must specify the with_labels parameter as True and the column with the labels must be named "label". + """ + + if not os.path.isfile(filename): + raise ValueError("Error! File does not exist:", filename) + + if 'csv' not in filename: + raise ValueError("Error! File must be in csv format:", filename) + + dataset = pd.read_csv(filename) + + # check column names + if 'sequence' not in dataset.columns: + raise ValueError("The dataset must always have the column 'sequence'") + if with_labels and 'label' not in dataset.columns: + raise ValueError("Since with_labels is True, the dataset must have the column 'label'") + + # get sequences and labels + sequences = dataset['sequence'].to_list() + + if with_labels: + labels = dataset['label'].to_list() + + # check if sequences are valid + valid_sequences = [] + for sequence in sequences: + if checker(sequence) and "cut" not in filename: + valid_sequences.append(sequence.strip().upper()) + elif checker_cut(sequence) and "cut" in filename: + valid_sequences.append(sequence.strip().upper()) + else: + raise ValueError("Error! Invalid character in sequence:", sequence) + + # check if labels are valid + valid_labels = [] + if with_labels: + for label in labels: + if(label not in [0,1]): + raise ValueError("Error! 
Label must be either 0 or 1 and it is:", label) + else: + valid_labels.append(label) + + # add labels to result if the user wants + if with_labels: + return pd.DataFrame(list(zip(sequences, labels)), columns=['sequence', 'label']) + else: + return pd.DataFrame(sequences, columns=['sequence']) + + +if __name__ == "__main__": + reader = ReadDNA() + data = reader.read_csv('datasets/primer/dataset.csv', with_labels=True) + print(data) + + data = reader.read_fasta('datasets/primer/dataset.fasta', with_labels=True) + print(data) + + \ No newline at end of file diff --git a/src/propythia/DNA/requirements.txt b/src/propythia/DNA/requirements.txt new file mode 100644 index 0000000..3df5305 --- /dev/null +++ b/src/propythia/DNA/requirements.txt @@ -0,0 +1,250 @@ +# This file may be used to create an environment using: +# $ conda create --name --file +# platform: linux-64 +@EXPLICIT +https://repo.anaconda.com/pkgs/main/linux-64/_libgcc_mutex-0.1-main.conda +https://repo.anaconda.com/pkgs/main/linux-64/_tflow_select-2.3.0-mkl.conda +https://repo.anaconda.com/pkgs/main/linux-64/blas-1.0-mkl.conda +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.6.15-ha878542_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/intel-openmp-2021.4.0-h06a4308_3561.conda +https://repo.anaconda.com/pkgs/main/linux-64/ld_impl_linux-64-2.35.1-h7274673_9.conda +https://repo.anaconda.com/pkgs/main/linux-64/libgfortran4-7.5.0-ha8ba4b0_17.conda +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.1.0-ha89aaad_16.tar.bz2 +https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2 +https://conda.anaconda.org/pytorch/noarch/pytorch-mutex-1.0-cuda.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/libgfortran-ng-7.5.0-ha8ba4b0_17.conda +https://repo.anaconda.com/pkgs/main/linux-64/libgomp-11.2.0-h1234567_1.conda +https://repo.anaconda.com/pkgs/main/linux-64/mkl-2021.4.0-h06a4308_640.conda 
+https://repo.anaconda.com/pkgs/main/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-12_linux64_mkl.tar.bz2 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-12_linux64_mkl.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/libgcc-ng-11.2.0-h1234567_1.conda +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-12_linux64_mkl.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/brotli-1.0.9-he6710b0_2.conda +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/c-ares-1.18.1-h7f8727e_0.conda +https://conda.anaconda.org/conda-forge/linux-64/cudatoolkit-10.2.89-h8f6ccaa_8.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/expat-2.4.4-h295c915_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/giflib-5.2.1-h7b6447c_0.conda +https://conda.anaconda.org/conda-forge/linux-64/gmp-6.2.1-h58526e2_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/icu-58.2-he6710b0_3.conda +https://repo.anaconda.com/pkgs/main/linux-64/jpeg-9d-h7f8727e_0.conda +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h7f98852_1001.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/libffi-3.3-he6710b0_2.conda +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/libsodium-1.0.18-h7b6447c_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/libuuid-1.0.3-h7f8727e_2.conda +https://repo.anaconda.com/pkgs/main/linux-64/libuv-1.40.0-h7b6447c_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/libwebp-base-1.2.2-h7f8727e_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/libxcb-1.14-h7b6447c_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/lz4-c-1.9.3-h295c915_1.conda +https://repo.anaconda.com/pkgs/main/linux-64/mpich-3.3.2-hc856adb_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/ncurses-6.3-h7f8727e_2.conda 
+https://conda.anaconda.org/conda-forge/linux-64/nettle-3.6-he412f7d_0.tar.bz2 +https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1o-h166bdaf_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/pcre-8.45-h295c915_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/tbb-2021.5.0-hd09550d_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/xz-5.2.5-h7b6447c_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/yaml-0.2.5-h7b6447c_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/zlib-1.2.11-h7f8727e_4.conda +https://conda.anaconda.org/conda-forge/linux-64/dal-2021.5.1-ha770c72_803.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/glib-2.69.1-h4ff587b_1.conda +https://conda.anaconda.org/conda-forge/linux-64/gnutls-3.6.13-h85f3911_1.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/hdf5-1.10.6-hb1b8bf9_0.conda +https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/libpng-1.6.37-hbc83047_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/libprotobuf-3.19.1-h4ff587b_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/libxml2-2.9.12-h03d6c58_0.conda +https://conda.anaconda.org/conda-forge/linux-64/openh264-2.1.1-h780b84a_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/readline-8.1.2-h7f8727e_1.conda +https://repo.anaconda.com/pkgs/main/linux-64/tk-8.6.11-h1ccaba5_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/zeromq-4.3.4-h2531618_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/zstd-1.4.9-haebb681_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/dbus-1.13.18-hb2f20db_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/freetype-2.11.0-h70c0345_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/gstreamer-1.14.0-h28cd5cc_2.conda +https://repo.anaconda.com/pkgs/main/linux-64/libtiff-4.2.0-h85742a9_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/sqlite-3.38.2-hc218d9a_0.conda 
+https://conda.anaconda.org/pytorch/linux-64/ffmpeg-4.3-hf484d3e_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/fontconfig-2.13.1-h6c09931_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/gst-plugins-base-1.14.0-h8213a91_2.conda +https://repo.anaconda.com/pkgs/main/linux-64/lcms2-2.12-h3be6417_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/libwebp-1.2.2-h55f646e_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/python-3.8.13-h12debd9_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/astor-0.8.1-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/attrs-21.4.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/backcall-0.2.0-pyhd3eb1b0_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/blinker-1.4-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/cachetools-4.2.2-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/charset-normalizer-2.0.4-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/click-8.0.4-py38h06a4308_0.conda +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/cycler-0.11.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/dataclasses-0.8-pyh6d0b6a4_7.conda +https://repo.anaconda.com/pkgs/main/linux-64/debugpy-1.5.1-py38h295c915_0.conda +https://repo.anaconda.com/pkgs/main/noarch/decorator-5.1.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/defusedxml-0.7.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/entrypoints-0.3-py38_0.conda +https://repo.anaconda.com/pkgs/main/noarch/executing-0.8.3-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/noarch/filelock-3.7.1-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/frozenlist-1.2.0-py38h7f8727e_0.conda +https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.5.0-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/gast-0.4.0-pyhd3eb1b0_0.conda 
+https://repo.anaconda.com/pkgs/main/noarch/idna-3.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/ipython_genutils-0.2.0-pyhd3eb1b0_1.conda +https://repo.anaconda.com/pkgs/main/noarch/jupyterlab_widgets-1.0.0-pyhd3eb1b0_1.conda +https://repo.anaconda.com/pkgs/main/linux-64/kiwisolver-1.3.2-py38h295c915_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/llvmlite-0.38.0-py38h4ff587b_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/markupsafe-2.0.1-py38h27cfd23_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/mistune-0.8.4-py38h7b6447c_1000.conda +https://repo.anaconda.com/pkgs/main/linux-64/msgpack-python-1.0.3-py38hd09550d_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/multidict-5.2.0-py38h7f8727e_2.conda +https://repo.anaconda.com/pkgs/main/noarch/munkres-1.1.4-py_0.conda +https://repo.anaconda.com/pkgs/main/noarch/nest-asyncio-1.5.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/pandocfilters-1.5.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/parso-0.8.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/pickleshare-0.7.5-pyhd3eb1b0_1003.conda +https://repo.anaconda.com/pkgs/main/linux-64/pillow-9.0.1-py38h22f2fdc_0.conda +https://repo.anaconda.com/pkgs/main/noarch/prometheus_client-0.13.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/protobuf-3.19.1-py38h295c915_0.conda +https://repo.anaconda.com/pkgs/main/noarch/ptyprocess-0.7.0-pyhd3eb1b0_2.conda +https://repo.anaconda.com/pkgs/main/noarch/pure_eval-0.2.2-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/pyasn1-0.4.8-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.8.0-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/pycparser-2.21-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/noarch/pydeprecate-0.3.2-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/pygments-2.11.2-pyhd3eb1b0_0.conda 
+https://repo.anaconda.com/pkgs/main/linux-64/pyjwt-2.1.0-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/pyparsing-3.0.4-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/pyrsistent-0.18.0-py38heee7806_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/pysocks-1.7.1-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/python-flatbuffers-2.0-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/pytz-2021.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/pyyaml-6.0-py38h7f8727e_1.conda +https://repo.anaconda.com/pkgs/main/linux-64/pyzmq-22.3.0-py38h295c915_2.conda +https://repo.anaconda.com/pkgs/main/linux-64/qt-5.9.7-h5867ecd_1.conda +https://conda.anaconda.org/conda-forge/noarch/redis-py-3.5.3-pyh9f0ad1d_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/send2trash-1.8.0-pyhd3eb1b0_1.conda +https://repo.anaconda.com/pkgs/main/linux-64/sip-4.19.13-py38h295c915_0.conda +https://repo.anaconda.com/pkgs/main/noarch/six-1.16.0-pyhd3eb1b0_1.conda +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.1-pyhd8ed1ab_0.tar.bz2 +https://conda.anaconda.org/conda-forge/noarch/tabulate-0.8.9-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/termcolor-1.1.0-py38h06a4308_1.conda +https://repo.anaconda.com/pkgs/main/noarch/testpath-0.5.0-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.1.0-pyh8a188c0_0.tar.bz2 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/tornado-6.1-py38h27cfd23_0.conda +https://repo.anaconda.com/pkgs/main/noarch/traitlets-5.1.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/typing_extensions-4.1.1-pyh06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/wcwidth-0.2.5-pyhd3eb1b0_0.conda 
+https://repo.anaconda.com/pkgs/main/linux-64/webencodings-0.5.1-py38_1.conda +https://repo.anaconda.com/pkgs/main/noarch/wheel-0.37.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/wrapt-1.13.3-py38h7f8727e_2.conda +https://repo.anaconda.com/pkgs/main/noarch/zipp-3.7.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/absl-py-0.15.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/aiosignal-1.2.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/asttokens-2.0.5-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/astunparse-1.6.3-py_0.conda +https://conda.anaconda.org/conda-forge/noarch/autopep8-1.6.0-pyhd8ed1ab_1.tar.bz2 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2 +https://conda.anaconda.org/conda-forge/linux-64/certifi-2022.6.15-py38h578d9bd_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/cffi-1.15.0-py38hd667e15_1.conda +https://repo.anaconda.com/pkgs/main/noarch/fonttools-4.25.0-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/linux-64/future-0.18.2-py38h578d9bd_5.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/google-pasta-0.2.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/grpcio-1.42.0-py38hce63b2e_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/importlib-metadata-4.11.3-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/jedi-0.18.1-py38h06a4308_1.conda +https://repo.anaconda.com/pkgs/main/noarch/jinja2-3.0.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/jupyter_core-4.9.2-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/jupyterlab_pygments-0.1.2-py_0.conda +https://repo.anaconda.com/pkgs/main/noarch/matplotlib-inline-0.1.2-pyhd3eb1b0_2.conda +https://repo.anaconda.com/pkgs/main/linux-64/mkl-service-2.4.0-py38h7f8727e_0.conda +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.2-py38he2449b9_0.tar.bz2 
+https://repo.anaconda.com/pkgs/main/noarch/packaging-21.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/pexpect-4.8.0-pyhd3eb1b0_3.conda +https://repo.anaconda.com/pkgs/main/noarch/prompt-toolkit-3.0.20-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.1-py38h0a891b7_0.tar.bz2 +https://conda.anaconda.org/conda-forge/linux-64/pyahocorasick-1.4.4-py38h0a891b7_2.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/pyasn1-modules-0.2.8-py_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/pyqt-5.9.2-py38h05f1152_4.conda +https://repo.anaconda.com/pkgs/main/noarch/python-dateutil-2.8.2-pyhd3eb1b0_0.conda +https://conda.anaconda.org/pytorch/linux-64/pytorch-1.11.0-py3.8_cuda10.2_cudnn7.6.5_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/rsa-4.7.2-pyhd3eb1b0_1.conda +https://conda.anaconda.org/conda-forge/linux-64/setproctitle-1.1.10-py38h497a2fe_1004.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/terminado-0.13.1-py38h06a4308_0.conda +https://conda.anaconda.org/conda-forge/noarch/tqdm-4.64.0-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/typing-extensions-4.1.1-hd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/werkzeug-2.0.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/yarl-1.6.3-py38h27cfd23_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/argon2-cffi-bindings-21.2.0-py38h7f8727e_0.conda +https://repo.anaconda.com/pkgs/main/noarch/async-timeout-4.0.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/biopython-1.78-py38h7f8727e_0.conda +https://repo.anaconda.com/pkgs/main/noarch/bleach-4.1.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/bottleneck-1.3.4-py38hce1f21e_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/brotlipy-0.7.0-py38h27cfd23_1003.conda +https://repo.anaconda.com/pkgs/main/linux-64/cryptography-3.4.8-py38hd23ed53_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/daal4py-2021.5.0-py38h78b71dc_0.conda 
+https://repo.anaconda.com/pkgs/main/linux-64/h5py-2.10.0-py38hd6299e0_1.conda +https://repo.anaconda.com/pkgs/main/noarch/importlib_metadata-4.11.3-hd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/jupyter_client-7.1.2-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/markdown-3.3.4-py38h06a4308_0.conda +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py38hf4fb855_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/mkl_fft-1.3.1-py38hd3c417c_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/mkl_random-1.2.2-py38h51133e4_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/numexpr-2.8.1-py38h6abb31d_0.conda +https://repo.anaconda.com/pkgs/main/noarch/opt_einsum-3.3.0-pyhd3eb1b0_1.conda +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.1-py38h43a58ef_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/prompt_toolkit-3.0.20-hd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/qtpy-2.0.1-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/ray-core-1.6.0-py38h295c915_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/scipy-1.7.3-py38hc147768_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/setuptools-58.0.4-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/stack_data-0.2.0-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/noarch/tensorboardx-2.5.1-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/tensorflow-estimator-2.6.0-pyh7b7c402_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/aiohttp-3.8.1-py38h7f8727e_1.conda +https://repo.anaconda.com/pkgs/main/noarch/argon2-cffi-21.3.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/ipython-8.2.0-py38h06a4308_0.conda +https://conda.anaconda.org/conda-forge/noarch/joblib-1.1.0-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/jsonschema-3.2.0-pyhd3eb1b0_2.conda +https://repo.anaconda.com/pkgs/main/noarch/keras-preprocessing-1.1.2-pyhd3eb1b0_0.conda 
+https://repo.anaconda.com/pkgs/main/linux-64/matplotlib-3.5.1-py38h06a4308_1.conda +https://repo.anaconda.com/pkgs/main/linux-64/numba-0.55.1-py38h51133e4_0.conda +https://repo.anaconda.com/pkgs/main/noarch/oauthlib-3.2.0-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.2-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/pip-21.2.4-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/pyopenssl-21.0.0-pyhd3eb1b0_1.conda +https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.11.2-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/tensorboard-plugin-wit-1.6.0-py_0.conda +https://conda.anaconda.org/conda-forge/noarch/torchmetrics-0.9.1-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/ipykernel-6.9.1-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/nbformat-5.1.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/scikit-learn-1.0.2-py38h51133e4_1.conda +https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.13.2-py38h6c62de6_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/tensorflow-base-2.4.1-mkl_py38h43e0292_0.conda +https://repo.anaconda.com/pkgs/main/noarch/urllib3-1.26.8-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/jupyter_console-6.4.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/nbclient-0.5.11-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/noarch/pynndescent-0.5.6-pyh6c4a22f_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/qtconsole-5.3.0-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/requests-2.27.1-pyhd3eb1b0_0.conda +https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-intelex-2021.5.0-py38h578d9bd_1.tar.bz2 +https://conda.anaconda.org/conda-forge/noarch/seaborn-0.11.2-hd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/google-auth-2.6.0-pyhd3eb1b0_0.conda 
+https://repo.anaconda.com/pkgs/main/linux-64/nbconvert-6.3.0-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/ray-tune-1.6.0-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/requests-oauthlib-1.3.0-py_0.conda +https://conda.anaconda.org/pytorch/linux-64/torchvision-0.12.0-py38_cu102.tar.bz2 +https://conda.anaconda.org/conda-forge/linux-64/umap-learn-0.5.2-py38h578d9bd_1.tar.bz2 +https://repo.anaconda.com/pkgs/main/noarch/google-auth-oauthlib-0.4.4-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/notebook-6.4.8-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/tensorboard-2.4.0-pyhc547734_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/widgetsnbextension-3.5.2-py38h06a4308_0.conda +https://repo.anaconda.com/pkgs/main/noarch/ipywidgets-7.6.5-pyhd3eb1b0_1.conda +https://conda.anaconda.org/conda-forge/noarch/pytorch-lightning-1.6.4-pyhd8ed1ab_0.tar.bz2 +https://repo.anaconda.com/pkgs/main/linux-64/tensorflow-2.4.1-mkl_py38hb2083e0_0.conda +https://repo.anaconda.com/pkgs/main/linux-64/jupyter-1.0.0-py38_7.conda +https://repo.anaconda.com/pkgs/main/noarch/keras-base-2.4.3-pyhd3eb1b0_0.conda +https://repo.anaconda.com/pkgs/main/noarch/keras-2.4.3-hd3eb1b0_0.conda diff --git a/src/propythia/DNA/src/encoding.py b/src/propythia/DNA/src/encoding.py new file mode 100644 index 0000000..78cc002 --- /dev/null +++ b/src/propythia/DNA/src/encoding.py @@ -0,0 +1,122 @@ +""" +############################################################################## +A class used for computing different types of DNA encodings. +It contains encodings such one-hot-encoding. 
class DNAEncoder:
    """
    Computes different numerical encodings of DNA sequences: one-hot
    (binary or ordinal), nucleotide-chemical-property and k-mer one-hot.

    Accepts either a single DNA string (which is stripped and upper-cased)
    or an ndarray of already-prepared sequences.
    """

    # From: https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8138820/
    # Four-digit binary vector per nucleotide ('N' -> all zeros); the integer
    # keys let already-binarized labels (1/0) pass through as 2-bit vectors.
    ONE_HOT = {
        'A': [1, 0, 0, 0],
        'C': [0, 1, 0, 0],
        'G': [0, 0, 1, 0],
        'T': [0, 0, 0, 1],
        'N': [0, 0, 0, 0],
        1: [1, 0],
        0: [0, 1]
    }

    # Ordinal encoding used when one_hot_encode is called with dimension != 3.
    ORDINAL = {
        'A': 1,
        'C': 2,
        'G': 3,
        'T': 4,
        'N': 0,
    }

    # From: https://academic.oup.com/bioinformatics/article/33/22/3518/4036387
    # Nucleotide chemical property vector [ring structure, hydrogen bond,
    # functional group]:
    #   Ring structure   | Purine     | A, G
    #                    | Pyrimidine | C, T
    #   Hydrogen bond    | Weak       | A, T
    #                    | Strong     | C, G
    #   Functional group | Amino      | A, C
    #                    | Keto       | G, T
    CHEMICAL = {
        'A': [1, 1, 1],
        'C': [0, 0, 1],
        'G': [1, 0, 0],
        'T': [0, 1, 0],
        'N': [0, 0, 0],
        1: [1, 0],
        0: [0, 1]
    }

    def __init__(self, data: Union[str, np.ndarray]):
        # A plain string is treated as one sequence and normalized; anything
        # else is taken as a collection of sequences, used as-is.
        if isinstance(data, str):
            self.dna_sequence = data.strip().upper()
            self.sequences = None
        else:
            self.dna_sequence = None
            self.sequences = data

    def _encode_with(self, table):
        """Map every symbol through *table*, for one sequence or for many."""
        if self.sequences is not None:
            return np.array([[table[symbol] for symbol in seq] for seq in self.sequences])
        if self.dna_sequence is not None:
            return np.array([table[symbol] for symbol in self.dna_sequence])
        print("Unexpected error: self.sequences and self.dna_sequence are None.")
        sys.exit(1)

    def one_hot_encode(self, dimension=3):
        """
        Binary encoding: each nucleotide becomes a four digit binary vector
        when dimension == 3, otherwise a single ordinal value.

        :param dimension: 3 selects the binary-vector table, anything else
                          the ordinal table.
        :return: array with the encoded sequence(s)
        """
        table = self.ONE_HOT if dimension == 3 else self.ORDINAL
        return self._encode_with(table)

    def chemical_encode(self):
        """
        Nucleotide chemical property encoding (see CHEMICAL table above).

        :return: array with the encoded sequence(s)
        """
        return self._encode_with(self.CHEMICAL)

    def kmer_one_hot_encode(self, k):
        """
        One-hot encoding over the sequence's k-mers: each k-mer is replaced by
        its one-hot vector (tables built by calculate_kmer_onehot).

        :param k: k-mer length.
        :return: array with the k-mer one-hot encoded sequence(s)
        """
        if self.sequences is not None:
            onehot_by_kmer = calculate_kmer_onehot(k)
            return np.array([
                np.array([onehot_by_kmer[kmer] for kmer in calculate_kmer_list(seq, k)])
                for seq in self.sequences
            ])
        if self.dna_sequence is not None:
            onehot_by_kmer = calculate_kmer_onehot(k)
            kmers = calculate_kmer_list(self.dna_sequence, k)
            return np.array([onehot_by_kmer[kmer] for kmer in kmers])
        print("Unexpected error: self.sequences and self.dna_sequence are None.")
        sys.exit(1)
def hyperparameter_tuning(device, config):
    """
    Run a Ray Tune hyperparameter search for the configured model, then
    rebuild the best trial's model, restore its checkpoint and report
    test-set metrics.

    :param device: torch device used for training/evaluation.
    :param config: full JSON configuration; 'fixed_vals' holds run constants,
                   'combination' the model/data selection and
                   'hyperparameter_search_space' the Tune search space.
    """

    cpus_per_trial = config['fixed_vals']['cpus_per_trial']
    gpus_per_trial = config['fixed_vals']['gpus_per_trial']
    num_samples = config['fixed_vals']['num_samples']
    epochs = config['fixed_vals']['epochs']
    kmer_one_hot = config['fixed_vals']['kmer_one_hot']
    output_size = config['fixed_vals']['output_size']
    model_label = config['combination']['model_label']
    data_dir = config['combination']['data_dir']
    mode = config['combination']['mode']
    class_weights = config['combination']['class_weights']

    seed_everything()

    # ------------------------------------------------------------------------------------------

    # HyperBand early-stops underperforming trials based on validation loss.
    scheduler = HyperBandScheduler(
        metric="loss",
        mode="min",
        max_t=epochs,
        reduction_factor=2
    )

    reporter = CLIReporter(
        metric_columns=["loss", "accuracy", "training_iteration", 'mcc']
    )

    result = tune.run(
        partial(
            prepare_and_train,
            device=device,
            config_from_json=config
        ),
        resources_per_trial={"cpu": cpus_per_trial, "gpu": gpus_per_trial},
        config=config['hyperparameter_search_space'],
        num_samples=num_samples,
        scheduler=scheduler,
        progress_reporter=reporter
    )

    # NOTE: the best trial is selected on MCC even though scheduling pruned
    # on loss — this mirrors the original behaviour.
    best_trial = result.get_best_trial('mcc', 'max', 'last')
    print("Best trial config:", best_trial.config)
    print("Best trial final validation loss:", best_trial.last_result["loss"])
    print("Best trial final validation accuracy:", best_trial.last_result["accuracy"])
    print("Best trial final validation mcc:", best_trial.last_result["mcc"])

    _, testloader, _, input_size, sequence_length = prepare_data(
        data_dir=data_dir,
        mode=mode,
        batch_size=best_trial.config['batch_size'],
        k=kmer_one_hot,
    )

    best_trained_model = _build_model(
        model_label, best_trial.config, input_size, sequence_length, output_size, device
    )
    best_trained_model.to(device)

    best_checkpoint_dir = best_trial.checkpoint.value
    model_state, optimizer_state = torch.load(os.path.join(
        best_checkpoint_dir, "checkpoint"))
    best_trained_model.load_state_dict(model_state)

    metrics = test(device, best_trained_model, testloader)
    print_metrics(model_label, mode, data_dir, kmer_one_hot, class_weights, metrics)


def _build_model(model_label, trial_config, input_size, sequence_length, output_size, device):
    """
    Instantiate the network selected by *model_label* with a trial's sampled
    hyperparameters. Replaces the former ten-branch duplicated if/elif chain.

    :raises ValueError: for unknown model labels.
    """
    hidden_size = trial_config['hidden_size']
    dropout = trial_config['dropout']

    if model_label == 'mlp':
        return MLP(input_size, hidden_size, output_size, dropout)
    if model_label == 'cnn':
        return CNN(input_size, hidden_size, output_size, dropout, sequence_length)

    # The recurrent families share constructor signatures; the bool flags
    # bidirectionality.
    recurrent = {
        'lstm': (LSTM, False), 'bi_lstm': (LSTM, True),
        'gru': (GRU, False), 'bi_gru': (GRU, True),
    }
    conv_recurrent = {
        'cnn_lstm': (CNN_LSTM, False), 'cnn_bi_lstm': (CNN_LSTM, True),
        'cnn_gru': (CNN_GRU, False), 'cnn_bi_gru': (CNN_GRU, True),
    }

    if model_label in recurrent:
        model_cls, bidirectional = recurrent[model_label]
        return model_cls(input_size, hidden_size, bidirectional, trial_config['num_layers'],
                         output_size, sequence_length, dropout, device)
    if model_label in conv_recurrent:
        model_cls, bidirectional = conv_recurrent[model_label]
        return model_cls(input_size, hidden_size, bidirectional, trial_config['num_layers'],
                         sequence_length, output_size, dropout, device)

    raise ValueError('Model label not implemented', model_label)


def prepare_and_train(config, device, config_from_json):
    """
    Tune trial entry point: build the data loaders for this trial's batch
    size and train the model with the sampled hyperparameters.

    :param config: the per-trial hyperparameters sampled by Tune.
    :param device: torch device used for training.
    :param config_from_json: the full static JSON configuration.
    """
    data_dir = config_from_json['combination']['data_dir']
    mode = config_from_json['combination']['mode']
    kmer_one_hot = config_from_json['fixed_vals']['kmer_one_hot']
    batch_size = config['batch_size']

    trainloader, _, validloader, input_size, sequence_length = prepare_data(
        data_dir=data_dir,
        mode=mode,
        batch_size=batch_size,
        k=kmer_one_hot,
    )

    traindata(config, device, config_from_json, trainloader, validloader, input_size, sequence_length)
(https://github.com/abidlabs/deep-learning-genomics-primer/blob/master/A_Primer_on_Deep_Learning_in_Genomics_Public.ipynb) + """ + + def __init__(self, input_size, hidden_size, output_size, dropout, sequence_length): + super(CNN, self).__init__() + + self.conv1 = nn.Conv1d(input_size, hidden_size, kernel_size=12, stride=1, padding=0, dilation=1) + self.maxpool = nn.MaxPool1d(kernel_size=12, stride=5, padding=0, dilation=1) + + max_pool_output = calc_maxpool_output(hidden_size, sequence_length) + + self.linear = nn.Linear(max_pool_output, hidden_size * 2) + self.relu = nn.ReLU() + self.dropout = nn.Dropout(dropout) + + self.fc_last = nn.Linear(hidden_size * 2, output_size) + self.softmax = nn.Softmax(dim=1) + + def forward(self, x): + x = x.permute(0, 2, 1) + x = self.conv1(x) + x = self.maxpool(x) + x = torch.flatten(x, 1) + x = self.linear(x) + x = self.relu(x) + x = self.dropout(x) + x = self.fc_last(x) + x = self.softmax(x) + return x + +class LSTM(nn.Module): + """ + https://github.com/aladdinpersson/Machine-Learning-Collection/blob/master/ML/Pytorch/Basics/pytorch_rnn_gru_lstm.py + """ + + def __init__(self, input_size, hidden_size, is_bidirectional, num_layers, output_size, sequence_length, dropout, device): + super(LSTM, self).__init__() + self.num_directions = 2 if is_bidirectional else 1 + self.hidden_size = hidden_size + self.device = device + self.num_layers = num_layers + + self.lstm = nn.LSTM(input_size, hidden_size, num_layers, batch_first=True, bidirectional=is_bidirectional, dropout=dropout) + self.fc = nn.Linear(hidden_size * sequence_length * self.num_directions, output_size) + + def forward(self, x): + h0 = torch.zeros(self.num_layers * self.num_directions, x.size(0), self.hidden_size).to(self.device) + c0 = torch.zeros(self.num_layers * self.num_directions, x.size(0), self.hidden_size).to(self.device) + + out, _ = self.lstm(x, (h0, c0)) + out = out.reshape(out.shape[0], -1) + out = self.fc(out) + return out + +class GRU(nn.Module): + """ + 
https://github.com/aladdinpersson/Machine-Learning-Collection/blob/master/ML/Pytorch/Basics/pytorch_rnn_gru_lstm.py + """ + + def __init__(self, input_size, hidden_size, is_bidirectional, num_layers, output_size, sequence_length, dropout, device): + super(GRU, self).__init__() + self.hidden_size = hidden_size + self.num_layers = num_layers + self.device = device + self.num_directions = 2 if is_bidirectional else 1 + + self.gru = nn.GRU(input_size, hidden_size, num_layers, batch_first=True, bidirectional=is_bidirectional, dropout=dropout) + self.fc = nn.Linear(hidden_size * sequence_length * self.num_directions, output_size) + + def forward(self, x): + h0 = torch.zeros(self.num_layers * self.num_directions, x.size(0), self.hidden_size).to(self.device) + + out, _ = self.gru(x, h0) + out = out.reshape(out.shape[0], -1) + out = self.fc(out) + return out + +class CNN_LSTM(nn.Module): + + def __init__(self, input_size, hidden_size, is_bidirectional, num_layers, sequence_length, output_size, dropout, device): + super(CNN_LSTM, self).__init__() + self.num_directions = 2 if is_bidirectional else 1 + self.num_layers = num_layers + self.hidden_size = hidden_size + self.device = device + + self.conv1 = nn.Conv1d(input_size, hidden_size, kernel_size=12, stride=1, padding=0, dilation=1) + self.maxpool = nn.MaxPool1d(kernel_size=12, stride=5, padding=0, dilation=1) + + max_pool_output = calc_maxpool_output(hidden_size, sequence_length) + + self.linear = nn.Linear(max_pool_output, hidden_size * 2) + self.relu = nn.ReLU() + self.dropout = nn.Dropout(dropout) + + self.lstm = nn.LSTM(hidden_size * 2, hidden_size, num_layers, batch_first=True, bidirectional=is_bidirectional, dropout=dropout) + self.last_linear = nn.Linear(hidden_size * self.num_directions, output_size) + + def forward(self, x): + h0 = torch.zeros(self.num_layers * self.num_directions, x.size(0), self.hidden_size).to(self.device) + c0 = torch.zeros(self.num_layers * self.num_directions, x.size(0), 
self.hidden_size).to(self.device) + + x = x.permute(0, 2, 1) + x = self.conv1(x) + x = self.maxpool(x) + x = torch.flatten(x, 1) + x = self.linear(x) + x = self.relu(x) + x = self.dropout(x) + x = x.reshape(x.size(0), 1, -1) + out, _ = self.lstm(x, (h0, c0)) + out = out.reshape(out.shape[0], -1) + y = self.last_linear(out) + return y + +class CNN_GRU(nn.Module): + """ + https://medium.com/geekculture/recap-of-how-to-implement-lstm-in-pytorch-e17ec11b061e + """ + def __init__(self, input_size, hidden_size, is_bidirectional, num_layers, sequence_length, output_size, dropout, device): + super(CNN_GRU, self).__init__() + + self.num_directions = 2 if is_bidirectional else 1 + self.num_layers = num_layers + self.hidden_size = hidden_size + self.device = device + + self.conv1 = nn.Conv1d(input_size, hidden_size, kernel_size=12, stride=1, padding=0, dilation=1) + self.maxpool = nn.MaxPool1d(kernel_size=12, stride=5, padding=0, dilation=1) + + max_pool_output = calc_maxpool_output(hidden_size, sequence_length) + + self.linear = nn.Linear(max_pool_output, hidden_size * 2) + self.relu = nn.ReLU() + self.dropout = nn.Dropout(dropout) + + self.gru = nn.GRU(hidden_size * 2, hidden_size, num_layers, batch_first=True, bidirectional=is_bidirectional, dropout=dropout) + self.last_linear = nn.Linear(hidden_size * self.num_directions, output_size) + + def forward(self, x): + h0 = torch.zeros(self.num_layers * self.num_directions, x.size(0), self.hidden_size).to(self.device) + + x = x.permute(0, 2, 1) + x = self.conv1(x) + x = self.maxpool(x) + x = torch.flatten(x, 1) + x = self.linear(x) + x = self.relu(x) + x = self.dropout(x) + x = x.reshape(x.size(0), 1, -1) + out, _ = self.gru(x, h0) + out = out.reshape(out.shape[0], -1) + y = self.last_linear(out) + return y + \ No newline at end of file diff --git a/src/propythia/DNA/src/prepare_data.py b/src/propythia/DNA/src/prepare_data.py new file mode 100644 index 0000000..23e982e --- /dev/null +++ b/src/propythia/DNA/src/prepare_data.py @@ 
import os
import torch
import torch.utils.data as data_utils
import pickle
from calculate_features import calculate_and_normalize
from read_sequence import ReadDNA
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from .encoding import DNAEncoder
import sys
sys.path.append("../")
from utils import seed_everything


def data_splitting(fps_x, fps_y, batch_size, train_size, test_size, validation_size):
    """
    Split data into stratified train, test and validation DataLoaders.

    :param fps_x: array-like feature matrix.
    :param fps_y: array-like labels, used for stratification.
    :param batch_size: int, batch size for each DataLoader.
    :param train_size: float, fraction of the data used for training.
    :param test_size: float, fraction of the data used for testing.
    :param validation_size: float, fraction of the data used for validation.
    :return: (trainloader, testloader, validloader, x_train).
    :raises ValueError: if the three fractions do not sum to 1.
    """
    # Compare with a tolerance: an exact float equality test can reject valid
    # splits such as 0.6 + 0.2 + 0.2 because of floating-point rounding.
    if abs(train_size + test_size + validation_size - 1) > 1e-9:
        raise ValueError("The sum of train_size, test_size and validation_size must be 1.")

    # First carve out the test set, then split the remainder into train/validation.
    x, x_test, y, y_test = train_test_split(
        fps_x, fps_y,
        test_size=test_size,
        train_size=train_size + validation_size,
        stratify=fps_y
    )
    x_train, x_cv, y_train, y_cv = train_test_split(
        x, y,
        test_size=validation_size / (1 - test_size),
        train_size=1 - (validation_size / (1 - test_size)),
        stratify=y
    )

    def _loader(features, labels):
        # Wrap a (features, labels) pair into a shuffling DataLoader.
        dataset = data_utils.TensorDataset(
            torch.tensor(features, dtype=torch.float),
            torch.tensor(labels, dtype=torch.long)
        )
        return data_utils.DataLoader(dataset, shuffle=True, batch_size=batch_size)

    return _loader(x_train, y_train), _loader(x_test, y_test), _loader(x_cv, y_cv), x_train


def prepare_data(data_dir, mode, batch_size, k, train_size=0.6, test_size=0.2, validation_size=0.2):
    """
    Prepare data for training and testing.

    :param data_dir: str, the path to the data directory.
    :param mode: str, the mode to use. Must be either 'descriptor', 'one_hot', 'chemical' or 'kmer_one_hot'.
    :param batch_size: int, the batch size to use.
    :param k: int, value for the kmer one hot encoding.
    :param train_size: float, the proportion of the data to use for training.
    :param test_size: float, the proportion of the data to use for testing.
    :param validation_size: float, the proportion of the data to use for validation.
    :return: trainloader: torch.utils.data.DataLoader, the training data.
    :return: testloader: torch.utils.data.DataLoader, the testing data.
    :return: validloader: torch.utils.data.DataLoader, the validation data.
    :return: input_size: int, the size of the input.
    :return: sequence_length: int, the size of the length of sequence.
    :raises ValueError: if mode is not one of the accepted values.
    """
    seed_everything()

    suffix = '_descriptor' if mode == 'descriptor' else ''
    fps_x_file = data_dir + '/fps_x' + suffix + '.pkl'
    fps_y_file = data_dir + '/fps_y' + suffix + '.pkl'

    if not os.path.isfile(fps_x_file) or not os.path.isfile(fps_y_file):
        # Cache miss: read the raw dataset and compute the features once.
        reader = ReadDNA()
        data = reader.read_csv(filename=data_dir + '/dataset.csv', with_labels=True)

        if mode == 'descriptor':
            fps_x, fps_y = calculate_and_normalize(data)
        else:
            fps_x = data['sequence'].values
            fps_y = data['label'].values

        with open(fps_x_file, 'wb') as f:
            pickle.dump(fps_x, f)
        with open(fps_y_file, 'wb') as f:
            pickle.dump(fps_y, f)
    else:
        # Cache hit: reuse the previously computed features.
        with open(fps_x_file, 'rb') as f:
            fps_x = pickle.load(f)
        with open(fps_y_file, 'rb') as f:
            fps_y = pickle.load(f)

    if mode in ('one_hot', 'chemical', 'kmer_one_hot'):
        encoder = DNAEncoder(fps_x)
        if mode == 'one_hot':
            fps_x = encoder.one_hot_encode()
        elif mode == 'chemical':
            fps_x = encoder.chemical_encode()
        else:
            fps_x = encoder.kmer_one_hot_encode(k)
    elif mode == 'descriptor':
        scaler = StandardScaler().fit(fps_x)
        fps_x = scaler.transform(fps_x)
        # calculate_and_normalize presumably returns a pandas object, but a
        # pickled cache may already hold a plain ndarray — only convert when
        # conversion is available instead of crashing.
        if hasattr(fps_y, 'to_numpy'):
            fps_y = fps_y.to_numpy()
    else:
        # Keep the message consistent with the accepted mode keys
        # (the previous message advertised a nonexistent 'kmer_one_hot_encode').
        raise ValueError("mode must be either 'one_hot', 'descriptor', 'chemical' or 'kmer_one_hot'.")

    trainloader, testloader, validloader, x_train = data_splitting(
        fps_x, fps_y, batch_size, train_size, test_size, validation_size)

    return trainloader, testloader, validloader, x_train.shape[-1], x_train.shape[1]


# ---------------------------------------------------------------------------
# src/test.py
# ---------------------------------------------------------------------------
import torch
from numpy import argmax
from numpy import vstack
from sklearn.metrics import accuracy_score, f1_score, matthews_corrcoef, confusion_matrix, recall_score, roc_auc_score


def test(device, model, test_loader):
    """
    Evaluate the model on a data loader.

    :param device: device used for the forward passes.
    :param model: model to be evaluated.
    :param test_loader: DataLoader with the evaluation data.
    :return: dict with accuracy, mcc, roc_auc, f1, recall and the confusion matrix.
    """
    model.eval()
    predictions, actuals = list(), list()
    with torch.no_grad():
        for (inputs, targets) in test_loader:
            inputs, targets = inputs.to(device), targets.to(device)
            yhat = model(inputs)
            yhat = yhat.cpu().detach().numpy()
            actual = targets.cpu().numpy()
            actual = actual.reshape((len(actual), 1))
            # Hard class predictions from the per-class scores.
            yhat = argmax(yhat, axis=1)
            yhat = yhat.reshape((len(yhat), 1))
            predictions.append(yhat)
            actuals.append(actual)

    predictions, actuals = vstack(predictions), vstack(actuals)
    acc = accuracy_score(actuals, predictions)
    mcc = matthews_corrcoef(actuals, predictions)
    # NOTE(review): ROC AUC is computed from hard labels rather than class
    # probabilities, which understates the score — confirm intent.
    roc_auc = roc_auc_score(actuals, predictions)
    f1 = f1_score(actuals, predictions)
    recall = recall_score(actuals, predictions)
    report = confusion_matrix(actuals, predictions)

    metrics = {
        'accuracy': acc,
        'mcc': mcc,
        'roc_auc': roc_auc,
        'f1': f1,
        'recall': recall,
        'confusion_matrix': report
    }
    return metrics


# ---------------------------------------------------------------------------
# src/train.py (module header)
# ---------------------------------------------------------------------------
import numpy as np
import torch
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.optim import Adam, SGD
from .test import test
from .models import *
from .prepare_data import prepare_data
from ray import tune
import os

import sys
sys.path.append("../")
from utils import seed_everything
+ """ + + seed_everything() + + # Fixed values + do_tuning = config_from_json['do_tuning'] + model_label = config_from_json['combination']['model_label'] + output_size = config_from_json['fixed_vals']['output_size'] + optimizer_label = config_from_json['fixed_vals']['optimizer_label'] + epochs = config_from_json['fixed_vals']['epochs'] + patience = config_from_json['fixed_vals']['patience'] + loss_function = config_from_json['fixed_vals']['loss_function'] + last_loss = 100 + + # Hyperparameters to tune + hidden_size = config['hidden_size'] + dropout = config['dropout'] + lr = config['lr'] + num_layers = config['num_layers'] + + if model_label == 'mlp': + model = MLP(input_size, hidden_size, output_size, dropout).to(device) + elif model_label == 'cnn': + model = CNN(input_size, hidden_size, output_size, dropout, sequence_length).to(device) + elif model_label == 'lstm': + model = LSTM(input_size, hidden_size, False, num_layers, output_size, sequence_length, dropout, device).to(device) + elif model_label == 'bi_lstm': + model = LSTM(input_size, hidden_size, True, num_layers, output_size, sequence_length, dropout, device).to(device) + elif model_label == 'gru': + model = GRU(input_size, hidden_size, False, num_layers, output_size, sequence_length, dropout, device).to(device) + elif model_label == 'bi_gru': + model = GRU(input_size, hidden_size, True, num_layers, output_size, sequence_length, dropout, device).to(device) + elif model_label == 'cnn_lstm': + model = CNN_LSTM(input_size, hidden_size, False, num_layers, sequence_length, output_size, dropout, device).to(device) + elif model_label == 'cnn_bi_lstm': + model = CNN_LSTM(input_size, hidden_size, True, num_layers, sequence_length, output_size, dropout, device).to(device) + elif model_label == 'cnn_gru': + model = CNN_GRU(input_size, hidden_size, False, num_layers, sequence_length, output_size, dropout, device).to(device) + elif model_label == 'cnn_bi_gru': + model = CNN_GRU(input_size, hidden_size, True, 
num_layers, sequence_length, output_size, dropout, device).to(device) + else: + raise ValueError('Model label not implemented', model_label) + + if(optimizer_label == 'adam'): + optimizer = Adam(model.parameters(), lr=lr) + elif(optimizer_label == 'sgd'): + optimizer = SGD(model.parameters(), lr=lr) + else: + raise ValueError("optimizer_label must be either 'adam' or 'sgd'") + + scheduler = ReduceLROnPlateau(optimizer, 'min') + + # ------------------------------------------------------------------------------------------------ + + if do_tuning and checkpoint_dir: + model_state, optimizer_state = torch.load( + os.path.join(checkpoint_dir, "checkpoint")) + model.load_state_dict(model_state) + optimizer.load_state_dict(optimizer_state) + + # ------------------------------------------------------------------------------------------------ + + trigger_times = 0 + for epoch in range(1, epochs+1): + model.train() + + for i, (inputs, targets) in enumerate(trainloader): + inputs, targets = inputs.to(device), targets.to(device) + + # Zero the gradients + optimizer.zero_grad() + + # Forward and backward propagation + output = model(inputs) + loss = loss_function(output, targets) + loss.backward() + optimizer.step() + + # Show progress + if i % 100 == 0 or i == len(trainloader): + print(f'[{epoch}/{epochs}, {i}/{len(trainloader)}] loss: {loss.item():.8}') + + # Early stopping + current_loss, val_acc, val_mcc = validation(model, device, validloader, loss_function) + print('The Current Loss:', current_loss) + + if current_loss >= last_loss or torch.isnan(loss): + trigger_times += 1 + if torch.isnan(loss): + print('NAN loss! 
trigger_times:', trigger_times) + else: + print('trigger Times:', trigger_times) + + if trigger_times >= patience: + print('Early stopping!\nStart to test process.') + if do_tuning: + with tune.checkpoint_dir(epoch) as checkpoint_dir: + path = os.path.join(checkpoint_dir, "checkpoint") + torch.save((model.state_dict(), optimizer.state_dict()), path) + tune.report(loss=current_loss, accuracy=val_acc, mcc=val_mcc) + return + else: + return model + + else: + print('trigger times: 0') + trigger_times = 0 + + last_loss = current_loss + scheduler.step(current_loss) + if do_tuning: + tune.report(loss=current_loss, accuracy=val_acc, mcc=val_mcc) + + if do_tuning == False: + return model + + +def validation(model, device, validloader, loss_function): + """ + Validate the model. + :param model: Model to be validated. + :param device: Device to be used for validation. + :param validloader: Data loader for validation. + :param loss_function: Loss function to be used. + :return: The loss, accuracy and mcc of the model. 
+ """ + model.eval() + loss_total = 0 + + with torch.no_grad(): + for (inputs, targets) in validloader: + inputs, targets = inputs.to(device), targets.to(device) + + output = model(inputs) + loss = loss_function(output, targets) + loss_total += loss.item() + + metrics = test(device, model, validloader) + + return loss_total / len(validloader), metrics['accuracy'], metrics['mcc'] diff --git a/src/propythia/DNA/utils.py b/src/propythia/DNA/utils.py new file mode 100644 index 0000000..121d9b8 --- /dev/null +++ b/src/propythia/DNA/utils.py @@ -0,0 +1,384 @@ +import os +import pickle +import random +import sys +import math +import numpy as np +from itertools import product +import torch +import json +from torch import nn +from ray import tune + +ALPHABET = 'ACGT' +ALPHABET_CUT = 'ACGTN' +pairs = { + 'A': 'T', + 'T': 'A', + 'G': 'C', + 'C': 'G' +} + +combinations = { + 'mlp': ['descriptor'], + 'mlp_half': ['descriptor'], + 'cnn': ['one_hot', 'chemical', 'kmer_one_hot'], + 'cnn_half': ['one_hot', 'chemical', 'kmer_one_hot'], + 'lstm': ['one_hot', 'chemical', 'kmer_one_hot'], + 'bi_lstm': ['one_hot', 'chemical', 'kmer_one_hot'], + 'gru': ['one_hot', 'chemical', 'kmer_one_hot'], + 'bi_gru': ['one_hot', 'chemical', 'kmer_one_hot'], + 'cnn_lstm': ['one_hot', 'chemical', 'kmer_one_hot'], + 'cnn_bi_lstm': ['one_hot', 'chemical', 'kmer_one_hot'], + 'cnn_gru': ['one_hot', 'chemical', 'kmer_one_hot'], + 'cnn_bi_gru': ['one_hot', 'chemical', 'kmer_one_hot'] +} + +def print_metrics(model_label, mode, data_dir, kmer_one_hot, class_weights, metrics): + print("-" * 40) + print("Results in test set: ") + print("-" * 40) + print("model: ", model_label) + print("mode: ", mode) + print("dataset: ", data_dir.split("/")[-1]) + if mode == "kmer_one_hot": + print("kmer_one_hot: ", kmer_one_hot) + if "essential_genes" in data_dir: + print("class_weights:", class_weights) + print("-" * 40) + + for key in metrics: + if key == 'confusion_matrix': + print(f"{key:<{20}}= {metrics[key][0]}") + 
print(f"{'':<{20}} {metrics[key][1]}") + else: + print(f"{key:<{20}}= {metrics[key]:.3f}") + + print("-" * 40) + +# ----------------------------------------------------------------------------- +def seed_everything(seed=42): + random.seed(seed) + os.environ['PYTHONHASHSEED'] = str(seed) + os.environ["CUBLAS_WORKSPACE_CONFIG"]=":4096:2" + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.enabled = False + +def checker(sequence): + """ + Checks if the input sequence is a valid DNA sequence. + """ + return all(i in ALPHABET for i in sequence) + +def checker_cut(sequence): + """ + Checks if the input sequence is a valid DNA sequence. Includes the 'N' character as valid because it is used to fill the sequence to the right length. + """ + return all(i in ALPHABET_CUT for i in sequence) + +def normal_round(n): + """ + Equivalent to python's round but its rounds up if it ends up with 0.5. 
+ """ + if n - math.floor(n) < 0.5: + return math.floor(n) + return math.ceil(n) + + +def normalize_dict(dic): + """Normalize the value of a dictionary.""" + N = sum(dic.values()) + for key in dic: + dic[key] = round(dic[key] / N, 3) + return dic + + +def make_kmer_list(k): + try: + return ["".join(e) for e in product(ALPHABET, repeat=k)] + except TypeError: + print("TypeError: k must be an inter and larger than 0, alphabet must be a string.") + raise TypeError + except ValueError: + print("TypeError: k must be an inter and larger than 0") + raise ValueError + + +def make_kmer_dict(k): + try: + return {''.join(i): 0 for i in product(ALPHABET, repeat=k)} + except TypeError: + print("TypeError: k must be an inter and larger than 0, alphabet must be a string.") + raise TypeError + except ValueError: + print("TypeError: k must be an inter and larger than 0") + raise ValueError + +def calculate_kmer_onehot(k): + nucleotides = [''.join(i) for i in product(ALPHABET_CUT, repeat=k)] + encoded = [] + for i in range(5 ** k): + encoded.append(np.zeros(5 ** k).tolist()) + encoded[i][i] = 1.0 + + return {nucleotides[i]: encoded[i] for i in range(len(nucleotides))} + +def calculate_kmer_list(sequence, k): + l = [] + for i in range(len(sequence) - k + 1): + l.append(sequence[i:i+k]) + return l + +def read_config(device, filename='config.json'): + """ + Reads the configuration file and validates the values. Returns the configuration. 
+ """ + with open(filename) as f: + config = json.load(f) + + # ------------------------------------ check if data_dir exists ------------------------------------ + current_path = os.getcwd() + current_path = current_path.replace("/notebooks", "") # when running from notebook + + config['combination']['data_dir'] = current_path + '/datasets/' + config['combination']['data_dir'] + if not os.path.exists(config['combination']['data_dir']): + raise ValueError("Data directory does not exist:", config['combination']['data_dir']) + + # --------------------------- check if model and mode combination is valid ------------------------- + model_label = config['combination']['model_label'] + mode = config['combination']['mode'] + if(model_label in combinations): + if(mode not in combinations[model_label]): + raise ValueError(model_label, 'does not support', mode, ', please choose one of', combinations[model_label]) + else: + raise ValueError('Model label:', model_label, 'not implemented in', combinations.keys()) + + # --------------------------- check if it's binary classification ---------------------------------- + loss = config['fixed_vals']['loss_function'] + output_size = config['fixed_vals']['output_size'] + if(loss != "cross_entropy" or output_size != 2): + raise ValueError( + 'Model is not binary classification, please set loss_function to cross_entropy and output_size to 2') + + # --------------------------- create the cross entropy pytorch object ------------------------------ + class_weights = torch.tensor(config['combination']['class_weights']).to(device) + config['fixed_vals']['loss_function'] = nn.CrossEntropyLoss(weight=class_weights) + + # --------------------------- create ray tune objects ---------------------------------------------- + config['hyperparameter_search_space']["hidden_size"] = tune.choice(config['hyperparameter_search_space']['hidden_size']) + config['hyperparameter_search_space']["lr"] = tune.choice(config['hyperparameter_search_space']['lr']) 
+ config['hyperparameter_search_space']["batch_size"] = tune.choice(config['hyperparameter_search_space']['batch_size']) + config['hyperparameter_search_space']["dropout"] = tune.choice(config['hyperparameter_search_space']['dropout']) + + if config['combination']['model_label'] not in ['mlp', 'cnn']: + config['hyperparameter_search_space']["num_layers"] = tune.choice(config['hyperparameter_search_space']['num_layers']) + + return config + +# ---------------------------------------------------------------------------------------------------- +# ---------------------------------- Auxiliary function for models ----------------------------------- +# ---------------------------------------------------------------------------------------------------- + +def calc_maxpool_output(hidden_size, sequence_length): + conv1_padding = 0 + conv1_dilation = 1 + conv1_kernel_size = 12 + conv1_stride = 1 + + l_out = ((sequence_length + 2*conv1_padding - conv1_dilation*(conv1_kernel_size-1) - 1)/conv1_stride + 1) + maxpool_padding = 0 + maxpool_dilation = 1 + maxpool_stride = 5 + maxpool_kernel_size = 12 + max_pool_output = int((l_out+2*maxpool_padding-maxpool_dilation*(maxpool_kernel_size-1)-1)/maxpool_stride+1) + + max_pool_output *= hidden_size + + return max_pool_output + + +# ---------------------------------------------------------------------------------------------------- +# ------------------- The following functions were retrieved from repDNA package --------------------- +# ---------------------------------------------------------------------------------------------------- + +def ready_acc(k, phyche_index=None, all_property=False, extra_phyche_index=None): + """Public function for get sequence_list and phyche_value. 
+ """ + if phyche_index is None: + phyche_index = [] + if extra_phyche_index is None: + extra_phyche_index = {} + phyche_value = generate_phyche_value(k, phyche_index, all_property, extra_phyche_index) + + return phyche_value + + +def generate_phyche_value(k, phyche_index=None, all_property=False, extra_phyche_index=None): + """Combine the user selected phyche_list, is_all_property and extra_phyche_index to a new standard phyche_value.""" + if phyche_index is None: + phyche_index = [] + if extra_phyche_index is None: + extra_phyche_index = {} + + diphyche_list = ['Base stacking', 'Protein induced deformability', 'B-DNA twist', 'Dinucleotide GC Content', + 'A-philicity', 'Propeller twist', 'Duplex stability:(freeenergy)', + 'Duplex tability(disruptenergy)', 'DNA denaturation', 'Bending stiffness', 'Protein DNA twist', + 'Stabilising energy of Z-DNA', 'Aida_BA_transition', 'Breslauer_dG', 'Breslauer_dH', + 'Breslauer_dS', 'Electron_interaction', 'Hartman_trans_free_energy', 'Helix-Coil_transition', + 'Ivanov_BA_transition', 'Lisser_BZ_transition', 'Polar_interaction', 'SantaLucia_dG', + 'SantaLucia_dH', 'SantaLucia_dS', 'Sarai_flexibility', 'Stability', 'Stacking_energy', + 'Sugimoto_dG', 'Sugimoto_dH', 'Sugimoto_dS', 'Watson-Crick_interaction', 'Twist', 'Tilt', + 'Roll', 'Shift', 'Slide', 'Rise'] + triphyche_list = ['Dnase I', 'Bendability (DNAse)', 'Bendability (consensus)', 'Trinucleotide GC Content', + 'Nucleosome positioning', 'Consensus_roll', 'Consensus-Rigid', 'Dnase I-Rigid', 'MW-Daltons', + 'MW-kg', 'Nucleosome', 'Nucleosome-Rigid'] + + # Set and check physicochemical properties. 
+ if 2 == k: + if all_property is True: + phyche_index = diphyche_list + else: + for e in phyche_index: + if e not in diphyche_list: + raise ValueError(" ".join(["Sorry, the physicochemical properties", e, "is not exit."])) + elif 3 == k: + if all_property is True: + phyche_index = triphyche_list + else: + for e in phyche_index: + if e not in triphyche_list: + raise ValueError(" ".join(["Sorry, the physicochemical properties", e, "is not exit."])) + + return extend_phyche_index(get_phyche_index(k, phyche_index), extra_phyche_index) + + +def get_phyche_index(k, phyche_list): + """get phyche_value according phyche_list.""" + phyche_value = {} + if 0 == len(phyche_list): + for nucleotide in make_kmer_list(k): + phyche_value[nucleotide] = [] + return phyche_value + + nucleotide_phyche_value = get_phyche_factor_dic(k) + for nucleotide in make_kmer_list(k): + if nucleotide not in phyche_value: + phyche_value[nucleotide] = [] + for e in nucleotide_phyche_value[nucleotide]: + if e[0] in phyche_list: + phyche_value[nucleotide].append(e[1]) + + return phyche_value + + +def extend_phyche_index(original_index, extend_index): + """Extend {phyche:[value, ... 
]}""" + if extend_index is None or len(extend_index) == 0: + return original_index + for key in list(original_index.keys()): + original_index[key].extend(extend_index[key]) + return original_index + + +def get_phyche_factor_dic(k): + """Get all {nucleotide: [(phyche, value), ...]} dict.""" + full_path = os.path.realpath(__file__) + if 2 == k: + file_path = "%s/data/mmc3.data" % os.path.dirname(full_path) + elif 3 == k: + file_path = "%s/data/mmc4.data" % os.path.dirname(full_path) + else: + sys.stderr.write("The k can just be 2 or 3.") + sys.exit(0) + + try: + with open(file_path, 'rb') as f: + phyche_factor_dic = pickle.load(f) + except: + with open(file_path, 'r') as f: + phyche_factor_dic = pickle.load(f) + + return phyche_factor_dic + + +def make_ac_vector(sequence_list, lag, phyche_value, k): + phyche_values = list(phyche_value.values()) + len_phyche_value = len(phyche_values[0]) + + vec_ac = [] + for sequence in sequence_list: + len_seq = len(sequence) + each_vec = [] + + for temp_lag in range(1, lag + 1): + for j in range(len_phyche_value): + + # Calculate average phyche_value for a nucleotide. + ave_phyche_value = 0.0 + for i in range(len_seq - temp_lag - k + 1): + nucleotide = sequence[i: i + k] + ave_phyche_value += float(phyche_value[nucleotide][j]) + ave_phyche_value /= len_seq + + # Calculate the vector. 
+ temp_sum = 0.0 + for i in range(len_seq - temp_lag - k + 1): + nucleotide1 = sequence[i: i + k] + nucleotide2 = sequence[i + temp_lag: i + temp_lag + k] + temp_sum += (float(phyche_value[nucleotide1][j]) - ave_phyche_value) * ( + float(phyche_value[nucleotide2][j])) + + try: + val = round(temp_sum / (len_seq - temp_lag - k + 1), 3) + except ZeroDivisionError: + val = 0.0 + each_vec.append(val) + vec_ac.append(each_vec) + + return vec_ac + + +def make_cc_vector(sequence_list, lag, phyche_value, k): + phyche_values = list(phyche_value.values()) + len_phyche_value = len(phyche_values[0]) + + vec_cc = [] + for sequence in sequence_list: + len_seq = len(sequence) + each_vec = [] + + for temp_lag in range(1, lag + 1): + for i1 in range(len_phyche_value): + for i2 in range(len_phyche_value): + if i1 != i2: + # Calculate average phyche_value for a nucleotide. + ave_phyche_value1 = 0.0 + ave_phyche_value2 = 0.0 + for j in range(len_seq - temp_lag - k + 1): + nucleotide = sequence[j: j + k] + ave_phyche_value1 += float(phyche_value[nucleotide][i1]) + ave_phyche_value2 += float(phyche_value[nucleotide][i2]) + ave_phyche_value1 /= len_seq + ave_phyche_value2 /= len_seq + + # Calculate the vector. 
+ temp_sum = 0.0 + for j in range(len_seq - temp_lag - k + 1): + nucleotide1 = sequence[j: j + k] + nucleotide2 = sequence[j + temp_lag: j + temp_lag + k] + temp_sum += (float(phyche_value[nucleotide1][i1]) - ave_phyche_value1) * \ + (float(phyche_value[nucleotide2][i2]) - ave_phyche_value2) + try: + val = round(temp_sum / (len_seq - temp_lag - k + 1), 3) + except ZeroDivisionError: + val = 0.0 + each_vec.append(val) + + vec_cc.append(each_vec) + + return vec_cc diff --git a/src/propythia/adjuv_functions/features_functions/binary.py b/src/propythia/adjuv_functions/features_functions/binary.py index 1d09dd2..be51b0a 100644 --- a/src/propythia/adjuv_functions/features_functions/binary.py +++ b/src/propythia/adjuv_functions/features_functions/binary.py @@ -20,7 +20,7 @@ import os import numpy as np import getopt -from keras.utils import to_categorical +from tensorflow.keras.utils import to_categorical def bin_aa_ct(seq, alphabet = "ARNDCEQGHILKMFPSTWYV"): diff --git a/src/propythia/adjuv_functions/ml_deep/parameters_deep.py b/src/propythia/adjuv_functions/ml_deep/parameters_deep.py index 1536121..3fd7cd1 100644 --- a/src/propythia/adjuv_functions/ml_deep/parameters_deep.py +++ b/src/propythia/adjuv_functions/ml_deep/parameters_deep.py @@ -15,6 +15,7 @@ ############################################################################## """ import keras +import tensorflow as tf # https://blog.usejournal.com/a-comparison-of-grid-search-and-randomized-search-using-scikit-learn-29823179bc85 # https://blog.usejournal.com/a-comparison-of-grid-search-and-randomized-search-using-scikit-learn-29823179bc85 @@ -46,14 +47,15 @@ # OPTIMIZERS -lr_schedule = keras.optimizers.schedules.ExponentialDecay(initial_learning_rate=1e-2, decay_steps=10000, decay_rate=0.9) -opt1 = keras.optimizers.SGD(learning_rate=lr_schedule) -opt2 = keras.optimizers.SGD(learning_rate=0.001) -opt3 = keras.optimizers.Adam(learning_rate=lr_schedule) -opt4 = keras.optimizers.Adam(learning_rate=0.001) -opt5 = 
keras.optimizers.Adam(learning_rate=0.01) -opt6 = keras.optimizers.RMSprop(learning_rate=lr_schedule) -opt7 = keras.optimizers.RMSprop(learning_rate=0.001) +lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay( + initial_learning_rate=1e-2, decay_steps=10000, decay_rate=0.9) +opt1 = tf.keras.optimizers.SGD(learning_rate=lr_schedule) +opt2 = tf.keras.optimizers.SGD(learning_rate=0.001) +opt3 = tf.keras.optimizers.Adam(learning_rate=lr_schedule) +opt4 = tf.keras.optimizers.Adam(learning_rate=0.001) +opt5 = tf.keras.optimizers.Adam(learning_rate=0.01) +opt6 = tf.keras.optimizers.RMSprop(learning_rate=lr_schedule) +opt7 = tf.keras.optimizers.RMSprop(learning_rate=0.001) def param_deep(): @@ -77,7 +79,7 @@ def param_deep(): 'l2': [0, 1e-3, 1e-4, 1e-5], # 'batch_size': [128, 256, 512,1024], }], - }, + }, 'run_dnn_embedding': {'param_grid': [{ @@ -99,7 +101,7 @@ def param_deep(): 'l2': [0, 1e-3, 1e-4, 1e-5], # 'batch_size': [128, 256, 512,1024], }], - }, + }, 'run_lstm_simple': {'param_grid': [{ @@ -125,7 +127,7 @@ def param_deep(): 'dropout_rate_dense': [(0.0,), (0.1,), (0.2,), (0.25,), (0.3,), (0.35,), (0.4,), (0.5,)], # 'batch_size': [256, 512,1024], }], - }, + }, 'run_lstm_embedding': {'param_grid': @@ -154,7 +156,7 @@ def param_deep(): 'dropout_rate_dense': [(0.0,), (0.1,), (0.2,), (0.25,), (0.3,), (0.35,), (0.4,), (0.5,)], # 'batch_size': [256, 512,1024], }], - }, + }, 'run_cnn_1D': {'param_grid': [{ @@ -179,7 +181,7 @@ def param_deep(): 'l1': [0, 1e-4, 1e-5], 'l2': [0, 1e-4, 1e-5], }], - }, + }, 'run_cnn_2D': {'param_grid': @@ -207,7 +209,7 @@ def param_deep(): 'l1': [0, 1e-4, 1e-5], 'l2': [0, 1e-4, 1e-5], }], - }, + }, 'run_cnn_lstm': {'param_grid': [{ @@ -238,6 +240,6 @@ def param_deep(): 'dense_layers': [(32,), (64,), (64, 32)], 'dropout_rate_dense': [(0.0,), (0.1,), (0.2,), (0.3,), (0.4,), (0.5,)] }], - }, + }, } return param diff --git a/src/propythia/manifold.py b/src/propythia/manifold.py index 1363401..ef0cb9a 100644 --- a/src/propythia/manifold.py 
+++ b/src/propythia/manifold.py @@ -21,7 +21,7 @@ import seaborn as sns import matplotlib.pyplot as plt import umap -import umap.plot +#import umap.plot from sklearn.manifold import TSNE from propythia.adjuv_functions.ml_deep.utils import timer sns.set()