From 65bb7b02b075a8c225e8663ee65d7ac38789e595 Mon Sep 17 00:00:00 2001
From: chcwww
Date: Thu, 12 Feb 2026 05:04:55 +0000
Subject: [PATCH 1/5] start fixing the workflow

---
 .github/workflows/basic_test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/basic_test.yml b/.github/workflows/basic_test.yml
index c786801f..2d756dcf 100644
--- a/.github/workflows/basic_test.yml
+++ b/.github/workflows/basic_test.yml
@@ -1,9 +1,9 @@
 name: Basic Test
 
 on:
-  pull_request:
+  push:
     branches:
-      - master
+      - fix_wf
 
 jobs:
   test:

From 5557a6a8cb0d1e496b5069fee72c26e0ea973740 Mon Sep 17 00:00:00 2001
From: chcwww
Date: Thu, 12 Feb 2026 06:21:33 +0000
Subject: [PATCH 2/5] set weights_only to False

---
 libmultilabel/nn/attentionxml.py | 4 +++-
 torch_trainer.py                 | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/libmultilabel/nn/attentionxml.py b/libmultilabel/nn/attentionxml.py
index 16c02b7c..a138a68e 100644
--- a/libmultilabel/nn/attentionxml.py
+++ b/libmultilabel/nn/attentionxml.py
@@ -287,7 +287,7 @@ def fit(self, datasets):
         logger.info(f"Finish training level 0")
         logger.info(f"Best model loaded from {best_model_path}")
 
-        model_0 = Model.load_from_checkpoint(best_model_path)
+        model_0 = Model.load_from_checkpoint(best_model_path, weights_only=False)
 
         logger.info(
             f"Predicting clusters by level-0 model. We then select {self.beam_width} clusters for each instance and "
@@ -422,11 +422,13 @@ def test(self, dataset):
         model_0 = Model.load_from_checkpoint(
             self.get_best_model_path(level=0),
             save_k_predictions=self.beam_width,
+            weights_only=False,
         )
         model_1 = PLTModel.load_from_checkpoint(
             self.get_best_model_path(level=1),
             save_k_predictions=self.save_k_predictions,
             metrics=self.metrics,
+            weights_only=False,
         )
 
         word_dict_path = os.path.join(os.path.dirname(self.get_best_model_path(level=1)), self.WORD_DICT_NAME)

diff --git a/torch_trainer.py b/torch_trainer.py
index fba9f68c..8b90a706 100644
--- a/torch_trainer.py
+++ b/torch_trainer.py
@@ -150,7 +150,7 @@ def _setup_model(
 
         if checkpoint_path is not None:
             logging.info(f"Loading model from `{checkpoint_path}` with the previously saved hyper-parameter...")
-            self.model = Model.load_from_checkpoint(checkpoint_path, log_path=log_path)
+            self.model = Model.load_from_checkpoint(checkpoint_path, log_path=log_path, weights_only=False)
             word_dict_path = os.path.join(os.path.dirname(checkpoint_path), self.WORD_DICT_NAME)
             if os.path.exists(word_dict_path):
                 with open(word_dict_path, "rb") as f:
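Note on the weights_only change above: PyTorch 2.6 flipped the default of torch.load's weights_only argument from False to True, and the new default refuses to unpickle arbitrary Python objects. Lightning checkpoints store pickled hyperparameters alongside the state dict, so loading them under the new default fails. Recent Lightning releases forward a weights_only keyword from load_from_checkpoint down to torch.load, which is what this patch relies on; pass False only for checkpoints you trust, since unpickling can execute code. A minimal sketch of the underlying call, with "model.ckpt" as a placeholder path:

    import torch

    # PyTorch >= 2.6 defaults to weights_only=True, which rejects pickled
    # Python objects such as Lightning's saved hyperparameters.
    ckpt = torch.load("model.ckpt", weights_only=False)  # trusted file only
    state_dict = ckpt["state_dict"]  # the raw tensors, as before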
From ae94b8bc88770e41b418ed07a34265134c49f1b1 Mon Sep 17 00:00:00 2001
From: chcwww
Date: Thu, 12 Feb 2026 07:15:50 +0000
Subject: [PATCH 3/5] remove torchscript

---
 libmultilabel/nn/networks/bert_attention.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libmultilabel/nn/networks/bert_attention.py b/libmultilabel/nn/networks/bert_attention.py
index 078bc207..983dc80e 100644
--- a/libmultilabel/nn/networks/bert_attention.py
+++ b/libmultilabel/nn/networks/bert_attention.py
@@ -40,7 +40,7 @@ def __init__(
 
         self.lm = AutoModel.from_pretrained(
             lm_weight,
-            torchscript=True,
+            # torchscript=True,
             hidden_dropout_prob=encoder_hidden_dropout,
             attention_probs_dropout_prob=encoder_attention_dropout,
         )

From d831cdb75ed4c1d43b1a474ceea586c35f36464d Mon Sep 17 00:00:00 2001
From: chcwww
Date: Thu, 12 Feb 2026 07:21:51 +0000
Subject: [PATCH 4/5] remove another torchscript

---
 libmultilabel/nn/networks/bert.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libmultilabel/nn/networks/bert.py b/libmultilabel/nn/networks/bert.py
index ab7cc221..a6120ba3 100644
--- a/libmultilabel/nn/networks/bert.py
+++ b/libmultilabel/nn/networks/bert.py
@@ -34,7 +34,7 @@ def __init__(
             hidden_dropout_prob=encoder_hidden_dropout,
             attention_probs_dropout_prob=encoder_attention_dropout,
             classifier_dropout=post_encoder_dropout,
-            torchscript=True,
+            # torchscript=True,
         )
 
     def forward(self, input):

From 2e9719ff164dd104edf6c2f1f677b6b13e3bac8c Mon Sep 17 00:00:00 2001
From: chcwww
Date: Fri, 13 Feb 2026 11:31:23 +0000
Subject: [PATCH 5/5] remove "torchscript=True" since it is no longer
 supported by "AutoModel.from_pretrained()"

---
 .github/workflows/basic_test.yml            | 4 ++--
 libmultilabel/nn/networks/bert.py           | 1 -
 libmultilabel/nn/networks/bert_attention.py | 1 -
 3 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/basic_test.yml b/.github/workflows/basic_test.yml
index 2d756dcf..c786801f 100644
--- a/.github/workflows/basic_test.yml
+++ b/.github/workflows/basic_test.yml
@@ -1,9 +1,9 @@
 name: Basic Test
 
 on:
-  push:
+  pull_request:
     branches:
-      - fix_wf
+      - master
 
 jobs:
   test:

diff --git a/libmultilabel/nn/networks/bert.py b/libmultilabel/nn/networks/bert.py
index a6120ba3..8d88ebbb 100644
--- a/libmultilabel/nn/networks/bert.py
+++ b/libmultilabel/nn/networks/bert.py
@@ -34,7 +34,6 @@ def __init__(
             hidden_dropout_prob=encoder_hidden_dropout,
             attention_probs_dropout_prob=encoder_attention_dropout,
             classifier_dropout=post_encoder_dropout,
-            # torchscript=True,
         )
 
     def forward(self, input):

diff --git a/libmultilabel/nn/networks/bert_attention.py b/libmultilabel/nn/networks/bert_attention.py
index 983dc80e..c820fee8 100644
--- a/libmultilabel/nn/networks/bert_attention.py
+++ b/libmultilabel/nn/networks/bert_attention.py
@@ -40,7 +40,6 @@ def __init__(
 
         self.lm = AutoModel.from_pretrained(
             lm_weight,
-            # torchscript=True,
             hidden_dropout_prob=encoder_hidden_dropout,
             attention_probs_dropout_prob=encoder_attention_dropout,
         )
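Note on the torchscript removal in patches 3-5: per the final commit message, AutoModel.from_pretrained() no longer supports torchscript=True. One behavioral difference worth checking when dropping the flag (a general observation about transformers, not stated in the patch): config.torchscript=True forced models to return plain tuples, while without it the encoder returns a ModelOutput. ModelOutput still supports integer indexing, so call sites that read outputs[0] keep working. A sketch of that equivalence, using "bert-base-uncased" as a stand-in for lm_weight:

    import torch
    from transformers import AutoModel

    model = AutoModel.from_pretrained("bert-base-uncased")
    model.eval()

    input_ids = torch.tensor([[101, 7592, 2088, 102]])  # toy token ids
    with torch.no_grad():
        out = model(input_ids=input_ids)

    # ModelOutput allows both index and attribute access, so code written
    # against the old tuple outputs (out[0]) continues to work unchanged.
    assert torch.equal(out[0], out.last_hidden_state)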