```bash
pip install transformers==4.5.1
```
```bash
export task=QYTM
export model_name=RE2
CUDA_VISIBLE_DEVICES=1 python src/DL_model/${model_name}/main.py \
--task ${task} \
--model_name ${model_name} \
--output_dir ./output/${task}_${model_name}_20211007 \
--tensorboardx_path ./output/logs/runs/${task}_${model_name}_20211007 \
--train_path ./datasets/${task}/train.csv \
--dev_path ./datasets/${task}/dev.csv \
--test_path ./datasets/${task}/test.csv \
--label_file_level_dir ./datasets/${task}/labels_level.txt \
--label2freq_level_dir ./datasets/${task}/label2freq_level.json \
--vocab_file ./resources/word2vec/vocab.txt \
--w2v_file ./resources/word2vec/token_vec_300.bin \
--train_batch_size 256 \
--eval_batch_size 256 \
--do_train \
--do_eval \
--do_predict \
--num_train_epochs 50
```
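Once training has finished, the same entry point can be reused for evaluation or prediction alone by dropping `--do_train`. A minimal sketch, assuming `main.py` restores the trained checkpoint from `--output_dir` when training is skipped (this behavior is not confirmed here; check the argument handling in `main.py`):

```bash
# Hypothetical eval/predict-only run; assumes main.py reloads the model
# saved under --output_dir when --do_train is omitted.
CUDA_VISIBLE_DEVICES=1 python src/DL_model/${model_name}/main.py \
--task ${task} \
--model_name ${model_name} \
--output_dir ./output/${task}_${model_name}_20211007 \
--dev_path ./datasets/${task}/dev.csv \
--test_path ./datasets/${task}/test.csv \
--label_file_level_dir ./datasets/${task}/labels_level.txt \
--label2freq_level_dir ./datasets/${task}/label2freq_level.json \
--vocab_file ./resources/word2vec/vocab.txt \
--w2v_file ./resources/word2vec/token_vec_300.bin \
--eval_batch_size 256 \
--do_eval \
--do_predict
```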
| model | LCQMC (F1-score) |
| --- | --- |
| DSSM | 0.7114 |
| ABCNN | 0.7743 |
| BIMPM | 0.8590 |
| DecomposableAttention | 0.7526 |
| ESIM | 0.8573 |
| RE2 | 0.8182 |
| SiaGRU | 0.8302 |
| Bert | 0.8817 |
| RoBerta | 0.8897 |
| XlNet | 0.7828 |
| ELECTRA | 1 |
| DistilBert | ?? |
| AlBert | ?? |
| NEZHA | 1 |
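Each row above corresponds to a per-model entry point under `src/DL_model/`, so the non-pretrained baselines can be swept with a simple loop. A minimal sketch, assuming each listed name matches a directory under `src/DL_model/` and that the LCQMC data is laid out under `./datasets/` like the other tasks (both assumptions, not confirmed here):

```bash
# Hypothetical sweep over several models on one task; assumes each model
# name is a directory under src/DL_model/ and that ./datasets/LCQMC
# follows the same train/dev/test + label-file layout as the other tasks.
export task=LCQMC
for model_name in DSSM ABCNN BIMPM DecomposableAttention ESIM RE2 SiaGRU; do
CUDA_VISIBLE_DEVICES=1 python src/DL_model/${model_name}/main.py \
--task ${task} \
--model_name ${model_name} \
--output_dir ./output/${task}_${model_name} \
--train_path ./datasets/${task}/train.csv \
--dev_path ./datasets/${task}/dev.csv \
--test_path ./datasets/${task}/test.csv \
--label_file_level_dir ./datasets/${task}/labels_level.txt \
--label2freq_level_dir ./datasets/${task}/label2freq_level.json \
--vocab_file ./resources/word2vec/vocab.txt \
--w2v_file ./resources/word2vec/token_vec_300.bin \
--train_batch_size 256 \
--eval_batch_size 256 \
--do_train --do_eval --do_predict \
--num_train_epochs 50
done
```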