TransQuest committed
Commit c566f10
1 Parent(s): 306b748
from Google Colab

Files changed:
- config.json +6 -33
- model_args.json +1 -1
- modules.json +4 -4
- pytorch_model.bin +2 -2
- sentence_bert_config.json +2 -1
- similarity_evaluation_eval_results.csv +19 -0
- special_tokens_map.json +1 -1
- tokenizer.json +0 -0
- tokenizer_config.json +1 -1
config.json
CHANGED
@@ -1,55 +1,28 @@
 {
-  "
+  "_name_or_path": "xlm-roberta-large",
   "architectures": [
     "XLMRobertaModel"
   ],
   "attention_probs_dropout_prob": 0.1,
-  "bad_words_ids": null,
   "bos_token_id": 0,
-  "decoder_start_token_id": null,
-  "do_sample": false,
-  "early_stopping": false,
   "eos_token_id": 2,
-  "
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 1024,
-  "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1"
-  },
   "initializer_range": 0.02,
   "intermediate_size": 4096,
-  "is_decoder": false,
-  "is_encoder_decoder": false,
-  "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1
-  },
   "layer_norm_eps": 1e-05,
-  "length_penalty": 1.0,
-  "max_length": 20,
   "max_position_embeddings": 514,
-  "min_length": 0,
   "model_type": "xlm-roberta",
-  "no_repeat_ngram_size": 0,
   "num_attention_heads": 16,
-  "num_beams": 1,
   "num_hidden_layers": 24,
-  "num_return_sequences": 1,
-  "output_attentions": false,
-  "output_hidden_states": false,
   "output_past": true,
   "pad_token_id": 1,
-  "
-  "
-  "
-  "task_specific_params": null,
-  "temperature": 1.0,
-  "top_k": 50,
-  "top_p": 1.0,
-  "torchscript": false,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.9.0",
   "type_vocab_size": 1,
-  "
+  "use_cache": true,
   "vocab_size": 250002
 }
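The keys dropped from config.json are mostly generation-time defaults that newer Transformers releases no longer serialize, which is why the file shrinks from 55 to 28 lines; the new keys record the base checkpoint, dtype and library version. As a minimal sketch (not part of the commit), the updated file can be inspected with the Transformers config API, assuming config.json has been downloaded next to the script:

# Minimal sketch: inspect the updated config.json from a local download.
from transformers import XLMRobertaConfig

config = XLMRobertaConfig.from_json_file("config.json")  # local path is an assumption
print(config.model_type)         # "xlm-roberta"
print(config.hidden_size)        # 1024
print(config.num_hidden_layers)  # 24
print(config.vocab_size)         # 250002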
model_args.json
CHANGED
@@ -1 +1 @@
-
{"adam_epsilon": 1e-08, "best_model_dir": "temp/outputs/best_model", "cache_dir": "temp/cache_dir/", "config": {}, "cosine_schedule_num_cycles": 0.5, "custom_layer_parameters": [], "custom_parameter_groups": [], "dataloader_num_workers": 0, "do_lower_case": false, "dynamic_quantize": false, "early_stopping_consider_epochs": false, "early_stopping_delta": 0, "early_stopping_metric": "eval_loss", "early_stopping_metric_minimize": true, "early_stopping_patience": 10, "encoding": null, "adafactor_eps": [1e-30, 0.001], "adafactor_clip_threshold": 1.0, "adafactor_decay_rate": -0.8, "adafactor_beta1": null, "adafactor_scale_parameter": true, "adafactor_relative_step": true, "adafactor_warmup_init": true, "eval_batch_size": 8, "evaluate_during_training": true, "evaluate_during_training_silent": true, "evaluate_during_training_steps":
+
{"adam_epsilon": 1e-08, "best_model_dir": "temp/outputs/best_model", "cache_dir": "temp/cache_dir/", "config": {}, "cosine_schedule_num_cycles": 0.5, "custom_layer_parameters": [], "custom_parameter_groups": [], "dataloader_num_workers": 0, "do_lower_case": false, "dynamic_quantize": false, "early_stopping_consider_epochs": false, "early_stopping_delta": 0, "early_stopping_metric": "eval_loss", "early_stopping_metric_minimize": true, "early_stopping_patience": 10, "encoding": null, "adafactor_eps": [1e-30, 0.001], "adafactor_clip_threshold": 1.0, "adafactor_decay_rate": -0.8, "adafactor_beta1": null, "adafactor_scale_parameter": true, "adafactor_relative_step": true, "adafactor_warmup_init": true, "eval_batch_size": 8, "evaluate_during_training": true, "evaluate_during_training_silent": true, "evaluate_during_training_steps": 300, "evaluate_during_training_verbose": true, "evaluate_each_epoch": true, "fp16": false, "gradient_accumulation_steps": 1, "learning_rate": 1e-05, "local_rank": -1, "logging_steps": 300, "manual_seed": 777, "max_grad_norm": 1.0, "max_seq_length": 80, "model_name": null, "model_type": null, "multiprocessing_chunksize": 500, "n_gpu": 1, "no_cache": false, "no_save": false, "not_saved_args": [], "num_train_epochs": 6, "optimizer": "AdamW", "output_dir": "temp/outputs/", "overwrite_output_dir": true, "process_count": 1, "polynomial_decay_schedule_lr_end": 1e-07, "polynomial_decay_schedule_power": 1.0, "quantized_model": false, "reprocess_input_data": true, "save_best_model": true, "save_eval_checkpoints": true, "save_model_every_epoch": true, "save_optimizer_and_scheduler": true, "save_recent_only": true, "save_steps": 300, "scheduler": "linear_schedule_with_warmup", "silent": false, "skip_special_tokens": true, "tensorboard_dir": null, "thread_count": null, "train_batch_size": 8, "train_custom_parameters_only": false, "use_cached_eval_features": false, "use_early_stopping": true, "use_multiprocessing": true, "wandb_kwargs": {}, "wandb_project": null, "warmup_ratio": 0.06, "warmup_steps": 0, "weight_decay": 0, "model_class": "SiameseTransQuestModel", "labels_list": [], "labels_map": {}, "lazy_delimiter": "\t", "lazy_labels_column": 1, "lazy_loading": false, "lazy_loading_start_line": 1, "lazy_text_a_column": null, "lazy_text_b_column": null, "lazy_text_column": 0, "onnx": false, "regression": true, "sliding_window": false, "special_tokens_list": [], "stride": 0.8, "tie_value": 1}
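model_args.json stores the TransQuest training arguments for this run as a single JSON object ("model_class": "SiameseTransQuestModel"). A minimal sketch (standard library only, local file path assumed) for reading back a few of the recorded hyperparameters:

# Minimal sketch: print selected training hyperparameters from model_args.json.
import json

with open("model_args.json") as f:
    args = json.load(f)

for key in ("model_class", "learning_rate", "num_train_epochs",
            "train_batch_size", "max_seq_length", "manual_seed"):
    print(f"{key} = {args[key]}")
# e.g. learning_rate = 1e-05, num_train_epochs = 6, max_seq_length = 80, manual_seed = 777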
modules.json
CHANGED
@@ -2,13 +2,13 @@
   {
     "idx": 0,
     "name": "0",
-    "path": "
-    "type": "transquest.algo.
+    "path": "best_model",
+    "type": "transquest.algo.sentence_level.siamesetransquest.models"
   },
   {
     "idx": 1,
     "name": "1",
-    "path": "
-    "type": "transquest.algo.
+    "path": "best_model",
+    "type": "transquest.algo.sentence_level.siamesetransquest.models"
   }
 ]
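modules.json mirrors the sentence-transformers style module listing: each entry maps a module index to the directory it is loaded from and the Python module that implements it, and after this commit both entries point at best_model and the transquest.algo.sentence_level.siamesetransquest.models namespace. A minimal sketch (local file path assumed) of reading the listing back:

# Minimal sketch: list the modules declared in modules.json.
import json

with open("modules.json") as f:
    modules = json.load(f)

for module in modules:
    print(module["idx"], module["name"], module["path"], module["type"])
# 0 0 best_model transquest.algo.sentence_level.siamesetransquest.models
# 1 1 best_model transquest.algo.sentence_level.siamesetransquest.models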
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:6625d45e72ee0c29df0ad020506c10e306fe141b8373b130d01308164e2c53c7
+size 2239718641
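pytorch_model.bin is tracked through Git LFS, so the repository itself only stores a pointer containing the object's SHA-256 and size; this commit swaps in the pointer for the newly exported weights. A minimal sketch (download path assumed) for checking that a fetched copy matches the pointer:

# Minimal sketch: verify a downloaded pytorch_model.bin against its LFS pointer.
import hashlib
import os

EXPECTED_SHA256 = "6625d45e72ee0c29df0ad020506c10e306fe141b8373b130d01308164e2c53c7"
EXPECTED_SIZE = 2239718641  # bytes, from the pointer file

path = "pytorch_model.bin"  # assumed local download location
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
print("pytorch_model.bin matches the LFS pointer")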
sentence_bert_config.json
CHANGED
@@ -1,3 +1,4 @@
 {
-  "max_seq_length": 80
+  "max_seq_length": 80,
+  "do_lower_case": false
 }
similarity_evaluation_eval_results.csv
ADDED
@@ -0,0 +1,19 @@
epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
0,300,0.43421419544814066,0.46018138167719835,0.36914309423692915,0.41453583919152487,0.3896244846211359,0.43621053468289556,0.399597016611896,0.4600784892921375
0,600,0.10307986928806603,0.1197191734099612,0.10722534580586883,0.11920397783154818,0.111918761893674,0.1205238226483862,0.10430823185133477,0.12140337586383106
0,-1,0.02066399227946609,0.019097977690581868,0.022461476638035858,0.021827284793459356,0.022681537321538367,0.024311063586796775,0.0018542140308502777,-0.0017073278760137683
1,300,-0.03497103572348365,-0.030087564246978907,-0.04126441323163809,-0.0300928739344057,-0.04609513305763243,-0.03442549699191442,-0.07798094022187575,-0.07618454322289714
1,600,-0.06312999778548155,-0.06432206711615747,-0.06935082292823,-0.06438432394455854,-0.07110764929926948,-0.0689239212795308,-0.04750192473402897,0.0008787589168289175
1,-1,-0.02861630346574171,-0.03885946332477979,-0.041127595411819554,-0.03868554260882171,-0.042197039662517456,-0.04061144245888101,-0.018656052759563377,-0.03178433613679866
2,300,-0.06770769691868293,-0.055792996703460024,-0.073782071985126,-0.05591576636704955,-0.07386289154650982,-0.05711580434731939,-0.04973270557817783,-0.02942651732688811
2,600,-0.08521356565010355,-0.07745342698864133,-0.08419595940761458,-0.07744287548426869,-0.08382204705269362,-0.07686976035136324,-0.09048291133813367,-0.08188981063810209
2,-1,-0.08739445578593986,-0.0636199628958587,-0.07455497903944068,-0.06349626925017123,-0.07437910330010956,-0.0636139430132288,0.0017343942509367746,-0.005741432641271282
3,300,0.020404747293811,0.016635297161445572,0.012667614637663714,0.016717825978016417,0.012451967850733797,0.016636047348271486,0.004808503689716194,-0.04528330868689847
3,600,0.06688960735114968,0.06798177636827125,0.0676546569886275,0.06784654249750083,0.06684153532568683,0.0655933706936666,0.031066575253979177,0.03199657140030514
3,-1,0.0716379216707581,0.06843196504447571,0.06928430943460527,0.06758588295486358,0.07000934426009944,0.06989588871977616,0.09533347465672247,0.10862016850219093
4,300,0.042224896400989975,0.05230842540284324,0.04339008344638997,0.05439667802786683,0.04569438838676287,0.05427139491637311,0.05102803396475966,0.050398635000925215
4,600,0.06344181652513446,0.08505596077007163,0.07186093733964342,0.08452730132761789,0.07190029483997941,0.08538661542549807,0.09064125844053054,0.08451141759445105
4,-1,0.03825772706604078,0.05735805147983748,0.04764552031304248,0.057913771353324595,0.04729756230025421,0.05794041281922866,0.07065644294885462,0.07609442312574838
5,300,0.041289881463694764,0.058309096580627266,0.0450898835651977,0.06066337005203183,0.044994214741551006,0.060958289688550805,0.08798193987564669,0.08694820455745349
5,600,0.04117239405994507,0.06471909592953569,0.04644739603289748,0.0638774888604453,0.04627269777043179,0.06364747662461297,0.11826482114821807,0.11584741502542488
5,-1,0.041675565024417036,0.0613703583093194,0.046229896587801494,0.062086633079184016,0.04600136392735565,0.06178136701123135,0.11855747305381929,0.11566922574112626
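similarity_evaluation_eval_results.csv is new in this commit and logs the sentence-similarity evaluation during training: for each epoch it reports Pearson and Spearman correlations under cosine, Euclidean, Manhattan and dot-product scoring at step 300, step 600 and at the end of the epoch (steps = -1). A minimal sketch (pandas and a local copy of the file assumed) for locating the strongest checkpoint by cosine Spearman:

# Minimal sketch: find the evaluation row with the highest cosine Spearman correlation.
import pandas as pd

results = pd.read_csv("similarity_evaluation_eval_results.csv")
best = results.loc[results["cosine_spearman"].idxmax()]
print(best[["epoch", "steps", "cosine_pearson", "cosine_spearman"]])
# In this log the best cosine_spearman (about 0.46) is the very first evaluation, epoch 0 at step 300.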
special_tokens_map.json
CHANGED
@@ -1 +1 @@
-{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_config.json
CHANGED
@@ -1 +1 @@
-{"
+{"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "xlm-roberta-large", "tokenizer_class": "XLMRobertaTokenizer"}
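tokenizer_config.json now records the tokenizer class (XLMRobertaTokenizer), the base checkpoint name and a model_max_length of 512, and, together with the updated special_tokens_map.json, defines <mask> as an AddedToken with lstrip enabled. A minimal sketch (not part of the commit) of loading the tokenizer from a local clone of this repository and truncating to the max_seq_length of 80 recorded in sentence_bert_config.json; the clone path and the presence of all tokenizer files locally are assumptions:

# Minimal sketch: load the tokenizer shipped with this commit and encode one sentence.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/local/clone")  # path is an assumption
encoded = tokenizer(
    "This is a sentence to be scored for translation quality.",
    truncation=True,
    max_length=80,  # matches max_seq_length in sentence_bert_config.json
)
print(tokenizer.mask_token)       # "<mask>"
print(len(encoded["input_ids"]))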