Training in progress, step 500
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "Alfahluzi/bert2bert-extreme
+  "_name_or_path": "Alfahluzi/bert2bert-extreme",
   "architectures": [
     "EncoderDecoderModel"
   ],
@@ -173,5 +173,5 @@
   "model_type": "encoder-decoder",
   "pad_token_id": 0,
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.39.0"
 }
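The commit pins "transformers_version" to 4.39.0 and records "Alfahluzi/bert2bert-extreme" as the model's "_name_or_path". A minimal loading sketch, assuming the checkpoint is fetched by that repo id (the actual repository for this training run may differ):

from transformers import EncoderDecoderModel, BertTokenizer

repo_id = "Alfahluzi/bert2bert-extreme"  # assumed from "_name_or_path" in config.json

# config.json lists "EncoderDecoderModel" under "architectures",
# and tokenizer_config.json names BertTokenizer as the tokenizer class.
tokenizer = BertTokenizer.from_pretrained(repo_id)
model = EncoderDecoderModel.from_pretrained(repo_id)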
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2a74c3caa7533270e342933a4b17c19dddd859795fbdbea7017fb792c59f3171
 size 1002850732
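model.safetensors is stored through git-LFS, so the file tracked in git is only a pointer carrying the blob's sha256 oid and size. A minimal sketch of verifying a downloaded copy against that pointer, assuming a local path of model.safetensors:

import hashlib
import os

path = "model.safetensors"  # assumed local download location
expected_oid = "2a74c3caa7533270e342933a4b17c19dddd859795fbdbea7017fb792c59f3171"
expected_size = 1002850732

# Hash the file in 1 MiB chunks and compare with the values from the LFS pointer.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size
assert h.hexdigest() == expected_oid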
runs/Mar22_06-17-18_a2bb6f6474da/events.out.tfevents.1711088243.a2bb6f6474da.395.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a656eaa9fcbec21abc6d671846cc51a7873e6041c849c6a4f32683b945fd3174
+size 9127
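The added file is a TensorBoard event log written during this training run; once fetched from LFS it can be browsed with tensorboard --logdir runs. A minimal sketch of reading it programmatically, assuming TensorBoard's event_accumulator API and a typical Trainer tag such as "train/loss" (the actual tags may differ):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at the run directory (or at the tfevents file itself) after downloading it.
acc = EventAccumulator("runs/Mar22_06-17-18_a2bb6f6474da")
acc.Reload()

print(acc.Tags())                        # lists the scalar/tensor tags in the log
for event in acc.Scalars("train/loss"):  # assumed tag name; adjust to what Tags() reports
    print(event.step, event.value)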
tokenizer_config.json
CHANGED
@@ -46,12 +46,19 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
+  "max_length": 512,
   "model_max_length": 1000000000000000019884624838656,
   "never_split": null,
+  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "sep_token": "[SEP]",
+  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
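The new keys record the padding and truncation defaults used during training: max_length 512, right-side padding and truncation, and the longest_first truncation strategy. A minimal sketch of how those fields map onto an explicit tokenizer call, assuming the same repo id as above:

from transformers import BertTokenizer

tokenizer = BertTokenizer.from_pretrained("Alfahluzi/bert2bert-extreme")  # assumed repo id

encoded = tokenizer(
    "example input text",
    max_length=512,              # "max_length": 512
    padding="max_length",        # pad side comes from "padding_side": "right"
    truncation="longest_first",  # "truncation_strategy": "longest_first"
    stride=0,                    # "stride": 0
    return_tensors="pt",
)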
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:8c9603d93423284fb32e8c70867779eb602d91c60b2c3152b61ee970543bc601
+size 5112
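training_args.bin is the pickled TrainingArguments object that the Trainer saves alongside each checkpoint. A minimal inspection sketch, assuming transformers is installed so the pickle can resolve, and a recent torch where weights_only must be disabled to unpickle arbitrary objects:

import torch

# Unpickle the saved training arguments (not model weights, despite the .bin suffix).
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)  # e.g. TrainingArguments or Seq2SeqTrainingArguments
print(args.output_dir, args.learning_rate, args.per_device_train_batch_size)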