Update config.json
config.json: CHANGED (+3, -5)
@@ -4,9 +4,6 @@
   "activation_function": "gelu",
   "add_bias_logits": false,
   "add_final_layer_norm": true,
-  "architectures": [
-    "BartForConditionalGeneration"
-  ],
   "attention_dropout": 0.0,
   "bos_token_id": 0,
   "classif_dropout": 0.0,
@@ -21,6 +18,7 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 12,
   "eos_token_id": 2,
+  "extra_pos_embeddings": 2,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -33,13 +31,13 @@
     "LABEL_1": 1,
     "LABEL_2": 2
   },
+  "max_length": 1000,
+  "decoder_start_token_id": 250020,
   "max_position_embeddings": 1024,
   "model_type": "mbart",
   "normalize_before": true,
-  "variant": "prelayernorm",
   "normalize_embedding": true,
   "num_beams": 5,
-  "max_length": 1000,
   "num_hidden_layers": 12,
   "output_past": true,
   "pad_token_id": 1,
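For context (not part of the commit): after this change the generation defaults live in `config.json` itself, so `generate()` can pick up `decoder_start_token_id` (250020, which appears to be one of the appended language-code tokens in the mBART vocabulary), `num_beams=5`, and `max_length=1000` without extra arguments. A minimal sketch of loading a checkpoint that uses this config; the repo id below is a hypothetical placeholder, since the actual model repo is not named on this page:

```python
# Minimal sketch (assumption: an mBART checkpoint shipping this config.json).
# The repo id is a hypothetical placeholder, not the real repository name.
from transformers import MBartForConditionalGeneration, MBartTokenizer

model_id = "your-org/your-mbart-checkpoint"  # hypothetical placeholder
model = MBartForConditionalGeneration.from_pretrained(model_id)
tokenizer = MBartTokenizer.from_pretrained(model_id)

batch = tokenizer("An example source sentence.", return_tensors="pt")

# With this config.json, generate() reads decoder_start_token_id=250020,
# num_beams=5 and max_length=1000 from the model config, so no extra
# generation arguments are strictly required here.
output_ids = model.generate(**batch)
print(tokenizer.batch_decode(output_ids, skip_special_tokens=True))
```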