Upload model
#2
by cmama002 - opened
- config.json +6 -2
- generation_config.json +9 -0
- pytorch_model.bin +2 -2
config.json
CHANGED
@@ -13,6 +13,7 @@
     "architectures": null,
     "attention_dropout": 0.0,
     "bad_words_ids": null,
+    "begin_suppress_tokens": null,
     "bos_token_id": 0,
     "chunk_size_feed_forward": 0,
     "classifier_dropout": 0.0,
@@ -71,6 +72,7 @@
     "return_dict_in_generate": false,
     "scale_embedding": true,
     "sep_token_id": null,
+    "suppress_tokens": null,
     "task_specific_params": null,
     "temperature": 1.0,
     "tf_legacy_loss": false,
@@ -81,7 +83,7 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.27.0.dev0",
     "typical_p": 1.0,
     "use_bfloat16": false,
     "use_cache": true,
@@ -93,6 +95,7 @@
     "architectures": null,
     "attention_probs_dropout_prob": 0.0,
     "bad_words_ids": null,
+    "begin_suppress_tokens": null,
     "bos_token_id": null,
     "chunk_size_feed_forward": 0,
     "cross_attention_hidden_size": null,
@@ -165,6 +168,7 @@
     "return_dict": true,
     "return_dict_in_generate": false,
     "sep_token_id": null,
+    "suppress_tokens": null,
     "task_specific_params": null,
     "temperature": 1.0,
     "tf_legacy_loss": false,
@@ -175,7 +179,7 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.27.0.dev0",
     "typical_p": 1.0,
     "use_absolute_embeddings": false,
     "use_bfloat16": false,
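The two added keys, `begin_suppress_tokens` and `suppress_tokens`, are generation parameters that recent transformers releases serialize into each sub-config; both are null here, so nothing is suppressed by default. A minimal sketch of what the two keys control at generation time, assuming transformers >= 4.27 (the token ids, `model`, and `inputs` below are hypothetical, for illustration only):

```python
from transformers import GenerationConfig

# Both new keys default to null (None): no ids are suppressed.
gen = GenerationConfig(
    suppress_tokens=[42],       # hypothetical id: masked at every decoding step
    begin_suppress_tokens=[7],  # hypothetical id: masked only at the first step
)
# outputs = model.generate(**inputs, generation_config=gen)
```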
generation_config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "forced_eos_token_id": 2,
+  "max_length": 128,
+  "pad_token_id": 1,
+  "transformers_version": "4.27.0.dev0"
+}
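Recent transformers releases split generation defaults out of config.json into this separate generation_config.json, which `generate()` picks up automatically. A minimal sketch of loading the new file and checking the values added above (`user/model` is a placeholder for this repository's id, not taken from the PR):

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("user/model")  # placeholder repo id

# These mirror the values added in this file.
assert gen.bos_token_id == 0
assert gen.eos_token_id == 2
assert gen.forced_eos_token_id == 2
assert gen.max_length == 128
assert gen.pad_token_id == 1
```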
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:1e0a873f56e41c22671d8d5e4ed115eb52b90765475b933467c62bcb5fe258d3
+size 809200857