Ontocord.AI
committed on
Commit · 2631957
1 Parent(s): 96cd4f9
Upload config.json with huggingface_hub
config.json +3 -4
config.json CHANGED

@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "TehVenom/MPT-7b-WizardLM_Uncensored-Storywriter-Merge",
   "architectures": [
     "MPTForCausalLM"
   ],
@@ -16,8 +15,8 @@
     "softmax_scale": null
   },
   "auto_map": {
-    "AutoConfig": "
-    "AutoModelForCausalLM": "
+    "AutoConfig": "configuration_mpt.MPTConfig",
+    "AutoModelForCausalLM": "modeling_mpt.MPTForCausalLM"
   },
   "d_model": 4096,
   "emb_pdrop": 0,
@@ -46,7 +45,7 @@
   "resid_pdrop": 0,
   "tokenizer_name": "EleutherAI/gpt-neox-20b",
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.28.1",
   "use_cache": false,
   "verbose": 0,
   "vocab_size": 50432