phoebeklett committed
Commit 258c898 · verified · 1 Parent(s): 3ae9dd7

Update config.json

Files changed (1): config.json (+19, -3)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "mosaicml/mpt-30b-chat",
+  "_name_or_path": "normalcomputing/extended-mind-mpt-30b-chat",
   "architectures": [
     "ExtendedMptForCausalLM"
   ],
@@ -7,8 +7,8 @@
     "model_type": ""
   },
   "auto_map": {
-    "AutoConfig": "mosaicml/mpt-30b-chat--configuration_mpt.MPTConfig",
-    "AutoModelForCausalLM": "mosaicml/mpt-30b-chat--modeling_mpt.MPTForCausalLM"
+    "AutoConfig": "configuration.MptConfig",
+    "AutoModelForCausalLM": "modeling.MptForCausalLM"
   },
   "d_model": 7168,
   "emb_pdrop": 0,
@@ -75,6 +75,22 @@
     true,
     true,
     true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
+    true,
     true
   ],
   "verbose": 0,