geetu040 committed on
Commit
3bde277
·
verified ·
1 Parent(s): d2520d4

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -20,7 +20,7 @@
20
  "layernorm_mlp_alpha": 1,
21
  "layernorm_mlp_beta": 1,
22
  "max_position_embeddings": 131072,
23
- "model_type": "minimax_text_01",
24
  "num_attention_heads": 32,
25
  "num_experts_per_tok": 2,
26
  "num_hidden_layers": 2,
 
20
  "layernorm_mlp_alpha": 1,
21
  "layernorm_mlp_beta": 1,
22
  "max_position_embeddings": 131072,
23
+ "model_type": "minimax",
24
  "num_attention_heads": 32,
25
  "num_experts_per_tok": 2,
26
  "num_hidden_layers": 2,