tommymarto committed on
Commit ad2eff7 · 1 Parent(s): 095ea16

changed config

Files changed (1)
config.json +1 -1
config.json CHANGED
@@ -18,7 +18,7 @@
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "mcqbert",
+  "model_type": "bert",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "pad_token_id": 0,