lhallee committed on
Commit
4000b5d
·
verified ·
1 Parent(s): 7651a83

Upload ESMplusplusForMaskedLM

Browse files
Files changed (1) hide show
  1. config.json +12 -19
config.json CHANGED
@@ -1,19 +1,12 @@
1
- {
2
- "architectures": [
3
- "ESMplusplusForMaskedLM"
4
- ],
5
- "auto_map": {
6
- "AutoConfig": "modeling_esm_plusplus.ESMplusplusConfig",
7
- "AutoModel": "modeling_esm_plusplus.ESMplusplusForMaskedLM",
8
- "AutoModelForMaskedLM": "modeling_esm_plusplus.ESMplusplusForMaskedLM",
9
- "AutoModelForSequenceClassification": "modeling_esm_plusplus.ESMplusplusForSequenceClassification",
10
- "AutoModelForTokenClassification": "modeling_esm_plusplus.ESMplusplusForTokenClassification"
11
- },
12
- "hidden_size": 960,
13
- "model_type": "ESMplusplus",
14
- "num_attention_heads": 15,
15
- "num_hidden_layers": 30,
16
- "torch_dtype": "float32",
17
- "transformers_version": "4.45.0",
18
- "vocab_size": 64
19
- }
 
1
+ {
2
+ "architectures": [
3
+ "ESMplusplusForMaskedLM"
4
+ ],
5
+ "hidden_size": 960,
6
+ "model_type": "ESMplusplus",
7
+ "num_attention_heads": 15,
8
+ "num_hidden_layers": 30,
9
+ "torch_dtype": "float32",
10
+ "transformers_version": "4.45.0",
11
+ "vocab_size": 64
12
+ }