elodiesune committed
Commit b9b315f · verified · 1 Parent(s): 452b6a4

Upload EsmForSequenceClassification

Files changed (2):
  1. config.json +4 -4
  2. model.safetensors +2 -2
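
This commit re-uploads the EsmForSequenceClassification checkpoint on top of a larger ESM-2 backbone (see the config.json diff below). A minimal loading sketch, assuming the Hugging Face transformers library with PyTorch; the repo id here is a hypothetical placeholder, not the actual path of this repository:

```python
from transformers import AutoTokenizer, EsmForSequenceClassification

repo_id = "elodiesune/<this-repo>"  # hypothetical placeholder -- substitute the real repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = EsmForSequenceClassification.from_pretrained(repo_id)

# config.json defines a single label ("LABEL_0") with problem_type="regression",
# so the classification head returns one continuous score per protein sequence.
inputs = tokenizer("MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ", return_tensors="pt")
score = model(**inputs).logits  # shape: (1, 1)
print(score)
```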
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "facebook/esm2_t12_35M_UR50D",
+  "_name_or_path": "facebook/esm2_t30_150M_UR50D",
   "architectures": [
     "EsmForSequenceClassification"
   ],
@@ -9,12 +9,12 @@
   "esmfold_config": null,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
-  "hidden_size": 480,
+  "hidden_size": 640,
   "id2label": {
     "0": "LABEL_0"
   },
   "initializer_range": 0.02,
-  "intermediate_size": 1920,
+  "intermediate_size": 2560,
   "is_folding_model": false,
   "label2id": {
     "LABEL_0": 0
@@ -24,7 +24,7 @@
   "max_position_embeddings": 1026,
   "model_type": "esm",
   "num_attention_heads": 20,
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 30,
   "pad_token_id": 1,
   "position_embedding_type": "rotary",
   "problem_type": "regression",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:08c57e4e7909c0c98ba1092b233c0df14a61dd25088b6a0efd8d3e4b52af1761
-size 136002440
+oid sha256:6c07bb3d0d2079eb1c2a40b6813c54fe9dce2951f9b58d142d6c4d3fdbf2a775
+size 595252008