bms18 committed on
Commit
6388552
·
verified ·
1 Parent(s): 23a7fbd

Training in progress, epoch 1

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "facebook/esm2_t6_8M_UR50D",
3
  "architectures": [
4
  "EsmForSequenceClassification"
5
  ],
@@ -9,16 +9,16 @@
9
  "esmfold_config": null,
10
  "hidden_act": "gelu",
11
  "hidden_dropout_prob": 0.0,
12
- "hidden_size": 320,
13
  "initializer_range": 0.02,
14
- "intermediate_size": 1280,
15
  "is_folding_model": false,
16
  "layer_norm_eps": 1e-05,
17
  "mask_token_id": 32,
18
  "max_position_embeddings": 1026,
19
  "model_type": "esm",
20
  "num_attention_heads": 20,
21
- "num_hidden_layers": 6,
22
  "pad_token_id": 1,
23
  "position_embedding_type": "rotary",
24
  "problem_type": "single_label_classification",
 
1
  {
2
+ "_name_or_path": "facebook/esm2_t30_150M_UR50D",
3
  "architectures": [
4
  "EsmForSequenceClassification"
5
  ],
 
9
  "esmfold_config": null,
10
  "hidden_act": "gelu",
11
  "hidden_dropout_prob": 0.0,
12
+ "hidden_size": 640,
13
  "initializer_range": 0.02,
14
+ "intermediate_size": 2560,
15
  "is_folding_model": false,
16
  "layer_norm_eps": 1e-05,
17
  "mask_token_id": 32,
18
  "max_position_embeddings": 1026,
19
  "model_type": "esm",
20
  "num_attention_heads": 20,
21
+ "num_hidden_layers": 30,
22
  "pad_token_id": 1,
23
  "position_embedding_type": "rotary",
24
  "problem_type": "single_label_classification",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e40fa1700b034660cf37939ae688e7757e9e32291c914189b6e16bec880a55ff
3
- size 31375788
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:216e80370846d5042efe8f5b3ee129faa540f503ac2596ca78e393165b523cd8
3
+ size 595249204
runs/Apr10_17-34-22_DESKTOP-2SFFJ6C/events.out.tfevents.1712766873.DESKTOP-2SFFJ6C.21412.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ba4d2a0c1c6c95a2b2247a6f58c2c28bca80463832f2eec1b255025b494a7555
3
+ size 5268
tokenizer_config.json CHANGED
@@ -45,7 +45,7 @@
45
  "cls_token": "<cls>",
46
  "eos_token": "<eos>",
47
  "mask_token": "<mask>",
48
- "model_max_length": 1024,
49
  "pad_token": "<pad>",
50
  "tokenizer_class": "EsmTokenizer",
51
  "unk_token": "<unk>"
 
45
  "cls_token": "<cls>",
46
  "eos_token": "<eos>",
47
  "mask_token": "<mask>",
48
+ "model_max_length": 1000000000000000019884624838656,
49
  "pad_token": "<pad>",
50
  "tokenizer_class": "EsmTokenizer",
51
  "unk_token": "<unk>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:46eeb929b8d835fd46225cd8b54cb71182ff6a631b00a85e148813ab91823f98
3
  size 4920
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:df078d55b9fa5932312f649a531f7294b6a74c324c2bf10e80c54f9ddd22bc78
3
  size 4920