elodiesune committed: Model save

Browse files:
- README.md (+65, -69)
- adapter_config.json (+1, -1)
- adapter_model.safetensors (+2, -2)
- training_args.bin (+1, -1)
README.md
CHANGED
@@ -6,6 +6,7 @@ tags:
 metrics:
 - precision
 - recall
+library_name: peft
 model-index:
 - name: esm_ft_Aerin_Yang_et_al_2023
   results: []
@@ -18,38 +19,38 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [facebook/esm2_t6_8M_UR50D](https://huggingface.co/facebook/esm2_t6_8M_UR50D) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss:
-- Rmse:
-- Mae:
-- Spearmanr Corr: 0.
+- Loss: 39.8878
+- Rmse: 21.3236
+- Mae: 15.1765
+- Spearmanr Corr: 0.8348
 - Spearmanr Corr P Value: 0.0000
-- Pearsonr Corr: 0.
+- Pearsonr Corr: 0.9129
 - Pearsonr Corr P Value: 0.0000
-- Spearmanr Corr Of Deltas: 0.
+- Spearmanr Corr Of Deltas: 0.8678
 - Spearmanr Corr Of Deltas P Value: 0.0
-- Pearsonr Corr Of Deltas: 0.
+- Pearsonr Corr Of Deltas: 0.9127
 - Pearsonr Corr Of Deltas P Value: 0.0
-- Ranking F1 Score: 0.
-- Ranking Mcc: 0.
-- Rmse Enriched:
-- Mae Enriched:
-- Spearmanr Corr Enriched: 0.
+- Ranking F1 Score: 0.7761
+- Ranking Mcc: 0.6138
+- Rmse Enriched: 7.2500
+- Mae Enriched: 2.0039
+- Spearmanr Corr Enriched: 0.4481
 - Spearmanr Corr Enriched P Value: 0.0000
-- Pearsonr Corr Enriched: 0.
-- Pearsonr Corr Enriched P Value: 0.
-- Spearmanr Corr Of Deltas Enriched: 0.
+- Pearsonr Corr Enriched: 0.0056
+- Pearsonr Corr Enriched P Value: 0.9162
+- Spearmanr Corr Of Deltas Enriched: 0.4078
 - Spearmanr Corr Of Deltas Enriched P Value: 0.0
-- Pearsonr Corr Of Deltas Enriched: 0.
-- Pearsonr Corr Of Deltas Enriched P Value: 0.
-- Ranking F1 Score Enriched: 0.
-- Ranking Mcc Enriched: 0.
-- Classification Thresh: 0.
-- Mcc: 0.
-- F1 Score: 0.
-- Acc: 0.
-- Auc: 0.
-- Precision: 0.
-- Recall: 0.
+- Pearsonr Corr Of Deltas Enriched: 0.0084
+- Pearsonr Corr Of Deltas Enriched P Value: 0.0336
+- Ranking F1 Score Enriched: 0.6296
+- Ranking Mcc Enriched: 0.3165
+- Classification Thresh: 0.2
+- Mcc: 0.8780
+- F1 Score: 0.9418
+- Acc: 0.9388
+- Auc: 0.9743
+- Precision: 0.9381
+- Recall: 0.9399
 
 ## Model description
 
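The hunk above fills in the final evaluation metrics for the new adapter. As a hedged usage sketch, not part of the commit: loading the base checkpoint and attaching this PEFT adapter might look as follows. The repo id is inferred from the model name above, and the single-output regression head is an assumption based on the RMSE/MAE metrics.

```python
import torch
from peft import PeftModel
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Repo id inferred from the model name above; may differ from the published id.
ADAPTER_ID = "elodiesune/esm_ft_Aerin_Yang_et_al_2023"
BASE_ID = "facebook/esm2_t6_8M_UR50D"

base = AutoModelForSequenceClassification.from_pretrained(
    BASE_ID,
    num_labels=1,  # single regression output: an assumption, not stated in the card
)
tokenizer = AutoTokenizer.from_pretrained(BASE_ID)
model = PeftModel.from_pretrained(base, ADAPTER_ID)  # attach the saved adapter
model.eval()

# Score one hypothetical protein sequence.
inputs = tokenizer("MKTVRQERLKSIVRILERSKEPVSGAQ", return_tensors="pt")
with torch.no_grad():
    print(model(**inputs).logits)
```
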
@@ -68,7 +69,7 @@ More information needed
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
-- learning_rate: 0.
+- learning_rate: 0.0001
 - train_batch_size: 8
 - eval_batch_size: 8
 - seed: 42
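For orientation, the hyperparameters visible in this hunk map onto transformers.TrainingArguments roughly as below; only the four displayed values are filled in (the optimizer, scheduler, and epoch settings sit outside the hunk context), and output_dir is hypothetical.

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="esm_ft_Aerin_Yang_et_al_2023",  # hypothetical
    learning_rate=1e-4,             # the value this commit sets: 0.0001
    per_device_train_batch_size=8,  # train_batch_size
    per_device_eval_batch_size=8,   # eval_batch_size
    seed=42,
)
```
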
@@ -81,52 +82,47 @@ The following hyperparameters were used during training:
 
 ### Training results
 
-| Training Loss | Epoch | Step | Validation Loss | Rmse | Mae | Spearmanr Corr | Spearmanr Corr P Value | Pearsonr Corr | Pearsonr Corr P Value | Spearmanr Corr Of Deltas | Spearmanr Corr Of Deltas P Value | Pearsonr Corr Of Deltas | Pearsonr Corr Of Deltas P Value | Ranking F1 Score | Ranking Mcc | Rmse Enriched | Mae Enriched | Spearmanr Corr Enriched | Spearmanr Corr Enriched P Value | Pearsonr Corr Enriched | Pearsonr Corr Enriched P Value | Spearmanr Corr Of Deltas Enriched | Spearmanr Corr Of Deltas Enriched P Value | Pearsonr Corr Of Deltas Enriched | Pearsonr Corr Of Deltas Enriched P Value | Ranking F1 Score Enriched | Ranking Mcc Enriched | Classification Thresh | Mcc | F1 Score | Acc | Auc | Precision | Recall |
-|:-------------:|:-----:|:-----:|:---------------:|:-------:|:-------:|:--------------:|:----------------------:|:-------------:|:---------------------:|:------------------------:|:--------------------------------:|:-----------------------:|:-------------------------------:|:----------------:|:-----------:|:-------------:|:------------:|:-----------------------:|:-------------------------------:|:----------------------:|:------------------------------:|:---------------------------------:|:-----------------------------------------:|:--------------------------------:|:----------------------------------------:|:-------------------------:|:--------------------:|:---------------------:|:------:|:--------:|:------:|:------:|:---------:|:------:|
-[rows for epochs 1.0-33.0 of the previous run are not recoverable from the source]
-| 0.0113 | 34.0 | 11390 | 0.0111 | 0.3364 | 0.2489 | 0.8561 | 0.0000 | 0.9062 | 0.0000 | 0.8695 | 0.0 | 0.9058 | 0.0 | 0.7718 | 0.6095 | 0.1158 | 0.0502 | 0.5813 | 0.0000 | 0.1314 | 0.0120 | 0.4868 | 0.0 | 0.1320 | 0.0000 | 0.6684 | 0.4049 | 0.2 | 0.8918 | 0.9504 | 0.9463 | 0.9804 | 0.9455 | 0.9464 |
-| 0.0113 | 35.0 | 11725 | 0.0118 | 0.3413 | 0.2542 | 0.8545 | 0.0000 | 0.9031 | 0.0000 | 0.8657 | 0.0 | 0.9026 | 0.0 | 0.7723 | 0.6105 | 0.1042 | 0.0460 | 0.5860 | 0.0000 | 0.1317 | 0.0118 | 0.4950 | 0.0 | 0.1321 | 0.0000 | 0.6731 | 0.4123 | 0.2 | 0.8856 | 0.9482 | 0.9433 | 0.9784 | 0.9433 | 0.9423 |
-| 0.0099 | 36.0 | 12060 | 0.0122 | 0.3514 | 0.2605 | 0.8461 | 0.0000 | 0.9016 | 0.0000 | 0.8687 | 0.0 | 0.9012 | 0.0 | 0.7696 | 0.6051 | 0.1288 | 0.0645 | 0.5650 | 0.0000 | 0.1560 | 0.0028 | 0.4806 | 0.0 | 0.1566 | 0.0 | 0.6647 | 0.3975 | 0.2 | 0.8986 | 0.9526 | 0.9493 | 0.9758 | 0.9481 | 0.9505 |
-| 0.0113 | 37.0 | 12395 | 0.0127 | 0.3301 | 0.2495 | 0.8422 | 0.0000 | 0.8980 | 0.0000 | 0.8641 | 0.0 | 0.8975 | 0.0 | 0.7670 | 0.6000 | 0.1362 | 0.0761 | 0.5706 | 0.0000 | 0.1543 | 0.0031 | 0.4834 | 0.0 | 0.1546 | 0.0 | 0.6673 | 0.4070 | 0.2 | 0.8869 | 0.9469 | 0.9433 | 0.9716 | 0.9422 | 0.9447 |
-| 0.01 | 38.0 | 12730 | 0.0115 | 0.3294 | 0.2480 | 0.8506 | 0.0000 | 0.9036 | 0.0000 | 0.8639 | 0.0 | 0.9032 | 0.0 | 0.7680 | 0.6028 | 0.1084 | 0.0467 | 0.5947 | 0.0000 | 0.1379 | 0.0083 | 0.5140 | 0.0 | 0.1382 | 0.0000 | 0.6786 | 0.4224 | 0.3 | 0.8953 | 0.9515 | 0.9478 | 0.9746 | 0.9467 | 0.9486 |
-| 0.0107 | 39.0 | 13065 | 0.0111 | 0.3306 | 0.2493 | 0.8522 | 0.0000 | 0.9108 | 0.0000 | 0.8703 | 0.0 | 0.9104 | 0.0 | 0.7710 | 0.6081 | 0.1210 | 0.0675 | 0.5778 | 0.0000 | 0.1383 | 0.0082 | 0.5093 | 0.0 | 0.1388 | 0.0000 | 0.6722 | 0.4113 | 0.3 | 0.8926 | 0.9499 | 0.9463 | 0.9778 | 0.9451 | 0.9475 |
+| Training Loss | Epoch | Step | Validation Loss | Rmse | Mae | Spearmanr Corr | Spearmanr Corr P Value | Pearsonr Corr | Pearsonr Corr P Value | Spearmanr Corr Of Deltas | Spearmanr Corr Of Deltas P Value | Pearsonr Corr Of Deltas | Pearsonr Corr Of Deltas P Value | Ranking F1 Score | Ranking Mcc | Rmse Enriched | Mae Enriched | Spearmanr Corr Enriched | Spearmanr Corr Enriched P Value | Pearsonr Corr Enriched | Pearsonr Corr Enriched P Value | Spearmanr Corr Of Deltas Enriched | Spearmanr Corr Of Deltas Enriched P Value | Pearsonr Corr Of Deltas Enriched | Pearsonr Corr Of Deltas Enriched P Value | Ranking F1 Score Enriched | Ranking Mcc Enriched | Classification Thresh | Mcc | F1 Score | Acc | Auc | Precision | Recall |
+|:-------------:|:-----:|:-----:|:---------------:|:-------:|:-------:|:--------------:|:----------------------:|:-------------:|:---------------------:|:------------------------:|:--------------------------------:|:-----------------------:|:-------------------------------:|:----------------:|:-----------:|:-------------:|:------------:|:-----------------------:|:-------------------------------:|:----------------------:|:------------------------------:|:---------------------------------:|:-----------------------------------------:|:--------------------------------:|:----------------------------------------:|:-------------------------:|:--------------------:|:---------------------:|:------:|:--------:|:------:|:------:|:---------:|:------:|
+| 322.4036 | 1.0 | 335 | 170.5273 | 17.7771 | 14.4453 | 0.8087 | 0.0000 | 0.8469 | 0.0000 | 0.8254 | 0.0 | 0.8463 | 0.0 | 0.7495 | 0.5639 | 3.7796 | 2.1388 | 0.4247 | 0.0000 | 0.0251 | 0.6349 | 0.3748 | 0.0 | 0.0269 | 0.0000 | 0.6053 | 0.2814 | 0.1 | 0.8217 | 0.9112 | 0.9090 | 0.9586 | 0.9099 | 0.9118 |
+| 178.9624 | 2.0 | 670 | 83.5034 | 19.1493 | 14.9378 | 0.8161 | 0.0000 | 0.8602 | 0.0000 | 0.8319 | 0.0 | 0.8596 | 0.0 | 0.7638 | 0.5910 | 5.0134 | 1.7113 | 0.3799 | 0.0000 | 0.0109 | 0.8362 | 0.3223 | 0.0 | 0.0134 | 0.0007 | 0.5917 | 0.2544 | 0.4 | 0.8290 | 0.9207 | 0.9149 | 0.9653 | 0.9148 | 0.9142 |
+| 58.7267 | 3.0 | 1005 | 49.3526 | 20.8551 | 15.1712 | 0.8377 | 0.0000 | 0.8896 | 0.0000 | 0.8579 | 0.0 | 0.8891 | 0.0 | 0.7757 | 0.6129 | 6.3592 | 1.8552 | 0.4710 | 0.0000 | 0.0074 | 0.8887 | 0.3851 | 0.0 | 0.0101 | 0.0102 | 0.6338 | 0.3245 | 0.1 | 0.8740 | 0.9415 | 0.9373 | 0.9731 | 0.9370 | 0.9370 |
+| 57.3836 | 4.0 | 1340 | 69.0669 | 21.2627 | 15.0576 | 0.8435 | 0.0000 | 0.8493 | 0.0000 | 0.8530 | 0.0 | 0.8486 | 0.0 | 0.7778 | 0.6168 | 6.3047 | 1.4575 | 0.5105 | 0.0000 | 0.0024 | 0.9645 | 0.4457 | 0.0 | 0.0060 | 0.1309 | 0.6426 | 0.3456 | 0.2 | 0.8333 | 0.9245 | 0.9164 | 0.9723 | 0.9199 | 0.9134 |
+| 53.7883 | 5.0 | 1675 | 48.1886 | 21.1480 | 15.1387 | 0.8430 | 0.0000 | 0.8926 | 0.0000 | 0.8660 | 0.0 | 0.8921 | 0.0 | 0.7684 | 0.5996 | 7.0220 | 2.0070 | 0.5321 | 0.0000 | 0.0029 | 0.9567 | 0.4401 | 0.0 | 0.0043 | 0.2758 | 0.6507 | 0.3607 | 0.01 | 0.8780 | 0.9418 | 0.9388 | 0.9722 | 0.9381 | 0.9399 |
+| 52.5999 | 6.0 | 2010 | 44.7967 | 21.3116 | 15.1609 | 0.8499 | 0.0000 | 0.9016 | 0.0000 | 0.8730 | 0.0 | 0.9012 | 0.0 | 0.7816 | 0.6240 | 7.0418 | 1.9243 | 0.5035 | 0.0000 | 0.0034 | 0.9486 | 0.4141 | 0.0 | 0.0058 | 0.1402 | 0.6416 | 0.3463 | 0.2 | 0.8744 | 0.9408 | 0.9373 | 0.9791 | 0.9366 | 0.9378 |
+| 52.4754 | 7.0 | 2345 | 59.0400 | 21.3311 | 15.1190 | 0.8520 | 0.0000 | 0.8710 | 0.0000 | 0.8642 | 0.0 | 0.8704 | 0.0 | 0.7872 | 0.6342 | 6.4355 | 1.4927 | 0.5187 | 0.0000 | 0.0026 | 0.9606 | 0.4698 | 0.0 | 0.0061 | 0.1250 | 0.6543 | 0.3692 | 0.4 | 0.8624 | 0.9370 | 0.9313 | 0.9726 | 0.9328 | 0.9296 |
+| 53.8573 | 8.0 | 2680 | 48.6266 | 21.2723 | 15.1655 | 0.7704 | 0.0000 | 0.8929 | 0.0000 | 0.8107 | 0.0 | 0.8925 | 0.0 | 0.7350 | 0.5378 | 7.1861 | 2.1439 | 0.0958 | 0.0699 | 0.0012 | 0.9826 | 0.1100 | 0.0000 | 0.0056 | 0.1558 | 0.4920 | 0.0671 | 0.01 | 0.8831 | 0.9455 | 0.9418 | 0.9676 | 0.9413 | 0.9418 |
+| 50.3365 | 9.0 | 3015 | 60.9515 | 21.3035 | 15.0752 | 0.8176 | 0.0000 | 0.8665 | 0.0000 | 0.8370 | 0.0 | 0.8659 | 0.0 | 0.7697 | 0.6018 | 6.4208 | 1.4898 | 0.5091 | 0.0000 | 0.0026 | 0.9613 | 0.4736 | 0.0 | 0.0061 | 0.1240 | 0.6481 | 0.3526 | 0.4 | 0.8478 | 0.9306 | 0.9239 | 0.9506 | 0.9263 | 0.9216 |
+| 56.4606 | 10.0 | 3350 | 47.2109 | 21.1671 | 15.1503 | 0.8581 | 0.0000 | 0.8950 | 0.0000 | 0.8755 | 0.0 | 0.8946 | 0.0 | 0.7921 | 0.6432 | 6.6823 | 2.0974 | 0.5587 | 0.0000 | 0.0040 | 0.9394 | 0.4914 | 0.0 | 0.0084 | 0.0328 | 0.6700 | 0.3979 | 0.2 | 0.8800 | 0.9443 | 0.9403 | 0.9760 | 0.9400 | 0.9400 |
+| 47.2891 | 11.0 | 3685 | 42.9350 | 21.4256 | 15.2179 | 0.8551 | 0.0000 | 0.9072 | 0.0000 | 0.8768 | 0.0 | 0.9069 | 0.0 | 0.7944 | 0.6473 | 7.5706 | 2.0721 | 0.5064 | 0.0000 | 0.0021 | 0.9681 | 0.4414 | 0.0 | 0.0059 | 0.1345 | 0.6502 | 0.3568 | 0.2 | 0.8837 | 0.9448 | 0.9418 | 0.9788 | 0.9411 | 0.9427 |
+| 46.1667 | 12.0 | 4020 | 64.4558 | 21.2132 | 15.0403 | 0.7896 | 0.0000 | 0.8587 | 0.0000 | 0.8116 | 0.0 | 0.8581 | 0.0 | 0.7476 | 0.5604 | 5.7742 | 1.3566 | 0.3547 | 0.0000 | -0.0005 | 0.9932 | 0.3391 | 0.0 | 0.0026 | 0.5119 | 0.5858 | 0.2399 | 0.1 | 0.8486 | 0.9314 | 0.9239 | 0.9542 | 0.9278 | 0.9208 |
+| 52.2836 | 13.0 | 4355 | 43.7076 | 21.3301 | 15.1676 | 0.8437 | 0.0000 | 0.9041 | 0.0000 | 0.8679 | 0.0 | 0.9038 | 0.0 | 0.7821 | 0.6248 | 7.0591 | 1.8864 | 0.4418 | 0.0000 | 0.0042 | 0.9367 | 0.3915 | 0.0 | 0.0085 | 0.0315 | 0.6212 | 0.3086 | 0.2 | 0.8775 | 0.9422 | 0.9388 | 0.9792 | 0.9381 | 0.9395 |
+| 49.2196 | 14.0 | 4690 | 44.0100 | 21.3915 | 15.1924 | 0.8669 | 0.0000 | 0.9039 | 0.0000 | 0.8834 | 0.0 | 0.9035 | 0.0 | 0.7951 | 0.6492 | 7.0582 | 1.8005 | 0.5986 | 0.0000 | 0.0035 | 0.9480 | 0.5072 | 0.0 | 0.0087 | 0.0283 | 0.6849 | 0.4232 | 0.2 | 0.8891 | 0.9483 | 0.9448 | 0.9800 | 0.9443 | 0.9448 |
+| 44.0102 | 15.0 | 5025 | 39.9849 | 21.3490 | 15.1845 | 0.8337 | 0.0000 | 0.9132 | 0.0000 | 0.8677 | 0.0 | 0.9130 | 0.0 | 0.7865 | 0.6328 | 7.2932 | 2.0215 | 0.5070 | 0.0000 | 0.0061 | 0.9079 | 0.4234 | 0.0 | 0.0099 | 0.0121 | 0.6487 | 0.3579 | 0.2 | 0.8814 | 0.9430 | 0.9403 | 0.9633 | 0.9397 | 0.9417 |
+| 49.9841 | 16.0 | 5360 | 59.4978 | 21.3604 | 15.1175 | 0.8496 | 0.0000 | 0.8705 | 0.0000 | 0.8638 | 0.0 | 0.8700 | 0.0 | 0.7807 | 0.6224 | 6.2476 | 1.4632 | 0.5222 | 0.0000 | 0.0023 | 0.9653 | 0.4646 | 0.0 | 0.0064 | 0.1052 | 0.6518 | 0.3623 | 0.2 | 0.8503 | 0.9317 | 0.9254 | 0.9774 | 0.9268 | 0.9235 |
+| 48.9333 | 17.0 | 5695 | 46.9142 | 21.2059 | 15.2101 | 0.8260 | 0.0000 | 0.8980 | 0.0000 | 0.8585 | 0.0 | 0.8977 | 0.0 | 0.7676 | 0.5979 | 8.1980 | 2.7610 | 0.3826 | 0.0000 | 0.0047 | 0.9289 | 0.3499 | 0.0 | 0.0078 | 0.0487 | 0.5998 | 0.2721 | 0.2 | 0.8594 | 0.9306 | 0.9284 | 0.9756 | 0.9286 | 0.9308 |
+| 44.6202 | 18.0 | 6030 | 50.6476 | 21.3743 | 15.1230 | 0.8300 | 0.0000 | 0.8897 | 0.0000 | 0.8506 | 0.0 | 0.8892 | 0.0 | 0.7703 | 0.6029 | 5.9865 | 1.3225 | 0.4918 | 0.0000 | 0.0005 | 0.9918 | 0.4423 | 0.0 | 0.0045 | 0.2561 | 0.6426 | 0.3432 | 0.2 | 0.8744 | 0.9426 | 0.9373 | 0.9672 | 0.9389 | 0.9355 |
+| 45.736 | 19.0 | 6365 | 42.7219 | 21.3199 | 15.1342 | 0.8487 | 0.0000 | 0.9061 | 0.0000 | 0.8702 | 0.0 | 0.9057 | 0.0 | 0.7740 | 0.6101 | 6.4299 | 1.5214 | 0.5348 | 0.0000 | 0.0030 | 0.9544 | 0.4618 | 0.0 | 0.0075 | 0.0583 | 0.6632 | 0.3820 | 0.2 | 0.8920 | 0.95 | 0.9463 | 0.9753 | 0.9462 | 0.9458 |
+| 42.4466 | 20.0 | 6700 | 49.4293 | 21.3812 | 15.1290 | 0.8166 | 0.0000 | 0.8920 | 0.0000 | 0.8477 | 0.0 | 0.8917 | 0.0 | 0.7661 | 0.5951 | 6.3871 | 1.4594 | 0.3867 | 0.0000 | 0.0022 | 0.9664 | 0.3756 | 0.0 | 0.0074 | 0.0593 | 0.6053 | 0.2738 | 0.2 | 0.8800 | 0.9449 | 0.9403 | 0.9673 | 0.9409 | 0.9391 |
+| 44.6269 | 21.0 | 7035 | 51.7803 | 21.3407 | 15.1073 | 0.8373 | 0.0000 | 0.8865 | 0.0000 | 0.8591 | 0.0 | 0.8861 | 0.0 | 0.7778 | 0.6169 | 6.4531 | 1.5002 | 0.5360 | 0.0000 | 0.0028 | 0.9572 | 0.4885 | 0.0 | 0.0075 | 0.0586 | 0.6621 | 0.3817 | 0.2 | 0.8740 | 0.9421 | 0.9373 | 0.9658 | 0.9379 | 0.9361 |
+| 43.6578 | 22.0 | 7370 | 47.6016 | 21.3924 | 15.2455 | 0.8066 | 0.0000 | 0.8992 | 0.0000 | 0.8468 | 0.0 | 0.8988 | 0.0 | 0.7646 | 0.5927 | 8.7133 | 2.8206 | 0.3334 | 0.0000 | 0.0017 | 0.9750 | 0.3019 | 0.0 | 0.0030 | 0.4492 | 0.5840 | 0.2392 | 0.2 | 0.8636 | 0.9316 | 0.9299 | 0.9650 | 0.9308 | 0.9328 |
+| 46.4904 | 23.0 | 7705 | 42.1610 | 21.2646 | 15.1456 | 0.8504 | 0.0000 | 0.9070 | 0.0000 | 0.8789 | 0.0 | 0.9067 | 0.0 | 0.7924 | 0.6437 | 6.5963 | 1.8549 | 0.5553 | 0.0000 | 0.0041 | 0.9385 | 0.4936 | 0.0 | 0.0082 | 0.0376 | 0.6659 | 0.3893 | 0.2 | 0.8889 | 0.9487 | 0.9448 | 0.9734 | 0.9448 | 0.9442 |
+| 39.9749 | 24.0 | 8040 | 39.7221 | 21.3645 | 15.1815 | 0.8412 | 0.0000 | 0.9132 | 0.0000 | 0.8692 | 0.0 | 0.9130 | 0.0 | 0.7797 | 0.6203 | 6.6229 | 1.6823 | 0.5055 | 0.0000 | 0.0031 | 0.9527 | 0.4472 | 0.0 | 0.0072 | 0.0664 | 0.6514 | 0.3619 | 0.2 | 0.8950 | 0.9512 | 0.9478 | 0.9721 | 0.9474 | 0.9476 |
+| 38.3662 | 25.0 | 8375 | 40.5885 | 21.3899 | 15.1735 | 0.8552 | 0.0000 | 0.9116 | 0.0000 | 0.8772 | 0.0 | 0.9115 | 0.0 | 0.7865 | 0.6331 | 6.9604 | 1.7973 | 0.5424 | 0.0000 | 0.0045 | 0.9326 | 0.4661 | 0.0 | 0.0083 | 0.0355 | 0.6613 | 0.3809 | 0.2 | 0.8834 | 0.9451 | 0.9418 | 0.9780 | 0.9411 | 0.9422 |
+| 44.2025 | 26.0 | 8710 | 43.1233 | 21.1791 | 15.1035 | 0.8464 | 0.0000 | 0.9042 | 0.0000 | 0.8657 | 0.0 | 0.9039 | 0.0 | 0.7773 | 0.6162 | 6.0269 | 1.4761 | 0.4770 | 0.0000 | 0.0052 | 0.9215 | 0.4639 | 0.0 | 0.0088 | 0.0251 | 0.6389 | 0.3318 | 0.2 | 0.8890 | 0.9485 | 0.9448 | 0.9811 | 0.9446 | 0.9444 |
+| 39.2364 | 27.0 | 9045 | 41.0239 | 21.3000 | 15.1537 | 0.8521 | 0.0000 | 0.9099 | 0.0000 | 0.8789 | 0.0 | 0.9096 | 0.0 | 0.7881 | 0.6358 | 6.7786 | 1.8539 | 0.5215 | 0.0000 | 0.0047 | 0.9297 | 0.4590 | 0.0 | 0.0081 | 0.0407 | 0.6539 | 0.3687 | 0.2 | 0.8891 | 0.9483 | 0.9448 | 0.9780 | 0.9443 | 0.9448 |
+| 34.0631 | 28.0 | 9380 | 37.2483 | 21.4183 | 15.1928 | 0.8369 | 0.0000 | 0.9193 | 0.0000 | 0.8707 | 0.0 | 0.9191 | 0.0 | 0.7806 | 0.6220 | 7.0196 | 1.7789 | 0.5115 | 0.0000 | 0.0057 | 0.9140 | 0.4615 | 0.0 | 0.0085 | 0.0321 | 0.6569 | 0.3714 | 0.1 | 0.9013 | 0.9536 | 0.9507 | 0.9666 | 0.9501 | 0.9512 |
+| 40.0891 | 29.0 | 9715 | 46.0297 | 21.2290 | 15.1126 | 0.8209 | 0.0000 | 0.8981 | 0.0000 | 0.8494 | 0.0 | 0.8977 | 0.0 | 0.7661 | 0.5950 | 6.3359 | 1.5989 | 0.4640 | 0.0000 | 0.0025 | 0.9621 | 0.4145 | 0.0 | 0.0052 | 0.1908 | 0.6328 | 0.3269 | 0.2 | 0.8769 | 0.9433 | 0.9388 | 0.9634 | 0.9390 | 0.9380 |
+| 32.5745 | 30.0 | 10050 | 60.5604 | 21.1329 | 15.0569 | 0.8462 | 0.0000 | 0.8658 | 0.0000 | 0.8583 | 0.0 | 0.8653 | 0.0 | 0.7811 | 0.6232 | 5.9604 | 1.5502 | 0.5403 | 0.0000 | 0.0008 | 0.9887 | 0.4635 | 0.0 | 0.0048 | 0.2207 | 0.6632 | 0.3848 | 0.2 | 0.8536 | 0.9333 | 0.9269 | 0.9734 | 0.9289 | 0.9247 |
+| 30.2815 | 31.0 | 10385 | 35.5872 | 21.5131 | 15.2808 | 0.8172 | 0.0000 | 0.9235 | 0.0000 | 0.8543 | 0.0 | 0.9232 | 0.0 | 0.7633 | 0.5901 | 6.9169 | 1.8155 | 0.3620 | 0.0000 | 0.0051 | 0.9238 | 0.3457 | 0.0 | 0.0076 | 0.0534 | 0.5901 | 0.2497 | 0.2 | 0.9013 | 0.9536 | 0.9507 | 0.9703 | 0.9501 | 0.9512 |
+| 36.0448 | 32.0 | 10720 | 42.6942 | 21.3322 | 15.1173 | 0.8447 | 0.0000 | 0.9063 | 0.0000 | 0.8705 | 0.0 | 0.9059 | 0.0 | 0.7788 | 0.6189 | 5.9659 | 1.3238 | 0.5151 | 0.0000 | 0.0006 | 0.9911 | 0.4581 | 0.0 | 0.0016 | 0.6793 | 0.6536 | 0.3628 | 0.2 | 0.8980 | 0.9530 | 0.9493 | 0.9732 | 0.9497 | 0.9483 |
+| 34.7347 | 33.0 | 11055 | 39.8878 | 21.3236 | 15.1765 | 0.8348 | 0.0000 | 0.9129 | 0.0000 | 0.8678 | 0.0 | 0.9127 | 0.0 | 0.7761 | 0.6138 | 7.2500 | 2.0039 | 0.4481 | 0.0000 | 0.0056 | 0.9162 | 0.4078 | 0.0 | 0.0084 | 0.0336 | 0.6296 | 0.3165 | 0.2 | 0.8780 | 0.9418 | 0.9388 | 0.9743 | 0.9381 | 0.9399 |
 
 
 ### Framework versions
 
+- PEFT 0.13.2
 - Transformers 4.41.2
 - Pytorch 2.5.1+cu118
 - Datasets 3.1.0
-- Tokenizers 0.19.1
+- Tokenizers 0.19.1
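The table tracks regression, correlation, ranking, and thresholded-classification metrics per epoch. A hedged sketch of how the core columns could be computed with scipy and scikit-learn follows; the repo's actual compute_metrics function is not part of this diff, the enriched/deltas/ranking variants are omitted, and the thresholding scheme shown is an illustrative assumption.

```python
import numpy as np
from scipy.stats import pearsonr, spearmanr
from sklearn.metrics import f1_score, matthews_corrcoef, roc_auc_score

def eval_metrics(preds: np.ndarray, labels: np.ndarray, thresh: float) -> dict:
    """Illustrative versions of the table's core columns."""
    s_corr, s_p = spearmanr(preds, labels)  # Spearmanr Corr (+ P Value)
    p_corr, p_p = pearsonr(preds, labels)   # Pearsonr Corr (+ P Value)
    # How values map to binary classes is not stated in the card; thresholding
    # both arrays directly is an assumption made for illustration.
    pred_cls = (preds >= thresh).astype(int)
    true_cls = (labels >= thresh).astype(int)
    return {
        "rmse": float(np.sqrt(np.mean((preds - labels) ** 2))),
        "mae": float(np.mean(np.abs(preds - labels))),
        "spearmanr_corr": float(s_corr),
        "spearmanr_corr_p_value": float(s_p),
        "pearsonr_corr": float(p_corr),
        "pearsonr_corr_p_value": float(p_p),
        "mcc": float(matthews_corrcoef(true_cls, pred_cls)),
        "f1_score": float(f1_score(true_cls, pred_cls)),
        "acc": float(np.mean(pred_cls == true_cls)),
        "auc": float(roc_auc_score(true_cls, preds)),
    }
```
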
adapter_config.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "facebook/
+  "base_model_name_or_path": "facebook/esm2_t6_8M_UR50D",
   "fan_in_fan_out": false,
   "feedforward_modules": [
     "value",
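`feedforward_modules` is a field of peft's IA3Config rather than LoraConfig, which suggests this adapter uses IA³. A hedged reconstruction of the visible fields follows; both module lists are cut off in the diff, so the values below are assumptions.

```python
from peft import IA3Config

# Sketch only: the diff shows just the first "feedforward_modules" entry, and
# target_modules is not visible at all. HF's ESM attention layers expose
# modules named "query"/"key"/"value", so "value" is a plausible target.
config = IA3Config(
    base_model_name_or_path="facebook/esm2_t6_8M_UR50D",  # the value this commit fixes
    target_modules=["key", "value"],  # assumption; must contain feedforward_modules
    feedforward_modules=["value"],    # first entry visible in the diff
)
```
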
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:234f31207df00673a10cefc0d39c9e370dacac66aa988c5877c5e56ecf496174
+size 5412288
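The changed lines form a Git LFS pointer: the repository stores only the object's sha256 and byte size (about 5.4 MB here), while the weights themselves live in LFS storage. A downloaded copy can be checked against the pointer like this (the local path is hypothetical):

```python
import hashlib
import os

path = "adapter_model.safetensors"  # hypothetical local download

# Hash the file in 1 MB chunks and compare against the pointer fields above.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == 5412288
assert h.hexdigest() == "234f31207df00673a10cefc0d39c9e370dacac66aa988c5877c5e56ecf496174"
```
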
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8f4d7df9839bb5b8c5ed9b6611ede265f97621c5cc5474c677e8eb88c5869967
 size 5304
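training_args.bin is the pickled transformers.TrainingArguments that the Trainer saves alongside a run; the size is unchanged (5304 bytes) while the oid differs, so the settings changed but not the format. It can be inspected directly (path hypothetical):

```python
import torch

# The file is a full pickle, not a tensor checkpoint, so weights_only must be
# False on recent PyTorch; only unpickle files you trust.
training_args = torch.load("training_args.bin", weights_only=False)
print(training_args.learning_rate)  # expected: 0.0001, per the README diff
```
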