abvijaykumar committed
Commit: 9215d32
Parent(s): d73b9ad

Model save

Files changed:
- README.md +15 -105
- adapter_config.json +5 -5
- adapter_model.bin +2 -2
- training_args.bin +1 -1
README.md CHANGED
@@ -1,6 +1,6 @@
 ---
-license:
-base_model:
+license: bigscience-bloom-rail-1.0
+base_model: bigscience/bloom-560m
 tags:
 - generated_from_trainer
 model-index:
@@ -13,9 +13,9 @@ should probably proofread and complete it, then remove this comment. -->

 # finetuned-model

-This model is a fine-tuned version of [
+This model is a fine-tuned version of [bigscience/bloom-560m](https://huggingface.co/bigscience/bloom-560m) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss:
+- Loss: 7.8626

 ## Model description

@@ -40,112 +40,22 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 10

 ### Training results

 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:----:|:---------------:|
-| No log | 1.0 | 6 |
-| No log | 2.0 | 12 |
-| No log | 3.0 | 18 |
-| No log | 4.0 | 24 |
-| No log | 5.0 | 30 |
-| No log | 6.0 | 36 |
-| No log | 7.0 | 42 |
-| No log | 8.0 | 48 |
-| No log | 9.0 | 54 |
-| No log | 10.0 | 60 |
-| No log | 11.0 | 66 | 6.5351 |
-| No log | 12.0 | 72 | 6.5223 |
-| No log | 13.0 | 78 | 6.5101 |
-| No log | 14.0 | 84 | 6.4982 |
-| No log | 15.0 | 90 | 6.4869 |
-| No log | 16.0 | 96 | 6.4758 |
-| No log | 17.0 | 102 | 6.4653 |
-| No log | 18.0 | 108 | 6.4551 |
-| No log | 19.0 | 114 | 6.4453 |
-| No log | 20.0 | 120 | 6.4357 |
-| No log | 21.0 | 126 | 6.4266 |
-| No log | 22.0 | 132 | 6.4177 |
-| No log | 23.0 | 138 | 6.4090 |
-| No log | 24.0 | 144 | 6.4006 |
-| No log | 25.0 | 150 | 6.3924 |
-| No log | 26.0 | 156 | 6.3845 |
-| No log | 27.0 | 162 | 6.3768 |
-| No log | 28.0 | 168 | 6.3696 |
-| No log | 29.0 | 174 | 6.3625 |
-| No log | 30.0 | 180 | 6.3557 |
-| No log | 31.0 | 186 | 6.3489 |
-| No log | 32.0 | 192 | 6.3423 |
-| No log | 33.0 | 198 | 6.3357 |
-| No log | 34.0 | 204 | 6.3294 |
-| No log | 35.0 | 210 | 6.3235 |
-| No log | 36.0 | 216 | 6.3176 |
-| No log | 37.0 | 222 | 6.3119 |
-| No log | 38.0 | 228 | 6.3064 |
-| No log | 39.0 | 234 | 6.3010 |
-| No log | 40.0 | 240 | 6.2957 |
-| No log | 41.0 | 246 | 6.2907 |
-| No log | 42.0 | 252 | 6.2859 |
-| No log | 43.0 | 258 | 6.2811 |
-| No log | 44.0 | 264 | 6.2765 |
-| No log | 45.0 | 270 | 6.2720 |
-| No log | 46.0 | 276 | 6.2675 |
-| No log | 47.0 | 282 | 6.2632 |
-| No log | 48.0 | 288 | 6.2590 |
-| No log | 49.0 | 294 | 6.2550 |
-| No log | 50.0 | 300 | 6.2511 |
-| No log | 51.0 | 306 | 6.2473 |
-| No log | 52.0 | 312 | 6.2437 |
-| No log | 53.0 | 318 | 6.2400 |
-| No log | 54.0 | 324 | 6.2365 |
-| No log | 55.0 | 330 | 6.2331 |
-| No log | 56.0 | 336 | 6.2298 |
-| No log | 57.0 | 342 | 6.2267 |
-| No log | 58.0 | 348 | 6.2237 |
-| No log | 59.0 | 354 | 6.2206 |
-| No log | 60.0 | 360 | 6.2177 |
-| No log | 61.0 | 366 | 6.2149 |
-| No log | 62.0 | 372 | 6.2121 |
-| No log | 63.0 | 378 | 6.2095 |
-| No log | 64.0 | 384 | 6.2068 |
-| No log | 65.0 | 390 | 6.2043 |
-| No log | 66.0 | 396 | 6.2019 |
-| No log | 67.0 | 402 | 6.1995 |
-| No log | 68.0 | 408 | 6.1973 |
-| No log | 69.0 | 414 | 6.1950 |
-| No log | 70.0 | 420 | 6.1929 |
-| No log | 71.0 | 426 | 6.1909 |
-| No log | 72.0 | 432 | 6.1889 |
-| No log | 73.0 | 438 | 6.1870 |
-| No log | 74.0 | 444 | 6.1851 |
-| No log | 75.0 | 450 | 6.1834 |
-| No log | 76.0 | 456 | 6.1817 |
-| No log | 77.0 | 462 | 6.1801 |
-| No log | 78.0 | 468 | 6.1786 |
-| No log | 79.0 | 474 | 6.1772 |
-| No log | 80.0 | 480 | 6.1758 |
-| No log | 81.0 | 486 | 6.1745 |
-| No log | 82.0 | 492 | 6.1733 |
-| No log | 83.0 | 498 | 6.1722 |
-| 6.7021 | 84.0 | 504 | 6.1711 |
-| 6.7021 | 85.0 | 510 | 6.1701 |
-| 6.7021 | 86.0 | 516 | 6.1691 |
-| 6.7021 | 87.0 | 522 | 6.1683 |
-| 6.7021 | 88.0 | 528 | 6.1674 |
-| 6.7021 | 89.0 | 534 | 6.1666 |
-| 6.7021 | 90.0 | 540 | 6.1660 |
-| 6.7021 | 91.0 | 546 | 6.1653 |
-| 6.7021 | 92.0 | 552 | 6.1647 |
-| 6.7021 | 93.0 | 558 | 6.1642 |
-| 6.7021 | 94.0 | 564 | 6.1638 |
-| 6.7021 | 95.0 | 570 | 6.1634 |
-| 6.7021 | 96.0 | 576 | 6.1631 |
-| 6.7021 | 97.0 | 582 | 6.1629 |
-| 6.7021 | 98.0 | 588 | 6.1627 |
-| 6.7021 | 99.0 | 594 | 6.1626 |
-| 6.7021 | 100.0 | 600 | 6.1626 |
+| No log | 1.0 | 6 | 7.9789 |
+| No log | 2.0 | 12 | 7.9410 |
+| No log | 3.0 | 18 | 7.9208 |
+| No log | 4.0 | 24 | 7.9045 |
+| No log | 5.0 | 30 | 7.8931 |
+| No log | 6.0 | 36 | 7.8828 |
+| No log | 7.0 | 42 | 7.8742 |
+| No log | 8.0 | 48 | 7.8679 |
+| No log | 9.0 | 54 | 7.8640 |
+| No log | 10.0 | 60 | 7.8626 |

 ### Framework versions
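The card above records only part of the training setup (seed 42, Adam betas and epsilon, linear scheduler, 10 epochs), while adapter_config.json below identifies the method as PEFT prefix tuning with 20 virtual tokens. A minimal sketch of how such a run could be reproduced follows; the toy corpus, learning rate, and batch size are placeholder assumptions, since this diff does not show them:

```python
# Sketch only: mirrors the recorded setup (seed 42, linear schedule,
# 10 epochs, prefix tuning with 20 virtual tokens on bloom-560m).
# The corpus and any TrainingArguments not in the card are assumptions.
from datasets import Dataset
from peft import PrefixTuningConfig, TaskType, get_peft_model
from transformers import (AutoModelForCausalLM, AutoTokenizer,
                          Trainer, TrainingArguments)

base_id = "bigscience/bloom-560m"
tokenizer = AutoTokenizer.from_pretrained(base_id)
model = AutoModelForCausalLM.from_pretrained(base_id)

# Mirrors adapter_config.json: PREFIX_TUNING, CAUSAL_LM, 20 virtual
# tokens, no prefix projection. The model dimensions (24 layers, 16
# heads, token_dim 1024) are inferred from the base model automatically.
peft_config = PrefixTuningConfig(
    task_type=TaskType.CAUSAL_LM,
    num_virtual_tokens=20,
    prefix_projection=False,
)
model = get_peft_model(model, peft_config)

# Placeholder corpus; the commit does not reveal the real dataset.
def tokenize(batch):
    enc = tokenizer(batch["text"], truncation=True,
                    padding="max_length", max_length=64)
    enc["labels"] = [ids.copy() for ids in enc["input_ids"]]
    return enc

train_ds = Dataset.from_dict(
    {"text": ["example document one", "example document two"]}
).map(tokenize, batched=True, remove_columns=["text"])

# seed and lr_scheduler_type match the card; the default AdamW
# betas=(0.9, 0.999) and epsilon=1e-08 match the listed optimizer.
args = TrainingArguments(
    output_dir="finetuned-model",
    num_train_epochs=10,
    lr_scheduler_type="linear",
    seed=42,
)

Trainer(model=model, args=args, train_dataset=train_ds).train()
```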
adapter_config.json CHANGED
@@ -1,15 +1,15 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "
-  "encoder_hidden_size":
+  "base_model_name_or_path": "bigscience/bloom-560m",
+  "encoder_hidden_size": 1024,
   "inference_mode": true,
-  "num_attention_heads":
-  "num_layers":
+  "num_attention_heads": 16,
+  "num_layers": 24,
   "num_transformer_submodules": 1,
   "num_virtual_tokens": 20,
   "peft_type": "PREFIX_TUNING",
   "prefix_projection": false,
   "revision": null,
   "task_type": "CAUSAL_LM",
-  "token_dim":
+  "token_dim": 1024
 }
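The updated config describes a prefix-tuning adapter for bigscience/bloom-560m (24 layers, 16 attention heads, token_dim 1024, 20 virtual tokens). As a minimal sketch, loading it for inference could look like the following; the repo id `abvijaykumar/finetuned-model` is an assumption based on the committer and model name:

```python
# Sketch only: load the frozen base model, then attach the saved adapter.
# PeftModel reads adapter_config.json to rebuild the 20-token prefix and
# loads its learned weights from adapter_model.bin.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base_id = "bigscience/bloom-560m"
adapter_id = "abvijaykumar/finetuned-model"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id)
model = PeftModel.from_pretrained(base, adapter_id)
model.eval()

inputs = tokenizer("The quick brown fox", return_tensors="pt")
with torch.no_grad():
    out = model.generate(**inputs, max_new_tokens=30)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```

Because prefix tuning leaves the base weights frozen, the adapter checkpoint stays small (about 3.9 MB below) while the 560M-parameter base model is fetched separately.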
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e2bd4f15dc9d3306f73c25082dfcf5b569499a7ab268dd4ed8a60159720778b6
+size 3932989
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ef1de0603d3779f6dd7ab8794efd62c6de80dcb6eae98ed9be7d8125b3991c9d
 size 4027
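Both `.bin` entries are Git LFS pointer files rather than the binaries themselves: `oid sha256:` is the SHA-256 digest of the real object and `size` is its byte length. A small sketch for verifying a downloaded file against its pointer (the local path is a placeholder):

```python
# Sketch only: recompute the SHA-256 of a downloaded LFS object and
# compare it with the oid recorded in the pointer file above.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "e2bd4f15dc9d3306f73c25082dfcf5b569499a7ab268dd4ed8a60159720778b6"
assert sha256_of("adapter_model.bin") == expected, "digest mismatch"
```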