Add files using upload-large-folder tool
- .gitattributes +1 -0
- README.md +66 -0
- adapter_config.json +34 -0
- adapter_model.safetensors +3 -0
- added_tokens.json +24 -0
- all_results.json +12 -0
- eval_results.json +7 -0
- merges.txt +0 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +208 -0
- train_results.json +8 -0
- trainer_log.jsonl +270 -0
- trainer_state.json +1926 -0
- training_args.bin +3 -0
- training_eval_loss.png +0 -0
- training_loss.png +0 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,66 @@
---
library_name: peft
license: other
base_model: Qwen/Qwen2.5-14B-Instruct
tags:
- llama-factory
- lora
- generated_from_trainer
model-index:
- name: MATH_training_response_Qwen2.5_7B
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# MATH_training_response_Qwen2.5_7B

This model is a fine-tuned version of [Qwen/Qwen2.5-14B-Instruct](https://huggingface.co/Qwen/Qwen2.5-14B-Instruct) on the MATH_training_response_Qwen2.5_7B dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0509

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 1
- eval_batch_size: 1
- seed: 42
- distributed_type: multi-GPU
- num_devices: 4
- total_train_batch_size: 4
- total_eval_batch_size: 4
- optimizer: adamw_torch with betas=(0.9, 0.999) and epsilon=1e-08 (no additional optimizer arguments)
- lr_scheduler_type: cosine
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 2.0

### Training results

| Training Loss | Epoch  | Step | Validation Loss |
|:-------------:|:------:|:----:|:---------------:|
| 0.045         | 1.4925 | 200  | 0.0516          |


### Framework versions

- PEFT 0.12.0
- Transformers 4.46.1
- Pytorch 2.5.1+cu124
- Datasets 3.1.0
- Tokenizers 0.20.3
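The card above carries no usage snippet, so here is a minimal loading sketch consistent with the framework versions it lists (PEFT 0.12.0, Transformers 4.46.1); the adapter repo id is a hypothetical placeholder for wherever this upload is actually hosted:

```python
# Minimal sketch: attach the LoRA adapter to the base model for inference.
# "your-username/MATH_training_response_Qwen2.5_7B" is a hypothetical placeholder.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen2.5-14B-Instruct",
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
model = PeftModel.from_pretrained(base, "your-username/MATH_training_response_Qwen2.5_7B")
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-14B-Instruct")
```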
adapter_config.json
ADDED
@@ -0,0 +1,34 @@
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "Qwen/Qwen2.5-14B-Instruct",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_dropout": 0.0,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "q_proj",
    "down_proj",
    "o_proj",
    "v_proj",
    "gate_proj",
    "up_proj",
    "k_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": false
}
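One implication of the config above: with `"r": 8` and `"lora_alpha": 16` (and `use_rslora` disabled), PEFT scales each low-rank update by lora_alpha / r, so every targeted projection receives an effective delta of

```latex
% Standard LoRA update as applied by PEFT when use_rslora = false:
\Delta W \;=\; \frac{\alpha}{r}\, B A \;=\; \frac{16}{8}\, B A \;=\; 2\, B A,
\qquad B \in \mathbb{R}^{d_{\text{out}} \times r},\quad A \in \mathbb{R}^{r \times d_{\text{in}}}
```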
adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cb0ef46500487385ae8273afa539d7216a8d8215c0eca792f983e7cabb05405f
size 68902296
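The 68,902,296-byte payload is consistent with the adapter config above. A back-of-the-envelope check, assuming the published Qwen2.5-14B-Instruct dimensions (hidden size 5120, intermediate size 13824, 48 layers, 8 KV heads with head dim 128; verify against the base model's config.json) and bf16 storage:

```python
# Rough parameter count for r=8 LoRA on all seven projections of every layer.
# The dimensions below are assumptions about Qwen2.5-14B-Instruct, not read
# from this repo.
r, hidden, inter, layers = 8, 5120, 13824, 48
kv_dim = 8 * 128  # num_key_value_heads * head_dim

per_layer = (
    r * (hidden + hidden)    # q_proj: 5120 -> 5120
    + r * (hidden + kv_dim)  # k_proj: 5120 -> 1024
    + r * (hidden + kv_dim)  # v_proj: 5120 -> 1024
    + r * (hidden + hidden)  # o_proj: 5120 -> 5120
    + r * (hidden + inter)   # gate_proj: 5120 -> 13824
    + r * (hidden + inter)   # up_proj: 5120 -> 13824
    + r * (inter + hidden)   # down_proj: 13824 -> 5120
)
params = per_layer * layers
print(params)      # 34406400 trainable parameters
print(params * 2)  # 68812800 bytes in bf16; the gap vs. 68902296 is safetensors header metadata
```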
added_tokens.json
ADDED
@@ -0,0 +1,24 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
all_results.json
ADDED
@@ -0,0 +1,12 @@
{
  "epoch": 2.0,
  "eval_loss": 0.05085289850831032,
  "eval_runtime": 2.8859,
  "eval_samples_per_second": 2.079,
  "eval_steps_per_second": 0.693,
  "total_flos": 509043416236032.0,
  "train_loss": 0.058696881270230705,
  "train_runtime": 998.1148,
  "train_samples_per_second": 1.068,
  "train_steps_per_second": 0.269
}
eval_results.json
ADDED
@@ -0,0 +1,7 @@
{
  "epoch": 2.0,
  "eval_loss": 0.05085289850831032,
  "eval_runtime": 2.8859,
  "eval_samples_per_second": 2.079,
  "eval_steps_per_second": 0.693
}
merges.txt
ADDED
The diff for this file is too large to render.
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896
tokenizer_config.json
ADDED
@@ -0,0 +1,208 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "model_max_length": 131072,
  "pad_token": "<|endoftext|>",
  "padding_side": "right",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
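The long `chat_template` entry above is the standard Qwen2.5 ChatML template. A quick way to see what it renders (a sketch using Transformers' chat-template API; the prompt text is illustrative):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-14B-Instruct")
messages = [{"role": "user", "content": "What is 7 * 8?"}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>system
# You are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>
# <|im_start|>user
# What is 7 * 8?<|im_end|>
# <|im_start|>assistant
```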
train_results.json
ADDED
@@ -0,0 +1,8 @@
{
  "epoch": 2.0,
  "total_flos": 509043416236032.0,
  "train_loss": 0.058696881270230705,
  "train_runtime": 998.1148,
  "train_samples_per_second": 1.068,
  "train_steps_per_second": 0.269
}
trainer_log.jsonl
ADDED
@@ -0,0 +1,270 @@
{"current_steps": 1, "total_steps": 268, "loss": 0.1102, "lr": 3.7037037037037037e-06, "epoch": 0.007462686567164179, "percentage": 0.37, "elapsed_time": "0:00:06", "remaining_time": "0:29:56"}
{"current_steps": 2, "total_steps": 268, "loss": 0.0782, "lr": 7.4074074074074075e-06, "epoch": 0.014925373134328358, "percentage": 0.75, "elapsed_time": "0:00:16", "remaining_time": "0:37:19"}
{"current_steps": 3, "total_steps": 268, "loss": 0.0887, "lr": 1.1111111111111112e-05, "epoch": 0.022388059701492536, "percentage": 1.12, "elapsed_time": "0:00:20", "remaining_time": "0:30:06"}
{"current_steps": 4, "total_steps": 268, "loss": 0.0997, "lr": 1.4814814814814815e-05, "epoch": 0.029850746268656716, "percentage": 1.49, "elapsed_time": "0:00:24", "remaining_time": "0:26:26"}
{"current_steps": 5, "total_steps": 268, "loss": 0.1068, "lr": 1.8518518518518518e-05, "epoch": 0.03731343283582089, "percentage": 1.87, "elapsed_time": "0:00:27", "remaining_time": "0:24:12"}
{"current_steps": 6, "total_steps": 268, "loss": 0.1309, "lr": 2.2222222222222223e-05, "epoch": 0.04477611940298507, "percentage": 2.24, "elapsed_time": "0:00:31", "remaining_time": "0:22:41"}
{"current_steps": 7, "total_steps": 268, "loss": 0.1307, "lr": 2.5925925925925925e-05, "epoch": 0.05223880597014925, "percentage": 2.61, "elapsed_time": "0:00:34", "remaining_time": "0:21:36"}
{"current_steps": 8, "total_steps": 268, "loss": 0.088, "lr": 2.962962962962963e-05, "epoch": 0.05970149253731343, "percentage": 2.99, "elapsed_time": "0:00:38", "remaining_time": "0:20:47"}
{"current_steps": 9, "total_steps": 268, "loss": 0.0892, "lr": 3.3333333333333335e-05, "epoch": 0.06716417910447761, "percentage": 3.36, "elapsed_time": "0:00:41", "remaining_time": "0:20:07"}
{"current_steps": 10, "total_steps": 268, "loss": 0.0853, "lr": 3.7037037037037037e-05, "epoch": 0.07462686567164178, "percentage": 3.73, "elapsed_time": "0:00:45", "remaining_time": "0:19:34"}
{"current_steps": 11, "total_steps": 268, "loss": 0.0697, "lr": 4.074074074074074e-05, "epoch": 0.08208955223880597, "percentage": 4.1, "elapsed_time": "0:00:49", "remaining_time": "0:19:08"}
{"current_steps": 12, "total_steps": 268, "loss": 0.0721, "lr": 4.4444444444444447e-05, "epoch": 0.08955223880597014, "percentage": 4.48, "elapsed_time": "0:00:52", "remaining_time": "0:18:45"}
{"current_steps": 13, "total_steps": 268, "loss": 0.0991, "lr": 4.814814814814815e-05, "epoch": 0.09701492537313433, "percentage": 4.85, "elapsed_time": "0:00:56", "remaining_time": "0:18:25"}
{"current_steps": 14, "total_steps": 268, "loss": 0.0857, "lr": 5.185185185185185e-05, "epoch": 0.1044776119402985, "percentage": 5.22, "elapsed_time": "0:00:59", "remaining_time": "0:18:06"}
{"current_steps": 15, "total_steps": 268, "loss": 0.0873, "lr": 5.555555555555556e-05, "epoch": 0.11194029850746269, "percentage": 5.6, "elapsed_time": "0:01:03", "remaining_time": "0:17:51"}
{"current_steps": 16, "total_steps": 268, "loss": 0.0704, "lr": 5.925925925925926e-05, "epoch": 0.11940298507462686, "percentage": 5.97, "elapsed_time": "0:01:07", "remaining_time": "0:17:37"}
{"current_steps": 17, "total_steps": 268, "loss": 0.0646, "lr": 6.296296296296296e-05, "epoch": 0.12686567164179105, "percentage": 6.34, "elapsed_time": "0:01:10", "remaining_time": "0:17:24"}
{"current_steps": 18, "total_steps": 268, "loss": 0.0586, "lr": 6.666666666666667e-05, "epoch": 0.13432835820895522, "percentage": 6.72, "elapsed_time": "0:01:14", "remaining_time": "0:17:12"}
{"current_steps": 19, "total_steps": 268, "loss": 0.0728, "lr": 7.037037037037038e-05, "epoch": 0.1417910447761194, "percentage": 7.09, "elapsed_time": "0:01:17", "remaining_time": "0:17:01"}
{"current_steps": 20, "total_steps": 268, "loss": 0.0659, "lr": 7.407407407407407e-05, "epoch": 0.14925373134328357, "percentage": 7.46, "elapsed_time": "0:01:21", "remaining_time": "0:16:51"}
{"current_steps": 21, "total_steps": 268, "loss": 0.0563, "lr": 7.777777777777778e-05, "epoch": 0.15671641791044777, "percentage": 7.84, "elapsed_time": "0:01:25", "remaining_time": "0:16:41"}
{"current_steps": 22, "total_steps": 268, "loss": 0.0698, "lr": 8.148148148148148e-05, "epoch": 0.16417910447761194, "percentage": 8.21, "elapsed_time": "0:01:28", "remaining_time": "0:16:32"}
{"current_steps": 23, "total_steps": 268, "loss": 0.0731, "lr": 8.518518518518518e-05, "epoch": 0.17164179104477612, "percentage": 8.58, "elapsed_time": "0:01:32", "remaining_time": "0:16:24"}
{"current_steps": 24, "total_steps": 268, "loss": 0.0601, "lr": 8.888888888888889e-05, "epoch": 0.1791044776119403, "percentage": 8.96, "elapsed_time": "0:01:36", "remaining_time": "0:16:16"}
{"current_steps": 25, "total_steps": 268, "loss": 0.0598, "lr": 9.25925925925926e-05, "epoch": 0.1865671641791045, "percentage": 9.33, "elapsed_time": "0:01:39", "remaining_time": "0:16:08"}
{"current_steps": 26, "total_steps": 268, "loss": 0.063, "lr": 9.62962962962963e-05, "epoch": 0.19402985074626866, "percentage": 9.7, "elapsed_time": "0:01:43", "remaining_time": "0:16:00"}
{"current_steps": 27, "total_steps": 268, "loss": 0.0605, "lr": 0.0001, "epoch": 0.20149253731343283, "percentage": 10.07, "elapsed_time": "0:01:46", "remaining_time": "0:15:53"}
{"current_steps": 28, "total_steps": 268, "loss": 0.0628, "lr": 9.999575185316994e-05, "epoch": 0.208955223880597, "percentage": 10.45, "elapsed_time": "0:01:50", "remaining_time": "0:15:47"}
{"current_steps": 29, "total_steps": 268, "loss": 0.0686, "lr": 9.998300813454982e-05, "epoch": 0.21641791044776118, "percentage": 10.82, "elapsed_time": "0:01:54", "remaining_time": "0:15:40"}
{"current_steps": 30, "total_steps": 268, "loss": 0.0762, "lr": 9.996177100962714e-05, "epoch": 0.22388059701492538, "percentage": 11.19, "elapsed_time": "0:01:57", "remaining_time": "0:15:33"}
{"current_steps": 31, "total_steps": 268, "loss": 0.0839, "lr": 9.99320440871389e-05, "epoch": 0.23134328358208955, "percentage": 11.57, "elapsed_time": "0:02:01", "remaining_time": "0:15:27"}
{"current_steps": 32, "total_steps": 268, "loss": 0.0654, "lr": 9.989383241845838e-05, "epoch": 0.23880597014925373, "percentage": 11.94, "elapsed_time": "0:02:04", "remaining_time": "0:15:21"}
{"current_steps": 33, "total_steps": 268, "loss": 0.0496, "lr": 9.984714249673675e-05, "epoch": 0.2462686567164179, "percentage": 12.31, "elapsed_time": "0:02:08", "remaining_time": "0:15:15"}
{"current_steps": 34, "total_steps": 268, "loss": 0.064, "lr": 9.979198225579968e-05, "epoch": 0.2537313432835821, "percentage": 12.69, "elapsed_time": "0:02:12", "remaining_time": "0:15:09"}
{"current_steps": 35, "total_steps": 268, "loss": 0.0899, "lr": 9.972836106879935e-05, "epoch": 0.26119402985074625, "percentage": 13.06, "elapsed_time": "0:02:15", "remaining_time": "0:15:03"}
{"current_steps": 36, "total_steps": 268, "loss": 0.0535, "lr": 9.965628974662144e-05, "epoch": 0.26865671641791045, "percentage": 13.43, "elapsed_time": "0:02:19", "remaining_time": "0:14:57"}
{"current_steps": 37, "total_steps": 268, "loss": 0.0582, "lr": 9.957578053604837e-05, "epoch": 0.27611940298507465, "percentage": 13.81, "elapsed_time": "0:02:22", "remaining_time": "0:14:52"}
{"current_steps": 38, "total_steps": 268, "loss": 0.0557, "lr": 9.9486847117678e-05, "epoch": 0.2835820895522388, "percentage": 14.18, "elapsed_time": "0:02:26", "remaining_time": "0:14:46"}
{"current_steps": 39, "total_steps": 268, "loss": 0.0555, "lr": 9.938950460359913e-05, "epoch": 0.291044776119403, "percentage": 14.55, "elapsed_time": "0:02:30", "remaining_time": "0:14:41"}
{"current_steps": 40, "total_steps": 268, "loss": 0.0538, "lr": 9.928376953482343e-05, "epoch": 0.29850746268656714, "percentage": 14.93, "elapsed_time": "0:02:33", "remaining_time": "0:14:36"}
{"current_steps": 41, "total_steps": 268, "loss": 0.062, "lr": 9.916965987847485e-05, "epoch": 0.30597014925373134, "percentage": 15.3, "elapsed_time": "0:02:37", "remaining_time": "0:14:31"}
{"current_steps": 42, "total_steps": 268, "loss": 0.0729, "lr": 9.904719502473634e-05, "epoch": 0.31343283582089554, "percentage": 15.67, "elapsed_time": "0:02:40", "remaining_time": "0:14:25"}
{"current_steps": 43, "total_steps": 268, "loss": 0.0673, "lr": 9.891639578355511e-05, "epoch": 0.3208955223880597, "percentage": 16.04, "elapsed_time": "0:02:44", "remaining_time": "0:14:21"}
{"current_steps": 44, "total_steps": 268, "loss": 0.0621, "lr": 9.877728438110645e-05, "epoch": 0.3283582089552239, "percentage": 16.42, "elapsed_time": "0:02:48", "remaining_time": "0:14:16"}
{"current_steps": 45, "total_steps": 268, "loss": 0.0674, "lr": 9.862988445601688e-05, "epoch": 0.3358208955223881, "percentage": 16.79, "elapsed_time": "0:02:51", "remaining_time": "0:14:11"}
{"current_steps": 46, "total_steps": 268, "loss": 0.0745, "lr": 9.847422105534739e-05, "epoch": 0.34328358208955223, "percentage": 17.16, "elapsed_time": "0:02:55", "remaining_time": "0:14:06"}
{"current_steps": 47, "total_steps": 268, "loss": 0.0747, "lr": 9.831032063033726e-05, "epoch": 0.35074626865671643, "percentage": 17.54, "elapsed_time": "0:02:58", "remaining_time": "0:14:01"}
{"current_steps": 48, "total_steps": 268, "loss": 0.0549, "lr": 9.813821103190932e-05, "epoch": 0.3582089552238806, "percentage": 17.91, "elapsed_time": "0:03:02", "remaining_time": "0:13:56"}
{"current_steps": 49, "total_steps": 268, "loss": 0.0646, "lr": 9.795792150593739e-05, "epoch": 0.3656716417910448, "percentage": 18.28, "elapsed_time": "0:03:06", "remaining_time": "0:13:52"}
{"current_steps": 50, "total_steps": 268, "loss": 0.0595, "lr": 9.776948268827659e-05, "epoch": 0.373134328358209, "percentage": 18.66, "elapsed_time": "0:03:09", "remaining_time": "0:13:47"}
{"current_steps": 51, "total_steps": 268, "loss": 0.0644, "lr": 9.757292659955755e-05, "epoch": 0.3805970149253731, "percentage": 19.03, "elapsed_time": "0:03:13", "remaining_time": "0:13:44"}
{"current_steps": 52, "total_steps": 268, "loss": 0.0594, "lr": 9.736828663974527e-05, "epoch": 0.3880597014925373, "percentage": 19.4, "elapsed_time": "0:03:17", "remaining_time": "0:13:40"}
{"current_steps": 53, "total_steps": 268, "loss": 0.0554, "lr": 9.715559758246363e-05, "epoch": 0.39552238805970147, "percentage": 19.78, "elapsed_time": "0:03:21", "remaining_time": "0:13:37"}
{"current_steps": 54, "total_steps": 268, "loss": 0.0645, "lr": 9.693489556908641e-05, "epoch": 0.40298507462686567, "percentage": 20.15, "elapsed_time": "0:03:24", "remaining_time": "0:13:32"}
{"current_steps": 55, "total_steps": 268, "loss": 0.078, "lr": 9.670621810259595e-05, "epoch": 0.41044776119402987, "percentage": 20.52, "elapsed_time": "0:03:28", "remaining_time": "0:13:27"}
{"current_steps": 56, "total_steps": 268, "loss": 0.0569, "lr": 9.646960404121042e-05, "epoch": 0.417910447761194, "percentage": 20.9, "elapsed_time": "0:03:32", "remaining_time": "0:13:23"}
{"current_steps": 57, "total_steps": 268, "loss": 0.0682, "lr": 9.62250935917808e-05, "epoch": 0.4253731343283582, "percentage": 21.27, "elapsed_time": "0:03:35", "remaining_time": "0:13:18"}
{"current_steps": 58, "total_steps": 268, "loss": 0.0808, "lr": 9.597272830295876e-05, "epoch": 0.43283582089552236, "percentage": 21.64, "elapsed_time": "0:03:39", "remaining_time": "0:13:15"}
{"current_steps": 59, "total_steps": 268, "loss": 0.0656, "lr": 9.571255105813632e-05, "epoch": 0.44029850746268656, "percentage": 22.01, "elapsed_time": "0:03:43", "remaining_time": "0:13:10"}
{"current_steps": 60, "total_steps": 268, "loss": 0.0683, "lr": 9.5444606068159e-05, "epoch": 0.44776119402985076, "percentage": 22.39, "elapsed_time": "0:03:46", "remaining_time": "0:13:06"}
{"current_steps": 61, "total_steps": 268, "loss": 0.0752, "lr": 9.516893886381323e-05, "epoch": 0.4552238805970149, "percentage": 22.76, "elapsed_time": "0:03:50", "remaining_time": "0:13:02"}
{"current_steps": 62, "total_steps": 268, "loss": 0.0702, "lr": 9.488559628808939e-05, "epoch": 0.4626865671641791, "percentage": 23.13, "elapsed_time": "0:03:54", "remaining_time": "0:12:57"}
{"current_steps": 63, "total_steps": 268, "loss": 0.0675, "lr": 9.459462648822208e-05, "epoch": 0.4701492537313433, "percentage": 23.51, "elapsed_time": "0:03:57", "remaining_time": "0:12:53"}
{"current_steps": 64, "total_steps": 268, "loss": 0.0593, "lr": 9.429607890750863e-05, "epoch": 0.47761194029850745, "percentage": 23.88, "elapsed_time": "0:04:01", "remaining_time": "0:12:49"}
{"current_steps": 65, "total_steps": 268, "loss": 0.0566, "lr": 9.399000427690735e-05, "epoch": 0.48507462686567165, "percentage": 24.25, "elapsed_time": "0:04:04", "remaining_time": "0:12:44"}
{"current_steps": 66, "total_steps": 268, "loss": 0.0609, "lr": 9.367645460641716e-05, "epoch": 0.4925373134328358, "percentage": 24.63, "elapsed_time": "0:04:08", "remaining_time": "0:12:40"}
{"current_steps": 67, "total_steps": 268, "loss": 0.0718, "lr": 9.335548317623957e-05, "epoch": 0.5, "percentage": 25.0, "elapsed_time": "0:04:12", "remaining_time": "0:12:36"}
{"current_steps": 68, "total_steps": 268, "loss": 0.0609, "lr": 9.302714452772516e-05, "epoch": 0.5074626865671642, "percentage": 25.37, "elapsed_time": "0:04:15", "remaining_time": "0:12:31"}
{"current_steps": 69, "total_steps": 268, "loss": 0.064, "lr": 9.269149445410545e-05, "epoch": 0.5149253731343284, "percentage": 25.75, "elapsed_time": "0:04:19", "remaining_time": "0:12:27"}
{"current_steps": 70, "total_steps": 268, "loss": 0.0515, "lr": 9.234858999101231e-05, "epoch": 0.5223880597014925, "percentage": 26.12, "elapsed_time": "0:04:22", "remaining_time": "0:12:23"}
{"current_steps": 71, "total_steps": 268, "loss": 0.0633, "lr": 9.199848940678606e-05, "epoch": 0.5298507462686567, "percentage": 26.49, "elapsed_time": "0:04:26", "remaining_time": "0:12:19"}
{"current_steps": 72, "total_steps": 268, "loss": 0.0557, "lr": 9.164125219257418e-05, "epoch": 0.5373134328358209, "percentage": 26.87, "elapsed_time": "0:04:30", "remaining_time": "0:12:15"}
{"current_steps": 73, "total_steps": 268, "loss": 0.0636, "lr": 9.127693905222224e-05, "epoch": 0.5447761194029851, "percentage": 27.24, "elapsed_time": "0:04:33", "remaining_time": "0:12:10"}
{"current_steps": 74, "total_steps": 268, "loss": 0.065, "lr": 9.09056118919587e-05, "epoch": 0.5522388059701493, "percentage": 27.61, "elapsed_time": "0:04:37", "remaining_time": "0:12:06"}
{"current_steps": 75, "total_steps": 268, "loss": 0.0655, "lr": 9.052733380987554e-05, "epoch": 0.5597014925373134, "percentage": 27.99, "elapsed_time": "0:04:40", "remaining_time": "0:12:02"}
{"current_steps": 76, "total_steps": 268, "loss": 0.0672, "lr": 9.014216908520618e-05, "epoch": 0.5671641791044776, "percentage": 28.36, "elapsed_time": "0:04:44", "remaining_time": "0:11:58"}
{"current_steps": 77, "total_steps": 268, "loss": 0.0471, "lr": 8.975018316740278e-05, "epoch": 0.5746268656716418, "percentage": 28.73, "elapsed_time": "0:04:48", "remaining_time": "0:11:54"}
{"current_steps": 78, "total_steps": 268, "loss": 0.0657, "lr": 8.935144266501469e-05, "epoch": 0.582089552238806, "percentage": 29.1, "elapsed_time": "0:04:51", "remaining_time": "0:11:50"}
{"current_steps": 79, "total_steps": 268, "loss": 0.0587, "lr": 8.894601533436999e-05, "epoch": 0.5895522388059702, "percentage": 29.48, "elapsed_time": "0:04:55", "remaining_time": "0:11:46"}
{"current_steps": 80, "total_steps": 268, "loss": 0.0695, "lr": 8.853397006806182e-05, "epoch": 0.5970149253731343, "percentage": 29.85, "elapsed_time": "0:04:58", "remaining_time": "0:11:42"}
{"current_steps": 81, "total_steps": 268, "loss": 0.0615, "lr": 8.811537688324188e-05, "epoch": 0.6044776119402985, "percentage": 30.22, "elapsed_time": "0:05:02", "remaining_time": "0:11:38"}
{"current_steps": 82, "total_steps": 268, "loss": 0.0736, "lr": 8.769030690972262e-05, "epoch": 0.6119402985074627, "percentage": 30.6, "elapsed_time": "0:05:05", "remaining_time": "0:11:34"}
{"current_steps": 83, "total_steps": 268, "loss": 0.05, "lr": 8.725883237789045e-05, "epoch": 0.6194029850746269, "percentage": 30.97, "elapsed_time": "0:05:09", "remaining_time": "0:11:30"}
{"current_steps": 84, "total_steps": 268, "loss": 0.0547, "lr": 8.682102660643197e-05, "epoch": 0.6268656716417911, "percentage": 31.34, "elapsed_time": "0:05:13", "remaining_time": "0:11:26"}
{"current_steps": 85, "total_steps": 268, "loss": 0.0755, "lr": 8.637696398987516e-05, "epoch": 0.6343283582089553, "percentage": 31.72, "elapsed_time": "0:05:16", "remaining_time": "0:11:22"}
{"current_steps": 86, "total_steps": 268, "loss": 0.0596, "lr": 8.592671998594794e-05, "epoch": 0.6417910447761194, "percentage": 32.09, "elapsed_time": "0:05:20", "remaining_time": "0:11:17"}
{"current_steps": 87, "total_steps": 268, "loss": 0.0498, "lr": 8.547037110275579e-05, "epoch": 0.6492537313432836, "percentage": 32.46, "elapsed_time": "0:05:23", "remaining_time": "0:11:14"}
{"current_steps": 88, "total_steps": 268, "loss": 0.0693, "lr": 8.50079948857812e-05, "epoch": 0.6567164179104478, "percentage": 32.84, "elapsed_time": "0:05:27", "remaining_time": "0:11:10"}
{"current_steps": 89, "total_steps": 268, "loss": 0.0667, "lr": 8.453966990470656e-05, "epoch": 0.664179104477612, "percentage": 33.21, "elapsed_time": "0:05:31", "remaining_time": "0:11:06"}
{"current_steps": 90, "total_steps": 268, "loss": 0.0657, "lr": 8.406547574006325e-05, "epoch": 0.6716417910447762, "percentage": 33.58, "elapsed_time": "0:05:34", "remaining_time": "0:11:02"}
{"current_steps": 91, "total_steps": 268, "loss": 0.062, "lr": 8.358549296970876e-05, "epoch": 0.6791044776119403, "percentage": 33.96, "elapsed_time": "0:05:38", "remaining_time": "0:10:58"}
{"current_steps": 92, "total_steps": 268, "loss": 0.0619, "lr": 8.309980315513444e-05, "epoch": 0.6865671641791045, "percentage": 34.33, "elapsed_time": "0:05:42", "remaining_time": "0:10:54"}
{"current_steps": 93, "total_steps": 268, "loss": 0.0607, "lr": 8.260848882760615e-05, "epoch": 0.6940298507462687, "percentage": 34.7, "elapsed_time": "0:05:45", "remaining_time": "0:10:50"}
{"current_steps": 94, "total_steps": 268, "loss": 0.0641, "lr": 8.211163347414003e-05, "epoch": 0.7014925373134329, "percentage": 35.07, "elapsed_time": "0:05:49", "remaining_time": "0:10:46"}
{"current_steps": 95, "total_steps": 268, "loss": 0.052, "lr": 8.160932152331586e-05, "epoch": 0.7089552238805971, "percentage": 35.45, "elapsed_time": "0:05:52", "remaining_time": "0:10:42"}
{"current_steps": 96, "total_steps": 268, "loss": 0.0642, "lr": 8.11016383309305e-05, "epoch": 0.7164179104477612, "percentage": 35.82, "elapsed_time": "0:05:56", "remaining_time": "0:10:38"}
{"current_steps": 97, "total_steps": 268, "loss": 0.0511, "lr": 8.058867016549372e-05, "epoch": 0.7238805970149254, "percentage": 36.19, "elapsed_time": "0:06:00", "remaining_time": "0:10:34"}
{"current_steps": 98, "total_steps": 268, "loss": 0.0639, "lr": 8.007050419356899e-05, "epoch": 0.7313432835820896, "percentage": 36.57, "elapsed_time": "0:06:03", "remaining_time": "0:10:30"}
{"current_steps": 99, "total_steps": 268, "loss": 0.0459, "lr": 7.95472284649615e-05, "epoch": 0.7388059701492538, "percentage": 36.94, "elapsed_time": "0:06:07", "remaining_time": "0:10:26"}
{"current_steps": 100, "total_steps": 268, "loss": 0.0511, "lr": 7.90189318977564e-05, "epoch": 0.746268656716418, "percentage": 37.31, "elapsed_time": "0:06:10", "remaining_time": "0:10:23"}
{"current_steps": 101, "total_steps": 268, "loss": 0.0628, "lr": 7.848570426320917e-05, "epoch": 0.753731343283582, "percentage": 37.69, "elapsed_time": "0:06:14", "remaining_time": "0:10:19"}
{"current_steps": 102, "total_steps": 268, "loss": 0.0703, "lr": 7.794763617049124e-05, "epoch": 0.7611940298507462, "percentage": 38.06, "elapsed_time": "0:06:18", "remaining_time": "0:10:15"}
{"current_steps": 103, "total_steps": 268, "loss": 0.077, "lr": 7.740481905129306e-05, "epoch": 0.7686567164179104, "percentage": 38.43, "elapsed_time": "0:06:21", "remaining_time": "0:10:11"}
{"current_steps": 104, "total_steps": 268, "loss": 0.0679, "lr": 7.685734514428766e-05, "epoch": 0.7761194029850746, "percentage": 38.81, "elapsed_time": "0:06:25", "remaining_time": "0:10:07"}
{"current_steps": 105, "total_steps": 268, "loss": 0.0437, "lr": 7.630530747945673e-05, "epoch": 0.7835820895522388, "percentage": 39.18, "elapsed_time": "0:06:28", "remaining_time": "0:10:03"}
{"current_steps": 106, "total_steps": 268, "loss": 0.0619, "lr": 7.574879986228245e-05, "epoch": 0.7910447761194029, "percentage": 39.55, "elapsed_time": "0:06:32", "remaining_time": "0:09:59"}
{"current_steps": 107, "total_steps": 268, "loss": 0.0584, "lr": 7.518791685780768e-05, "epoch": 0.7985074626865671, "percentage": 39.93, "elapsed_time": "0:06:36", "remaining_time": "0:09:55"}
{"current_steps": 108, "total_steps": 268, "loss": 0.071, "lr": 7.46227537745667e-05, "epoch": 0.8059701492537313, "percentage": 40.3, "elapsed_time": "0:06:39", "remaining_time": "0:09:52"}
{"current_steps": 109, "total_steps": 268, "loss": 0.0703, "lr": 7.405340664838993e-05, "epoch": 0.8134328358208955, "percentage": 40.67, "elapsed_time": "0:06:43", "remaining_time": "0:09:48"}
{"current_steps": 110, "total_steps": 268, "loss": 0.058, "lr": 7.347997222608492e-05, "epoch": 0.8208955223880597, "percentage": 41.04, "elapsed_time": "0:06:46", "remaining_time": "0:09:44"}
{"current_steps": 111, "total_steps": 268, "loss": 0.0476, "lr": 7.290254794899664e-05, "epoch": 0.8283582089552238, "percentage": 41.42, "elapsed_time": "0:06:50", "remaining_time": "0:09:40"}
{"current_steps": 112, "total_steps": 268, "loss": 0.0617, "lr": 7.232123193644957e-05, "epoch": 0.835820895522388, "percentage": 41.79, "elapsed_time": "0:06:54", "remaining_time": "0:09:36"}
{"current_steps": 113, "total_steps": 268, "loss": 0.0557, "lr": 7.173612296907472e-05, "epoch": 0.8432835820895522, "percentage": 42.16, "elapsed_time": "0:06:57", "remaining_time": "0:09:32"}
{"current_steps": 114, "total_steps": 268, "loss": 0.0461, "lr": 7.114732047202433e-05, "epoch": 0.8507462686567164, "percentage": 42.54, "elapsed_time": "0:07:01", "remaining_time": "0:09:29"}
{"current_steps": 115, "total_steps": 268, "loss": 0.0578, "lr": 7.055492449807684e-05, "epoch": 0.8582089552238806, "percentage": 42.91, "elapsed_time": "0:07:04", "remaining_time": "0:09:25"}
{"current_steps": 116, "total_steps": 268, "loss": 0.0559, "lr": 6.99590357106354e-05, "epoch": 0.8656716417910447, "percentage": 43.28, "elapsed_time": "0:07:08", "remaining_time": "0:09:21"}
{"current_steps": 117, "total_steps": 268, "loss": 0.0567, "lr": 6.935975536662253e-05, "epoch": 0.8731343283582089, "percentage": 43.66, "elapsed_time": "0:07:12", "remaining_time": "0:09:17"}
{"current_steps": 118, "total_steps": 268, "loss": 0.0711, "lr": 6.875718529927405e-05, "epoch": 0.8805970149253731, "percentage": 44.03, "elapsed_time": "0:07:15", "remaining_time": "0:09:13"}
{"current_steps": 119, "total_steps": 268, "loss": 0.0638, "lr": 6.815142790083472e-05, "epoch": 0.8880597014925373, "percentage": 44.4, "elapsed_time": "0:07:19", "remaining_time": "0:09:09"}
{"current_steps": 120, "total_steps": 268, "loss": 0.0592, "lr": 6.75425861051595e-05, "epoch": 0.8955223880597015, "percentage": 44.78, "elapsed_time": "0:07:22", "remaining_time": "0:09:06"}
{"current_steps": 121, "total_steps": 268, "loss": 0.0527, "lr": 6.693076337022211e-05, "epoch": 0.9029850746268657, "percentage": 45.15, "elapsed_time": "0:07:26", "remaining_time": "0:09:02"}
{"current_steps": 122, "total_steps": 268, "loss": 0.0619, "lr": 6.631606366053506e-05, "epoch": 0.9104477611940298, "percentage": 45.52, "elapsed_time": "0:07:30", "remaining_time": "0:08:58"}
{"current_steps": 123, "total_steps": 268, "loss": 0.0496, "lr": 6.569859142948328e-05, "epoch": 0.917910447761194, "percentage": 45.9, "elapsed_time": "0:07:33", "remaining_time": "0:08:54"}
{"current_steps": 124, "total_steps": 268, "loss": 0.069, "lr": 6.507845160157477e-05, "epoch": 0.9253731343283582, "percentage": 46.27, "elapsed_time": "0:07:37", "remaining_time": "0:08:50"}
{"current_steps": 125, "total_steps": 268, "loss": 0.0525, "lr": 6.445574955461134e-05, "epoch": 0.9328358208955224, "percentage": 46.64, "elapsed_time": "0:07:40", "remaining_time": "0:08:47"}
{"current_steps": 126, "total_steps": 268, "loss": 0.0652, "lr": 6.383059110178204e-05, "epoch": 0.9402985074626866, "percentage": 47.01, "elapsed_time": "0:07:44", "remaining_time": "0:08:43"}
{"current_steps": 127, "total_steps": 268, "loss": 0.0436, "lr": 6.320308247368286e-05, "epoch": 0.9477611940298507, "percentage": 47.39, "elapsed_time": "0:07:48", "remaining_time": "0:08:39"}
{"current_steps": 128, "total_steps": 268, "loss": 0.0778, "lr": 6.257333030026538e-05, "epoch": 0.9552238805970149, "percentage": 47.76, "elapsed_time": "0:07:51", "remaining_time": "0:08:35"}
{"current_steps": 129, "total_steps": 268, "loss": 0.0674, "lr": 6.194144159271756e-05, "epoch": 0.9626865671641791, "percentage": 48.13, "elapsed_time": "0:07:55", "remaining_time": "0:08:32"}
{"current_steps": 130, "total_steps": 268, "loss": 0.0556, "lr": 6.130752372527982e-05, "epoch": 0.9701492537313433, "percentage": 48.51, "elapsed_time": "0:07:58", "remaining_time": "0:08:28"}
{"current_steps": 131, "total_steps": 268, "loss": 0.0563, "lr": 6.0671684416999273e-05, "epoch": 0.9776119402985075, "percentage": 48.88, "elapsed_time": "0:08:02", "remaining_time": "0:08:24"}
{"current_steps": 132, "total_steps": 268, "loss": 0.0571, "lr": 6.003403171342563e-05, "epoch": 0.9850746268656716, "percentage": 49.25, "elapsed_time": "0:08:06", "remaining_time": "0:08:20"}
{"current_steps": 133, "total_steps": 268, "loss": 0.0688, "lr": 5.939467396825137e-05, "epoch": 0.9925373134328358, "percentage": 49.63, "elapsed_time": "0:08:09", "remaining_time": "0:08:16"}
{"current_steps": 134, "total_steps": 268, "loss": 0.0656, "lr": 5.875371982489959e-05, "epoch": 1.0, "percentage": 50.0, "elapsed_time": "0:08:13", "remaining_time": "0:08:13"}
{"current_steps": 135, "total_steps": 268, "loss": 0.0471, "lr": 5.811127819806277e-05, "epoch": 1.007462686567164, "percentage": 50.37, "elapsed_time": "0:08:16", "remaining_time": "0:08:09"}
{"current_steps": 136, "total_steps": 268, "loss": 0.0469, "lr": 5.7467458255195384e-05, "epoch": 1.0149253731343284, "percentage": 50.75, "elapsed_time": "0:08:20", "remaining_time": "0:08:05"}
{"current_steps": 137, "total_steps": 268, "loss": 0.0474, "lr": 5.682236939796337e-05, "epoch": 1.0223880597014925, "percentage": 51.12, "elapsed_time": "0:08:24", "remaining_time": "0:08:01"}
{"current_steps": 138, "total_steps": 268, "loss": 0.0564, "lr": 5.61761212436541e-05, "epoch": 1.0298507462686568, "percentage": 51.49, "elapsed_time": "0:08:27", "remaining_time": "0:07:58"}
{"current_steps": 139, "total_steps": 268, "loss": 0.0562, "lr": 5.55288236065495e-05, "epoch": 1.037313432835821, "percentage": 51.87, "elapsed_time": "0:08:31", "remaining_time": "0:07:54"}
{"current_steps": 140, "total_steps": 268, "loss": 0.0579, "lr": 5.488058647926577e-05, "epoch": 1.044776119402985, "percentage": 52.24, "elapsed_time": "0:08:34", "remaining_time": "0:07:50"}
{"current_steps": 141, "total_steps": 268, "loss": 0.0491, "lr": 5.423152001406282e-05, "epoch": 1.0522388059701493, "percentage": 52.61, "elapsed_time": "0:08:38", "remaining_time": "0:07:46"}
{"current_steps": 142, "total_steps": 268, "loss": 0.0422, "lr": 5.3581734504126494e-05, "epoch": 1.0597014925373134, "percentage": 52.99, "elapsed_time": "0:08:42", "remaining_time": "0:07:43"}
{"current_steps": 143, "total_steps": 268, "loss": 0.0685, "lr": 5.293134036482698e-05, "epoch": 1.0671641791044777, "percentage": 53.36, "elapsed_time": "0:08:45", "remaining_time": "0:07:39"}
{"current_steps": 144, "total_steps": 268, "loss": 0.0431, "lr": 5.2280448114956316e-05, "epoch": 1.0746268656716418, "percentage": 53.73, "elapsed_time": "0:08:49", "remaining_time": "0:07:35"}
{"current_steps": 145, "total_steps": 268, "loss": 0.0561, "lr": 5.1629168357948435e-05, "epoch": 1.0820895522388059, "percentage": 54.1, "elapsed_time": "0:08:52", "remaining_time": "0:07:31"}
{"current_steps": 146, "total_steps": 268, "loss": 0.0502, "lr": 5.097761176308471e-05, "epoch": 1.0895522388059702, "percentage": 54.48, "elapsed_time": "0:08:56", "remaining_time": "0:07:28"}
{"current_steps": 147, "total_steps": 268, "loss": 0.0532, "lr": 5.032588904668851e-05, "epoch": 1.0970149253731343, "percentage": 54.85, "elapsed_time": "0:09:00", "remaining_time": "0:07:24"}
{"current_steps": 148, "total_steps": 268, "loss": 0.0535, "lr": 4.967411095331149e-05, "epoch": 1.1044776119402986, "percentage": 55.22, "elapsed_time": "0:09:03", "remaining_time": "0:07:20"}
{"current_steps": 149, "total_steps": 268, "loss": 0.0554, "lr": 4.90223882369153e-05, "epoch": 1.1119402985074627, "percentage": 55.6, "elapsed_time": "0:09:07", "remaining_time": "0:07:17"}
{"current_steps": 150, "total_steps": 268, "loss": 0.0513, "lr": 4.837083164205159e-05, "epoch": 1.1194029850746268, "percentage": 55.97, "elapsed_time": "0:09:10", "remaining_time": "0:07:13"}
{"current_steps": 151, "total_steps": 268, "loss": 0.0624, "lr": 4.771955188504371e-05, "epoch": 1.126865671641791, "percentage": 56.34, "elapsed_time": "0:09:14", "remaining_time": "0:07:09"}
{"current_steps": 152, "total_steps": 268, "loss": 0.0565, "lr": 4.7068659635173026e-05, "epoch": 1.1343283582089552, "percentage": 56.72, "elapsed_time": "0:09:18", "remaining_time": "0:07:05"}
{"current_steps": 153, "total_steps": 268, "loss": 0.0547, "lr": 4.641826549587352e-05, "epoch": 1.1417910447761195, "percentage": 57.09, "elapsed_time": "0:09:21", "remaining_time": "0:07:02"}
{"current_steps": 154, "total_steps": 268, "loss": 0.0386, "lr": 4.57684799859372e-05, "epoch": 1.1492537313432836, "percentage": 57.46, "elapsed_time": "0:09:25", "remaining_time": "0:06:58"}
{"current_steps": 155, "total_steps": 268, "loss": 0.0602, "lr": 4.511941352073424e-05, "epoch": 1.1567164179104479, "percentage": 57.84, "elapsed_time": "0:09:28", "remaining_time": "0:06:54"}
{"current_steps": 156, "total_steps": 268, "loss": 0.0546, "lr": 4.447117639345052e-05, "epoch": 1.164179104477612, "percentage": 58.21, "elapsed_time": "0:09:32", "remaining_time": "0:06:50"}
{"current_steps": 157, "total_steps": 268, "loss": 0.0585, "lr": 4.382387875634591e-05, "epoch": 1.171641791044776, "percentage": 58.58, "elapsed_time": "0:09:36", "remaining_time": "0:06:47"}
{"current_steps": 158, "total_steps": 268, "loss": 0.0433, "lr": 4.317763060203664e-05, "epoch": 1.1791044776119404, "percentage": 58.96, "elapsed_time": "0:09:39", "remaining_time": "0:06:43"}
{"current_steps": 159, "total_steps": 268, "loss": 0.0479, "lr": 4.253254174480462e-05, "epoch": 1.1865671641791045, "percentage": 59.33, "elapsed_time": "0:09:43", "remaining_time": "0:06:39"}
{"current_steps": 160, "total_steps": 268, "loss": 0.0455, "lr": 4.188872180193723e-05, "epoch": 1.1940298507462686, "percentage": 59.7, "elapsed_time": "0:09:46", "remaining_time": "0:06:36"}
{"current_steps": 161, "total_steps": 268, "loss": 0.0598, "lr": 4.124628017510043e-05, "epoch": 1.2014925373134329, "percentage": 60.07, "elapsed_time": "0:09:50", "remaining_time": "0:06:32"}
{"current_steps": 162, "total_steps": 268, "loss": 0.0454, "lr": 4.0605326031748645e-05, "epoch": 1.208955223880597, "percentage": 60.45, "elapsed_time": "0:09:54", "remaining_time": "0:06:28"}
{"current_steps": 163, "total_steps": 268, "loss": 0.0486, "lr": 3.9965968286574376e-05, "epoch": 1.2164179104477613, "percentage": 60.82, "elapsed_time": "0:09:57", "remaining_time": "0:06:25"}
{"current_steps": 164, "total_steps": 268, "loss": 0.06, "lr": 3.932831558300074e-05, "epoch": 1.2238805970149254, "percentage": 61.19, "elapsed_time": "0:10:01", "remaining_time": "0:06:21"}
{"current_steps": 165, "total_steps": 268, "loss": 0.0423, "lr": 3.869247627472021e-05, "epoch": 1.2313432835820897, "percentage": 61.57, "elapsed_time": "0:10:04", "remaining_time": "0:06:17"}
{"current_steps": 166, "total_steps": 268, "loss": 0.0514, "lr": 3.8058558407282464e-05, "epoch": 1.2388059701492538, "percentage": 61.94, "elapsed_time": "0:10:08", "remaining_time": "0:06:13"}
{"current_steps": 167, "total_steps": 268, "loss": 0.0506, "lr": 3.742666969973463e-05, "epoch": 1.2462686567164178, "percentage": 62.31, "elapsed_time": "0:10:12", "remaining_time": "0:06:10"}
{"current_steps": 168, "total_steps": 268, "loss": 0.0607, "lr": 3.6796917526317156e-05, "epoch": 1.2537313432835822, "percentage": 62.69, "elapsed_time": "0:10:15", "remaining_time": "0:06:06"}
{"current_steps": 169, "total_steps": 268, "loss": 0.0613, "lr": 3.616940889821797e-05, "epoch": 1.2611940298507462, "percentage": 63.06, "elapsed_time": "0:10:19", "remaining_time": "0:06:02"}
{"current_steps": 170, "total_steps": 268, "loss": 0.045, "lr": 3.5544250445388675e-05, "epoch": 1.2686567164179103, "percentage": 63.43, "elapsed_time": "0:10:22", "remaining_time": "0:05:59"}
{"current_steps": 171, "total_steps": 268, "loss": 0.0484, "lr": 3.492154839842525e-05, "epoch": 1.2761194029850746, "percentage": 63.81, "elapsed_time": "0:10:26", "remaining_time": "0:05:55"}
{"current_steps": 172, "total_steps": 268, "loss": 0.0495, "lr": 3.430140857051675e-05, "epoch": 1.2835820895522387, "percentage": 64.18, "elapsed_time": "0:10:30", "remaining_time": "0:05:51"}
{"current_steps": 173, "total_steps": 268, "loss": 0.0565, "lr": 3.368393633946496e-05, "epoch": 1.291044776119403, "percentage": 64.55, "elapsed_time": "0:10:33", "remaining_time": "0:05:47"}
{"current_steps": 174, "total_steps": 268, "loss": 0.0536, "lr": 3.306923662977789e-05, "epoch": 1.2985074626865671, "percentage": 64.93, "elapsed_time": "0:10:37", "remaining_time": "0:05:44"}
{"current_steps": 175, "total_steps": 268, "loss": 0.0502, "lr": 3.245741389484052e-05, "epoch": 1.3059701492537314, "percentage": 65.3, "elapsed_time": "0:10:40", "remaining_time": "0:05:40"}
{"current_steps": 176, "total_steps": 268, "loss": 0.0432, "lr": 3.184857209916528e-05, "epoch": 1.3134328358208955, "percentage": 65.67, "elapsed_time": "0:10:44", "remaining_time": "0:05:36"}
{"current_steps": 177, "total_steps": 268, "loss": 0.0644, "lr": 3.124281470072597e-05, "epoch": 1.3208955223880596, "percentage": 66.04, "elapsed_time": "0:10:48", "remaining_time": "0:05:33"}
{"current_steps": 178, "total_steps": 268, "loss": 0.0484, "lr": 3.064024463337747e-05, "epoch": 1.328358208955224, "percentage": 66.42, "elapsed_time": "0:10:51", "remaining_time": "0:05:29"}
{"current_steps": 179, "total_steps": 268, "loss": 0.0417, "lr": 3.0040964289364616e-05, "epoch": 1.335820895522388, "percentage": 66.79, "elapsed_time": "0:10:55", "remaining_time": "0:05:25"}
{"current_steps": 180, "total_steps": 268, "loss": 0.0682, "lr": 2.944507550192318e-05, "epoch": 1.3432835820895521, "percentage": 67.16, "elapsed_time": "0:10:58", "remaining_time": "0:05:22"}
{"current_steps": 181, "total_steps": 268, "loss": 0.042, "lr": 2.885267952797569e-05, "epoch": 1.3507462686567164, "percentage": 67.54, "elapsed_time": "0:11:02", "remaining_time": "0:05:18"}
{"current_steps": 182, "total_steps": 268, "loss": 0.0555, "lr": 2.8263877030925277e-05, "epoch": 1.3582089552238805, "percentage": 67.91, "elapsed_time": "0:11:05", "remaining_time": "0:05:14"}
{"current_steps": 183, "total_steps": 268, "loss": 0.058, "lr": 2.7678768063550452e-05, "epoch": 1.3656716417910448, "percentage": 68.28, "elapsed_time": "0:11:09", "remaining_time": "0:05:11"}
{"current_steps": 184, "total_steps": 268, "loss": 0.0565, "lr": 2.7097452051003375e-05, "epoch": 1.373134328358209, "percentage": 68.66, "elapsed_time": "0:11:13", "remaining_time": "0:05:07"}
{"current_steps": 185, "total_steps": 268, "loss": 0.0347, "lr": 2.6520027773915075e-05, "epoch": 1.3805970149253732, "percentage": 69.03, "elapsed_time": "0:11:16", "remaining_time": "0:05:03"}
{"current_steps": 186, "total_steps": 268, "loss": 0.0455, "lr": 2.5946593351610082e-05, "epoch": 1.3880597014925373, "percentage": 69.4, "elapsed_time": "0:11:20", "remaining_time": "0:04:59"}
{"current_steps": 187, "total_steps": 268, "loss": 0.0538, "lr": 2.5377246225433303e-05, "epoch": 1.3955223880597014, "percentage": 69.78, "elapsed_time": "0:11:23", "remaining_time": "0:04:56"}
{"current_steps": 188, "total_steps": 268, "loss": 0.0431, "lr": 2.4812083142192328e-05, "epoch": 1.4029850746268657, "percentage": 70.15, "elapsed_time": "0:11:27", "remaining_time": "0:04:52"}
{"current_steps": 189, "total_steps": 268, "loss": 0.0528, "lr": 2.4251200137717544e-05, "epoch": 1.4104477611940298, "percentage": 70.52, "elapsed_time": "0:11:31", "remaining_time": "0:04:48"}
{"current_steps": 190, "total_steps": 268, "loss": 0.0451, "lr": 2.3694692520543295e-05, "epoch": 1.417910447761194, "percentage": 70.9, "elapsed_time": "0:11:34", "remaining_time": "0:04:45"}
{"current_steps": 191, "total_steps": 268, "loss": 0.0389, "lr": 2.3142654855712354e-05, "epoch": 1.4253731343283582, "percentage": 71.27, "elapsed_time": "0:11:38", "remaining_time": "0:04:41"}
{"current_steps": 192, "total_steps": 268, "loss": 0.0328, "lr": 2.259518094870693e-05, "epoch": 1.4328358208955223, "percentage": 71.64, "elapsed_time": "0:11:42", "remaining_time": "0:04:37"}
{"current_steps": 193, "total_steps": 268, "loss": 0.0556, "lr": 2.2052363829508775e-05, "epoch": 1.4402985074626866, "percentage": 72.01, "elapsed_time": "0:11:45", "remaining_time": "0:04:34"}
{"current_steps": 194, "total_steps": 268, "loss": 0.0504, "lr": 2.151429573679084e-05, "epoch": 1.4477611940298507, "percentage": 72.39, "elapsed_time": "0:11:49", "remaining_time": "0:04:30"}
{"current_steps": 195, "total_steps": 268, "loss": 0.0518, "lr": 2.0981068102243616e-05, "epoch": 1.455223880597015, "percentage": 72.76, "elapsed_time": "0:11:53", "remaining_time": "0:04:26"}
{"current_steps": 196, "total_steps": 268, "loss": 0.0588, "lr": 2.0452771535038518e-05, "epoch": 1.462686567164179, "percentage": 73.13, "elapsed_time": "0:11:56", "remaining_time": "0:04:23"}
{"current_steps": 197, "total_steps": 268, "loss": 0.0464, "lr": 1.9929495806431025e-05, "epoch": 1.4701492537313432, "percentage": 73.51, "elapsed_time": "0:12:00", "remaining_time": "0:04:19"}
{"current_steps": 198, "total_steps": 268, "loss": 0.0559, "lr": 1.9411329834506286e-05, "epoch": 1.4776119402985075, "percentage": 73.88, "elapsed_time": "0:12:04", "remaining_time": "0:04:16"}
{"current_steps": 199, "total_steps": 268, "loss": 0.0502, "lr": 1.8898361669069497e-05, "epoch": 1.4850746268656716, "percentage": 74.25, "elapsed_time": "0:12:07", "remaining_time": "0:04:12"}
{"current_steps": 200, "total_steps": 268, "loss": 0.045, "lr": 1.8390678476684142e-05, "epoch": 1.4925373134328357, "percentage": 74.63, "elapsed_time": "0:12:11", "remaining_time": "0:04:08"}
{"current_steps": 200, "total_steps": 268, "eval_loss": 0.05160621926188469, "epoch": 1.4925373134328357, "percentage": 74.63, "elapsed_time": "0:12:14", "remaining_time": "0:04:09"}
{"current_steps": 201, "total_steps": 268, "loss": 0.0566, "lr": 1.7888366525859968e-05, "epoch": 1.5, "percentage": 75.0, "elapsed_time": "0:12:17", "remaining_time": "0:04:05"}
{"current_steps": 202, "total_steps": 268, "loss": 0.0477, "lr": 1.739151117239385e-05, "epoch": 1.5074626865671643, "percentage": 75.37, "elapsed_time": "0:12:21", "remaining_time": "0:04:02"}
{"current_steps": 203, "total_steps": 268, "loss": 0.0533, "lr": 1.6900196844865573e-05, "epoch": 1.5149253731343284, "percentage": 75.75, "elapsed_time": "0:12:24", "remaining_time": "0:03:58"}
{"current_steps": 204, "total_steps": 268, "loss": 0.0519, "lr": 1.641450703029125e-05, "epoch": 1.5223880597014925, "percentage": 76.12, "elapsed_time": "0:12:28", "remaining_time": "0:03:54"}
{"current_steps": 205, "total_steps": 268, "loss": 0.046, "lr": 1.5934524259936756e-05, "epoch": 1.5298507462686568, "percentage": 76.49, "elapsed_time": "0:12:32", "remaining_time": "0:03:51"}
{"current_steps": 206, "total_steps": 268, "loss": 0.0498, "lr": 1.5460330095293447e-05, "epoch": 1.537313432835821, "percentage": 76.87, "elapsed_time": "0:12:36", "remaining_time": "0:03:47"}
{"current_steps": 207, "total_steps": 268, "loss": 0.0497, "lr": 1.4992005114218805e-05, "epoch": 1.544776119402985, "percentage": 77.24, "elapsed_time": "0:12:39", "remaining_time": "0:03:43"}
{"current_steps": 208, "total_steps": 268, "loss": 0.0495, "lr": 1.4529628897244212e-05, "epoch": 1.5522388059701493, "percentage": 77.61, "elapsed_time": "0:12:43", "remaining_time": "0:03:40"}
{"current_steps": 209, "total_steps": 268, "loss": 0.0514, "lr": 1.4073280014052077e-05, "epoch": 1.5597014925373134, "percentage": 77.99, "elapsed_time": "0:12:46", "remaining_time": "0:03:36"}
|
211 |
+
{"current_steps": 210, "total_steps": 268, "loss": 0.044, "lr": 1.3623036010124846e-05, "epoch": 1.5671641791044775, "percentage": 78.36, "elapsed_time": "0:12:50", "remaining_time": "0:03:32"}
|
212 |
+
{"current_steps": 211, "total_steps": 268, "loss": 0.0529, "lr": 1.3178973393568057e-05, "epoch": 1.5746268656716418, "percentage": 78.73, "elapsed_time": "0:12:53", "remaining_time": "0:03:29"}
|
213 |
+
{"current_steps": 212, "total_steps": 268, "loss": 0.047, "lr": 1.2741167622109556e-05, "epoch": 1.582089552238806, "percentage": 79.1, "elapsed_time": "0:12:57", "remaining_time": "0:03:25"}
|
214 |
+
{"current_steps": 213, "total_steps": 268, "loss": 0.0476, "lr": 1.230969309027739e-05, "epoch": 1.5895522388059702, "percentage": 79.48, "elapsed_time": "0:13:01", "remaining_time": "0:03:21"}
|
215 |
+
{"current_steps": 214, "total_steps": 268, "loss": 0.0539, "lr": 1.1884623116758121e-05, "epoch": 1.5970149253731343, "percentage": 79.85, "elapsed_time": "0:13:04", "remaining_time": "0:03:18"}
|
216 |
+
{"current_steps": 215, "total_steps": 268, "loss": 0.0576, "lr": 1.1466029931938182e-05, "epoch": 1.6044776119402986, "percentage": 80.22, "elapsed_time": "0:13:08", "remaining_time": "0:03:14"}
|
217 |
+
{"current_steps": 216, "total_steps": 268, "loss": 0.0449, "lr": 1.1053984665630024e-05, "epoch": 1.6119402985074627, "percentage": 80.6, "elapsed_time": "0:13:11", "remaining_time": "0:03:10"}
|
218 |
+
{"current_steps": 217, "total_steps": 268, "loss": 0.0589, "lr": 1.0648557334985309e-05, "epoch": 1.6194029850746268, "percentage": 80.97, "elapsed_time": "0:13:15", "remaining_time": "0:03:06"}
|
219 |
+
{"current_steps": 218, "total_steps": 268, "loss": 0.0549, "lr": 1.024981683259723e-05, "epoch": 1.626865671641791, "percentage": 81.34, "elapsed_time": "0:13:19", "remaining_time": "0:03:03"}
|
220 |
+
{"current_steps": 219, "total_steps": 268, "loss": 0.0537, "lr": 9.857830914793826e-06, "epoch": 1.6343283582089554, "percentage": 81.72, "elapsed_time": "0:13:22", "remaining_time": "0:02:59"}
|
221 |
+
{"current_steps": 220, "total_steps": 268, "loss": 0.0436, "lr": 9.472666190124457e-06, "epoch": 1.6417910447761193, "percentage": 82.09, "elapsed_time": "0:13:26", "remaining_time": "0:02:55"}
|
222 |
+
{"current_steps": 221, "total_steps": 268, "loss": 0.0571, "lr": 9.094388108041302e-06, "epoch": 1.6492537313432836, "percentage": 82.46, "elapsed_time": "0:13:29", "remaining_time": "0:02:52"}
|
223 |
+
{"current_steps": 222, "total_steps": 268, "loss": 0.0516, "lr": 8.723060947777777e-06, "epoch": 1.6567164179104479, "percentage": 82.84, "elapsed_time": "0:13:33", "remaining_time": "0:02:48"}
|
224 |
+
{"current_steps": 223, "total_steps": 268, "loss": 0.0501, "lr": 8.358747807425826e-06, "epoch": 1.664179104477612, "percentage": 83.21, "elapsed_time": "0:13:37", "remaining_time": "0:02:44"}
|
225 |
+
{"current_steps": 224, "total_steps": 268, "loss": 0.0531, "lr": 8.001510593213946e-06, "epoch": 1.671641791044776, "percentage": 83.58, "elapsed_time": "0:13:40", "remaining_time": "0:02:41"}
|
226 |
+
{"current_steps": 225, "total_steps": 268, "loss": 0.0564, "lr": 7.651410008987697e-06, "epoch": 1.6791044776119404, "percentage": 83.96, "elapsed_time": "0:13:44", "remaining_time": "0:02:37"}
|
227 |
+
{"current_steps": 226, "total_steps": 268, "loss": 0.0459, "lr": 7.308505545894567e-06, "epoch": 1.6865671641791045, "percentage": 84.33, "elapsed_time": "0:13:47", "remaining_time": "0:02:33"}
|
228 |
+
{"current_steps": 227, "total_steps": 268, "loss": 0.0539, "lr": 6.972855472274853e-06, "epoch": 1.6940298507462686, "percentage": 84.7, "elapsed_time": "0:13:51", "remaining_time": "0:02:30"}
|
229 |
+
{"current_steps": 228, "total_steps": 268, "loss": 0.0583, "lr": 6.6445168237604385e-06, "epoch": 1.7014925373134329, "percentage": 85.07, "elapsed_time": "0:13:55", "remaining_time": "0:02:26"}
|
230 |
+
{"current_steps": 229, "total_steps": 268, "loss": 0.0526, "lr": 6.323545393582847e-06, "epoch": 1.7089552238805972, "percentage": 85.45, "elapsed_time": "0:13:58", "remaining_time": "0:02:22"}
|
231 |
+
{"current_steps": 230, "total_steps": 268, "loss": 0.051, "lr": 6.009995723092654e-06, "epoch": 1.716417910447761, "percentage": 85.82, "elapsed_time": "0:14:02", "remaining_time": "0:02:19"}
|
232 |
+
{"current_steps": 231, "total_steps": 268, "loss": 0.0546, "lr": 5.703921092491393e-06, "epoch": 1.7238805970149254, "percentage": 86.19, "elapsed_time": "0:14:05", "remaining_time": "0:02:15"}
|
233 |
+
{"current_steps": 232, "total_steps": 268, "loss": 0.0624, "lr": 5.405373511777939e-06, "epoch": 1.7313432835820897, "percentage": 86.57, "elapsed_time": "0:14:09", "remaining_time": "0:02:11"}
|
234 |
+
{"current_steps": 233, "total_steps": 268, "loss": 0.0565, "lr": 5.114403711910632e-06, "epoch": 1.7388059701492538, "percentage": 86.94, "elapsed_time": "0:14:13", "remaining_time": "0:02:08"}
|
235 |
+
{"current_steps": 234, "total_steps": 268, "loss": 0.0569, "lr": 4.8310611361867875e-06, "epoch": 1.7462686567164178, "percentage": 87.31, "elapsed_time": "0:14:16", "remaining_time": "0:02:04"}
|
236 |
+
{"current_steps": 235, "total_steps": 268, "loss": 0.0457, "lr": 4.555393931841001e-06, "epoch": 1.7537313432835822, "percentage": 87.69, "elapsed_time": "0:14:20", "remaining_time": "0:02:00"}
|
237 |
+
{"current_steps": 236, "total_steps": 268, "loss": 0.0462, "lr": 4.287448941863692e-06, "epoch": 1.7611940298507462, "percentage": 88.06, "elapsed_time": "0:14:23", "remaining_time": "0:01:57"}
|
238 |
+
{"current_steps": 237, "total_steps": 268, "loss": 0.0497, "lr": 4.027271697041252e-06, "epoch": 1.7686567164179103, "percentage": 88.43, "elapsed_time": "0:14:27", "remaining_time": "0:01:53"}
|
239 |
+
{"current_steps": 238, "total_steps": 268, "loss": 0.0432, "lr": 3.7749064082191977e-06, "epoch": 1.7761194029850746, "percentage": 88.81, "elapsed_time": "0:14:31", "remaining_time": "0:01:49"}
|
240 |
+
{"current_steps": 239, "total_steps": 268, "loss": 0.0523, "lr": 3.5303959587895898e-06, "epoch": 1.783582089552239, "percentage": 89.18, "elapsed_time": "0:14:34", "remaining_time": "0:01:46"}
|
241 |
+
{"current_steps": 240, "total_steps": 268, "loss": 0.0636, "lr": 3.2937818974040635e-06, "epoch": 1.7910447761194028, "percentage": 89.55, "elapsed_time": "0:14:38", "remaining_time": "0:01:42"}
|
242 |
+
{"current_steps": 241, "total_steps": 268, "loss": 0.058, "lr": 3.065104430913601e-06, "epoch": 1.7985074626865671, "percentage": 89.93, "elapsed_time": "0:14:41", "remaining_time": "0:01:38"}
|
243 |
+
{"current_steps": 242, "total_steps": 268, "loss": 0.0521, "lr": 2.844402417536374e-06, "epoch": 1.8059701492537314, "percentage": 90.3, "elapsed_time": "0:14:45", "remaining_time": "0:01:35"}
|
244 |
+
{"current_steps": 243, "total_steps": 268, "loss": 0.0533, "lr": 2.631713360254734e-06, "epoch": 1.8134328358208955, "percentage": 90.67, "elapsed_time": "0:14:49", "remaining_time": "0:01:31"}
|
245 |
+
{"current_steps": 244, "total_steps": 268, "loss": 0.043, "lr": 2.4270734004424643e-06, "epoch": 1.8208955223880596, "percentage": 91.04, "elapsed_time": "0:14:52", "remaining_time": "0:01:27"}
|
246 |
+
{"current_steps": 245, "total_steps": 268, "loss": 0.0552, "lr": 2.2305173117234236e-06, "epoch": 1.828358208955224, "percentage": 91.42, "elapsed_time": "0:14:56", "remaining_time": "0:01:24"}
|
247 |
+
{"current_steps": 246, "total_steps": 268, "loss": 0.0474, "lr": 2.0420784940626157e-06, "epoch": 1.835820895522388, "percentage": 91.79, "elapsed_time": "0:14:59", "remaining_time": "0:01:20"}
|
248 |
+
{"current_steps": 247, "total_steps": 268, "loss": 0.0511, "lr": 1.861788968090683e-06, "epoch": 1.8432835820895521, "percentage": 92.16, "elapsed_time": "0:15:03", "remaining_time": "0:01:16"}
|
249 |
+
{"current_steps": 248, "total_steps": 268, "loss": 0.047, "lr": 1.68967936966275e-06, "epoch": 1.8507462686567164, "percentage": 92.54, "elapsed_time": "0:15:07", "remaining_time": "0:01:13"}
|
250 |
+
{"current_steps": 249, "total_steps": 268, "loss": 0.046, "lr": 1.5257789446526172e-06, "epoch": 1.8582089552238807, "percentage": 92.91, "elapsed_time": "0:15:10", "remaining_time": "0:01:09"}
|
251 |
+
{"current_steps": 250, "total_steps": 268, "loss": 0.0457, "lr": 1.3701155439831249e-06, "epoch": 1.8656716417910446, "percentage": 93.28, "elapsed_time": "0:15:14", "remaining_time": "0:01:05"}
|
252 |
+
{"current_steps": 251, "total_steps": 268, "loss": 0.0499, "lr": 1.222715618893555e-06, "epoch": 1.873134328358209, "percentage": 93.66, "elapsed_time": "0:15:17", "remaining_time": "0:01:02"}
|
253 |
+
{"current_steps": 252, "total_steps": 268, "loss": 0.044, "lr": 1.0836042164448945e-06, "epoch": 1.8805970149253732, "percentage": 94.03, "elapsed_time": "0:15:21", "remaining_time": "0:00:58"}
|
254 |
+
{"current_steps": 253, "total_steps": 268, "loss": 0.0422, "lr": 9.528049752636714e-07, "epoch": 1.8880597014925373, "percentage": 94.4, "elapsed_time": "0:15:25", "remaining_time": "0:00:54"}
|
255 |
+
{"current_steps": 254, "total_steps": 268, "loss": 0.0671, "lr": 8.303401215251583e-07, "epoch": 1.8955223880597014, "percentage": 94.78, "elapsed_time": "0:15:28", "remaining_time": "0:00:51"}
|
256 |
+
{"current_steps": 255, "total_steps": 268, "loss": 0.0405, "lr": 7.16230465176565e-07, "epoch": 1.9029850746268657, "percentage": 95.15, "elapsed_time": "0:15:32", "remaining_time": "0:00:47"}
|
257 |
+
{"current_steps": 256, "total_steps": 268, "loss": 0.0425, "lr": 6.104953964008897e-07, "epoch": 1.9104477611940298, "percentage": 95.52, "elapsed_time": "0:15:35", "remaining_time": "0:00:43"}
|
258 |
+
{"current_steps": 257, "total_steps": 268, "loss": 0.0518, "lr": 5.131528823220099e-07, "epoch": 1.917910447761194, "percentage": 95.9, "elapsed_time": "0:15:39", "remaining_time": "0:00:40"}
|
259 |
+
{"current_steps": 258, "total_steps": 268, "loss": 0.0526, "lr": 4.242194639516417e-07, "epoch": 1.9253731343283582, "percentage": 96.27, "elapsed_time": "0:15:43", "remaining_time": "0:00:36"}
|
260 |
+
{"current_steps": 259, "total_steps": 268, "loss": 0.0495, "lr": 3.4371025337855413e-07, "epoch": 1.9328358208955225, "percentage": 96.64, "elapsed_time": "0:15:46", "remaining_time": "0:00:32"}
|
261 |
+
{"current_steps": 260, "total_steps": 268, "loss": 0.036, "lr": 2.7163893120066285e-07, "epoch": 1.9402985074626866, "percentage": 97.01, "elapsed_time": "0:15:50", "remaining_time": "0:00:29"}
|
262 |
+
{"current_steps": 261, "total_steps": 268, "loss": 0.0519, "lr": 2.0801774420031173e-07, "epoch": 1.9477611940298507, "percentage": 97.39, "elapsed_time": "0:15:53", "remaining_time": "0:00:25"}
|
263 |
+
{"current_steps": 262, "total_steps": 268, "loss": 0.0447, "lr": 1.5285750326325954e-07, "epoch": 1.955223880597015, "percentage": 97.76, "elapsed_time": "0:15:57", "remaining_time": "0:00:21"}
|
264 |
+
{"current_steps": 263, "total_steps": 268, "loss": 0.0567, "lr": 1.0616758154161632e-07, "epoch": 1.962686567164179, "percentage": 98.13, "elapsed_time": "0:16:01", "remaining_time": "0:00:18"}
|
265 |
+
{"current_steps": 264, "total_steps": 268, "loss": 0.0612, "lr": 6.795591286109515e-08, "epoch": 1.9701492537313432, "percentage": 98.51, "elapsed_time": "0:16:04", "remaining_time": "0:00:14"}
|
266 |
+
{"current_steps": 265, "total_steps": 268, "loss": 0.0481, "lr": 3.822899037286276e-08, "epoch": 1.9776119402985075, "percentage": 98.88, "elapsed_time": "0:16:08", "remaining_time": "0:00:10"}
|
267 |
+
{"current_steps": 266, "total_steps": 268, "loss": 0.0534, "lr": 1.6991865450188827e-08, "epoch": 1.9850746268656716, "percentage": 99.25, "elapsed_time": "0:16:11", "remaining_time": "0:00:07"}
|
268 |
+
{"current_steps": 267, "total_steps": 268, "loss": 0.0647, "lr": 4.248146830060362e-09, "epoch": 1.9925373134328357, "percentage": 99.63, "elapsed_time": "0:16:15", "remaining_time": "0:00:03"}
|
269 |
+
{"current_steps": 268, "total_steps": 268, "loss": 0.0434, "lr": 0.0, "epoch": 2.0, "percentage": 100.0, "elapsed_time": "0:16:19", "remaining_time": "0:00:00"}
|
270 |
+
{"current_steps": 268, "total_steps": 268, "epoch": 2.0, "percentage": 100.0, "elapsed_time": "0:16:38", "remaining_time": "0:00:00"}
|
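The log above records one JSON object per optimizer step, with `loss` and `lr` for training entries, `eval_loss` for the step-200 evaluation row, and a final summary row carrying only timing fields. A minimal sketch for re-plotting the curves locally; this assumes `trainer_log.jsonl` has been downloaded from this repo and that `matplotlib` is installed, and the 27-step linear warmup is inferred from the logged `lr` values rather than taken from any config file:

```python
import json
import math

import matplotlib.pyplot as plt

train_steps, train_loss = [], []
eval_steps, eval_loss = [], []

with open("trainer_log.jsonl") as f:
    for line in f:
        rec = json.loads(line)
        if "loss" in rec:                      # ordinary training entries
            train_steps.append(rec["current_steps"])
            train_loss.append(rec["loss"])
        elif "eval_loss" in rec:               # the step-200 evaluation entry
            eval_steps.append(rec["current_steps"])
            eval_loss.append(rec["eval_loss"])

plt.plot(train_steps, train_loss, label="training loss")
plt.scatter(eval_steps, eval_loss, color="red", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curves.png")                 # cf. training_loss.png in this repo

# Sanity check of the logged lr against a cosine schedule with linear
# warmup over the first 27 steps (inferred: the logged lr peaks at 1e-4
# at step 27 and decays to 0.0 at step 268).
def cosine_lr(step, peak=1e-4, warmup=27, total=268):
    if step <= warmup:
        return peak * step / warmup
    progress = (step - warmup) / (total - warmup)
    return 0.5 * peak * (1 + math.cos(math.pi * progress))

print(cosine_lr(175))  # ~3.246e-05, matching the step-175 entry above
```

The reconstructed schedule reproduces the logged values at the warmup peak, mid-run, and final step, so the `lr` column can be regenerated rather than parsed if only the loss curves are of interest.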
trainer_state.json
ADDED
@@ -0,0 +1,1926 @@
|
1 |
+
{
|
2 |
+
"best_metric": null,
|
3 |
+
"best_model_checkpoint": null,
|
4 |
+
"epoch": 2.0,
|
5 |
+
"eval_steps": 200,
|
6 |
+
"global_step": 268,
|
7 |
+
"is_hyper_param_search": false,
|
8 |
+
"is_local_process_zero": true,
|
9 |
+
"is_world_process_zero": true,
|
10 |
+
"log_history": [
|
11 |
+
{
|
12 |
+
"epoch": 0.007462686567164179,
|
13 |
+
"grad_norm": 0.052013083033399146,
|
14 |
+
"learning_rate": 3.7037037037037037e-06,
|
15 |
+
"loss": 0.1102,
|
16 |
+
"step": 1
|
17 |
+
},
|
18 |
+
{
|
19 |
+
"epoch": 0.014925373134328358,
|
20 |
+
"grad_norm": 0.04863489399680276,
|
21 |
+
"learning_rate": 7.4074074074074075e-06,
|
22 |
+
"loss": 0.0782,
|
23 |
+
"step": 2
|
24 |
+
},
|
25 |
+
{
|
26 |
+
"epoch": 0.022388059701492536,
|
27 |
+
"grad_norm": 0.04650403014200427,
|
28 |
+
"learning_rate": 1.1111111111111112e-05,
|
29 |
+
"loss": 0.0887,
|
30 |
+
"step": 3
|
31 |
+
},
|
32 |
+
{
|
33 |
+
"epoch": 0.029850746268656716,
|
34 |
+
"grad_norm": 0.06016511021899447,
|
35 |
+
"learning_rate": 1.4814814814814815e-05,
|
36 |
+
"loss": 0.0997,
|
37 |
+
"step": 4
|
38 |
+
},
|
39 |
+
{
|
40 |
+
"epoch": 0.03731343283582089,
|
41 |
+
"grad_norm": 0.059950159945642095,
|
42 |
+
"learning_rate": 1.8518518518518518e-05,
|
43 |
+
"loss": 0.1068,
|
44 |
+
"step": 5
|
45 |
+
},
|
46 |
+
{
|
47 |
+
"epoch": 0.04477611940298507,
|
48 |
+
"grad_norm": 0.06801325365135043,
|
49 |
+
"learning_rate": 2.2222222222222223e-05,
|
50 |
+
"loss": 0.1309,
|
51 |
+
"step": 6
|
52 |
+
},
|
53 |
+
{
|
54 |
+
"epoch": 0.05223880597014925,
|
55 |
+
"grad_norm": 0.07378383041901229,
|
56 |
+
"learning_rate": 2.5925925925925925e-05,
|
57 |
+
"loss": 0.1307,
|
58 |
+
"step": 7
|
59 |
+
},
|
60 |
+
{
|
61 |
+
"epoch": 0.05970149253731343,
|
62 |
+
"grad_norm": 0.05285711413560543,
|
63 |
+
"learning_rate": 2.962962962962963e-05,
|
64 |
+
"loss": 0.088,
|
65 |
+
"step": 8
|
66 |
+
},
|
67 |
+
{
|
68 |
+
"epoch": 0.06716417910447761,
|
69 |
+
"grad_norm": 0.06235445122453932,
|
70 |
+
"learning_rate": 3.3333333333333335e-05,
|
71 |
+
"loss": 0.0892,
|
72 |
+
"step": 9
|
73 |
+
},
|
74 |
+
{
|
75 |
+
"epoch": 0.07462686567164178,
|
76 |
+
"grad_norm": 0.07203989250824563,
|
77 |
+
"learning_rate": 3.7037037037037037e-05,
|
78 |
+
"loss": 0.0853,
|
79 |
+
"step": 10
|
80 |
+
},
|
81 |
+
{
|
82 |
+
"epoch": 0.08208955223880597,
|
83 |
+
"grad_norm": 0.05303096394664147,
|
84 |
+
"learning_rate": 4.074074074074074e-05,
|
85 |
+
"loss": 0.0697,
|
86 |
+
"step": 11
|
87 |
+
},
|
88 |
+
{
|
89 |
+
"epoch": 0.08955223880597014,
|
90 |
+
"grad_norm": 0.0668663305840225,
|
91 |
+
"learning_rate": 4.4444444444444447e-05,
|
92 |
+
"loss": 0.0721,
|
93 |
+
"step": 12
|
94 |
+
},
|
95 |
+
{
|
96 |
+
"epoch": 0.09701492537313433,
|
97 |
+
"grad_norm": 0.09237909483778411,
|
98 |
+
"learning_rate": 4.814814814814815e-05,
|
99 |
+
"loss": 0.0991,
|
100 |
+
"step": 13
|
101 |
+
},
|
102 |
+
{
|
103 |
+
"epoch": 0.1044776119402985,
|
104 |
+
"grad_norm": 0.058025150346422645,
|
105 |
+
"learning_rate": 5.185185185185185e-05,
|
106 |
+
"loss": 0.0857,
|
107 |
+
"step": 14
|
108 |
+
},
|
109 |
+
{
|
110 |
+
"epoch": 0.11194029850746269,
|
111 |
+
"grad_norm": 0.06966685338541448,
|
112 |
+
"learning_rate": 5.555555555555556e-05,
|
113 |
+
"loss": 0.0873,
|
114 |
+
"step": 15
|
115 |
+
},
|
116 |
+
{
|
117 |
+
"epoch": 0.11940298507462686,
|
118 |
+
"grad_norm": 0.05479814780024978,
|
119 |
+
"learning_rate": 5.925925925925926e-05,
|
120 |
+
"loss": 0.0704,
|
121 |
+
"step": 16
|
122 |
+
},
|
123 |
+
{
|
124 |
+
"epoch": 0.12686567164179105,
|
125 |
+
"grad_norm": 0.0440215115227343,
|
126 |
+
"learning_rate": 6.296296296296296e-05,
|
127 |
+
"loss": 0.0646,
|
128 |
+
"step": 17
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"epoch": 0.13432835820895522,
|
132 |
+
"grad_norm": 0.04983222050041505,
|
133 |
+
"learning_rate": 6.666666666666667e-05,
|
134 |
+
"loss": 0.0586,
|
135 |
+
"step": 18
|
136 |
+
},
|
137 |
+
{
|
138 |
+
"epoch": 0.1417910447761194,
|
139 |
+
"grad_norm": 0.05341182048539008,
|
140 |
+
"learning_rate": 7.037037037037038e-05,
|
141 |
+
"loss": 0.0728,
|
142 |
+
"step": 19
|
143 |
+
},
|
144 |
+
{
|
145 |
+
"epoch": 0.14925373134328357,
|
146 |
+
"grad_norm": 0.062352625208864434,
|
147 |
+
"learning_rate": 7.407407407407407e-05,
|
148 |
+
"loss": 0.0659,
|
149 |
+
"step": 20
|
150 |
+
},
|
151 |
+
{
|
152 |
+
"epoch": 0.15671641791044777,
|
153 |
+
"grad_norm": 0.06961862010819499,
|
154 |
+
"learning_rate": 7.777777777777778e-05,
|
155 |
+
"loss": 0.0563,
|
156 |
+
"step": 21
|
157 |
+
},
|
158 |
+
{
|
159 |
+
"epoch": 0.16417910447761194,
|
160 |
+
"grad_norm": 0.07349025149953163,
|
161 |
+
"learning_rate": 8.148148148148148e-05,
|
162 |
+
"loss": 0.0698,
|
163 |
+
"step": 22
|
164 |
+
},
|
165 |
+
{
|
166 |
+
"epoch": 0.17164179104477612,
|
167 |
+
"grad_norm": 0.08912963906493375,
|
168 |
+
"learning_rate": 8.518518518518518e-05,
|
169 |
+
"loss": 0.0731,
|
170 |
+
"step": 23
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"epoch": 0.1791044776119403,
|
174 |
+
"grad_norm": 0.07771573512531293,
|
175 |
+
"learning_rate": 8.888888888888889e-05,
|
176 |
+
"loss": 0.0601,
|
177 |
+
"step": 24
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"epoch": 0.1865671641791045,
|
181 |
+
"grad_norm": 0.07375666387698736,
|
182 |
+
"learning_rate": 9.25925925925926e-05,
|
183 |
+
"loss": 0.0598,
|
184 |
+
"step": 25
|
185 |
+
},
|
186 |
+
{
|
187 |
+
"epoch": 0.19402985074626866,
|
188 |
+
"grad_norm": 0.06387352388289207,
|
189 |
+
"learning_rate": 9.62962962962963e-05,
|
190 |
+
"loss": 0.063,
|
191 |
+
"step": 26
|
192 |
+
},
|
193 |
+
{
|
194 |
+
"epoch": 0.20149253731343283,
|
195 |
+
"grad_norm": 0.05770123465757898,
|
196 |
+
"learning_rate": 0.0001,
|
197 |
+
"loss": 0.0605,
|
198 |
+
"step": 27
|
199 |
+
},
|
200 |
+
{
|
201 |
+
"epoch": 0.208955223880597,
|
202 |
+
"grad_norm": 0.054048084601145345,
|
203 |
+
"learning_rate": 9.999575185316994e-05,
|
204 |
+
"loss": 0.0628,
|
205 |
+
"step": 28
|
206 |
+
},
|
207 |
+
{
|
208 |
+
"epoch": 0.21641791044776118,
|
209 |
+
"grad_norm": 0.05890919532309351,
|
210 |
+
"learning_rate": 9.998300813454982e-05,
|
211 |
+
"loss": 0.0686,
|
212 |
+
"step": 29
|
213 |
+
},
|
214 |
+
{
|
215 |
+
"epoch": 0.22388059701492538,
|
216 |
+
"grad_norm": 0.06510115188767437,
|
217 |
+
"learning_rate": 9.996177100962714e-05,
|
218 |
+
"loss": 0.0762,
|
219 |
+
"step": 30
|
220 |
+
},
|
221 |
+
{
|
222 |
+
"epoch": 0.23134328358208955,
|
223 |
+
"grad_norm": 0.053063431559380404,
|
224 |
+
"learning_rate": 9.99320440871389e-05,
|
225 |
+
"loss": 0.0839,
|
226 |
+
"step": 31
|
227 |
+
},
|
228 |
+
{
|
229 |
+
"epoch": 0.23880597014925373,
|
230 |
+
"grad_norm": 0.04538349240796014,
|
231 |
+
"learning_rate": 9.989383241845838e-05,
|
232 |
+
"loss": 0.0654,
|
233 |
+
"step": 32
|
234 |
+
},
|
235 |
+
{
|
236 |
+
"epoch": 0.2462686567164179,
|
237 |
+
"grad_norm": 0.03539564549356966,
|
238 |
+
"learning_rate": 9.984714249673675e-05,
|
239 |
+
"loss": 0.0496,
|
240 |
+
"step": 33
|
241 |
+
},
|
242 |
+
{
|
243 |
+
"epoch": 0.2537313432835821,
|
244 |
+
"grad_norm": 0.040024950601528136,
|
245 |
+
"learning_rate": 9.979198225579968e-05,
|
246 |
+
"loss": 0.064,
|
247 |
+
"step": 34
|
248 |
+
},
|
249 |
+
{
|
250 |
+
"epoch": 0.26119402985074625,
|
251 |
+
"grad_norm": 0.06948825968384087,
|
252 |
+
"learning_rate": 9.972836106879935e-05,
|
253 |
+
"loss": 0.0899,
|
254 |
+
"step": 35
|
255 |
+
},
|
256 |
+
{
|
257 |
+
"epoch": 0.26865671641791045,
|
258 |
+
"grad_norm": 0.03641864684712568,
|
259 |
+
"learning_rate": 9.965628974662144e-05,
|
260 |
+
"loss": 0.0535,
|
261 |
+
"step": 36
|
262 |
+
},
|
263 |
+
{
|
264 |
+
"epoch": 0.27611940298507465,
|
265 |
+
"grad_norm": 0.048210194169089476,
|
266 |
+
"learning_rate": 9.957578053604837e-05,
|
267 |
+
"loss": 0.0582,
|
268 |
+
"step": 37
|
269 |
+
},
|
270 |
+
{
|
271 |
+
"epoch": 0.2835820895522388,
|
272 |
+
"grad_norm": 0.038243902848412455,
|
273 |
+
"learning_rate": 9.9486847117678e-05,
|
274 |
+
"loss": 0.0557,
|
275 |
+
"step": 38
|
276 |
+
},
|
277 |
+
{
|
278 |
+
"epoch": 0.291044776119403,
|
279 |
+
"grad_norm": 0.04391453045822785,
|
280 |
+
"learning_rate": 9.938950460359913e-05,
|
281 |
+
"loss": 0.0555,
|
282 |
+
"step": 39
|
283 |
+
},
|
284 |
+
{
|
285 |
+
"epoch": 0.29850746268656714,
|
286 |
+
"grad_norm": 0.03990722508590128,
|
287 |
+
"learning_rate": 9.928376953482343e-05,
|
288 |
+
"loss": 0.0538,
|
289 |
+
"step": 40
|
290 |
+
},
|
291 |
+
{
|
292 |
+
"epoch": 0.30597014925373134,
|
293 |
+
"grad_norm": 0.038713344753186225,
|
294 |
+
"learning_rate": 9.916965987847485e-05,
|
295 |
+
"loss": 0.062,
|
296 |
+
"step": 41
|
297 |
+
},
|
298 |
+
{
|
299 |
+
"epoch": 0.31343283582089554,
|
300 |
+
"grad_norm": 0.043891252872110674,
|
301 |
+
"learning_rate": 9.904719502473634e-05,
|
302 |
+
"loss": 0.0729,
|
303 |
+
"step": 42
|
304 |
+
},
|
305 |
+
{
|
306 |
+
"epoch": 0.3208955223880597,
|
307 |
+
"grad_norm": 0.043451726800329916,
|
308 |
+
"learning_rate": 9.891639578355511e-05,
|
309 |
+
"loss": 0.0673,
|
310 |
+
"step": 43
|
311 |
+
},
|
312 |
+
{
|
313 |
+
"epoch": 0.3283582089552239,
|
314 |
+
"grad_norm": 0.05417686850709893,
|
315 |
+
"learning_rate": 9.877728438110645e-05,
|
316 |
+
"loss": 0.0621,
|
317 |
+
"step": 44
|
318 |
+
},
|
319 |
+
{
|
320 |
+
"epoch": 0.3358208955223881,
|
321 |
+
"grad_norm": 0.04710102286566618,
|
322 |
+
"learning_rate": 9.862988445601688e-05,
|
323 |
+
"loss": 0.0674,
|
324 |
+
"step": 45
|
325 |
+
},
|
326 |
+
{
|
327 |
+
"epoch": 0.34328358208955223,
|
328 |
+
"grad_norm": 0.04303756281606097,
|
329 |
+
"learning_rate": 9.847422105534739e-05,
|
330 |
+
"loss": 0.0745,
|
331 |
+
"step": 46
|
332 |
+
},
|
333 |
+
{
|
334 |
+
"epoch": 0.35074626865671643,
|
335 |
+
"grad_norm": 0.04513289131537309,
|
336 |
+
"learning_rate": 9.831032063033726e-05,
|
337 |
+
"loss": 0.0747,
|
338 |
+
"step": 47
|
339 |
+
},
|
340 |
+
{
|
341 |
+
"epoch": 0.3582089552238806,
|
342 |
+
"grad_norm": 0.03974814589369387,
|
343 |
+
"learning_rate": 9.813821103190932e-05,
|
344 |
+
"loss": 0.0549,
|
345 |
+
"step": 48
|
346 |
+
},
|
347 |
+
{
|
348 |
+
"epoch": 0.3656716417910448,
|
349 |
+
"grad_norm": 0.04476607962393581,
|
350 |
+
"learning_rate": 9.795792150593739e-05,
|
351 |
+
"loss": 0.0646,
|
352 |
+
"step": 49
|
353 |
+
},
|
354 |
+
{
|
355 |
+
"epoch": 0.373134328358209,
|
356 |
+
"grad_norm": 0.03577579931178259,
|
357 |
+
"learning_rate": 9.776948268827659e-05,
|
358 |
+
"loss": 0.0595,
|
359 |
+
"step": 50
|
360 |
+
},
|
361 |
+
{
|
362 |
+
"epoch": 0.3805970149253731,
|
363 |
+
"grad_norm": 0.04809723587992589,
|
364 |
+
"learning_rate": 9.757292659955755e-05,
|
365 |
+
"loss": 0.0644,
|
366 |
+
"step": 51
|
367 |
+
},
|
368 |
+
{
|
369 |
+
"epoch": 0.3880597014925373,
|
370 |
+
"grad_norm": 0.038353896024139626,
|
371 |
+
"learning_rate": 9.736828663974527e-05,
|
372 |
+
"loss": 0.0594,
|
373 |
+
"step": 52
|
374 |
+
},
|
375 |
+
{
|
376 |
+
"epoch": 0.39552238805970147,
|
377 |
+
"grad_norm": 0.04158825035527114,
|
378 |
+
"learning_rate": 9.715559758246363e-05,
|
379 |
+
"loss": 0.0554,
|
380 |
+
"step": 53
|
381 |
+
},
|
382 |
+
{
|
383 |
+
"epoch": 0.40298507462686567,
|
384 |
+
"grad_norm": 0.040559930018824054,
|
385 |
+
"learning_rate": 9.693489556908641e-05,
|
386 |
+
"loss": 0.0645,
|
387 |
+
"step": 54
|
388 |
+
},
|
389 |
+
{
|
390 |
+
"epoch": 0.41044776119402987,
|
391 |
+
"grad_norm": 0.06306440011917559,
|
392 |
+
"learning_rate": 9.670621810259595e-05,
|
393 |
+
"loss": 0.078,
|
394 |
+
"step": 55
|
395 |
+
},
|
396 |
+
{
|
397 |
+
"epoch": 0.417910447761194,
|
398 |
+
"grad_norm": 0.042148943613004676,
|
399 |
+
"learning_rate": 9.646960404121042e-05,
|
400 |
+
"loss": 0.0569,
|
401 |
+
"step": 56
|
402 |
+
},
|
403 |
+
{
|
404 |
+
"epoch": 0.4253731343283582,
|
405 |
+
"grad_norm": 0.04640675633826986,
|
406 |
+
"learning_rate": 9.62250935917808e-05,
|
407 |
+
"loss": 0.0682,
|
408 |
+
"step": 57
|
409 |
+
},
|
410 |
+
{
|
411 |
+
"epoch": 0.43283582089552236,
|
412 |
+
"grad_norm": 0.053493212658384144,
|
413 |
+
"learning_rate": 9.597272830295876e-05,
|
414 |
+
"loss": 0.0808,
|
415 |
+
"step": 58
|
416 |
+
},
|
417 |
+
{
|
418 |
+
"epoch": 0.44029850746268656,
|
419 |
+
"grad_norm": 0.04860235661540847,
|
420 |
+
"learning_rate": 9.571255105813632e-05,
|
421 |
+
"loss": 0.0656,
|
422 |
+
"step": 59
|
423 |
+
},
|
424 |
+
{
|
425 |
+
"epoch": 0.44776119402985076,
|
426 |
+
"grad_norm": 0.04477975237893416,
|
427 |
+
"learning_rate": 9.5444606068159e-05,
|
428 |
+
"loss": 0.0683,
|
429 |
+
"step": 60
|
430 |
+
},
|
431 |
+
{
|
432 |
+
"epoch": 0.4552238805970149,
|
433 |
+
"grad_norm": 0.04817673407206397,
|
434 |
+
"learning_rate": 9.516893886381323e-05,
|
435 |
+
"loss": 0.0752,
|
436 |
+
"step": 61
|
437 |
+
},
|
438 |
+
{
|
439 |
+
"epoch": 0.4626865671641791,
|
440 |
+
"grad_norm": 0.04825888354702613,
|
441 |
+
"learning_rate": 9.488559628808939e-05,
|
442 |
+
"loss": 0.0702,
|
443 |
+
"step": 62
|
444 |
+
},
|
445 |
+
{
|
446 |
+
"epoch": 0.4701492537313433,
|
447 |
+
"grad_norm": 0.048836566311892565,
|
448 |
+
"learning_rate": 9.459462648822208e-05,
|
449 |
+
"loss": 0.0675,
|
450 |
+
"step": 63
|
451 |
+
},
|
452 |
+
{
|
453 |
+
"epoch": 0.47761194029850745,
|
454 |
+
"grad_norm": 0.04135551600617683,
|
455 |
+
"learning_rate": 9.429607890750863e-05,
|
456 |
+
"loss": 0.0593,
|
457 |
+
"step": 64
|
458 |
+
},
|
459 |
+
{
|
460 |
+
"epoch": 0.48507462686567165,
|
461 |
+
"grad_norm": 0.06166570310656485,
|
462 |
+
"learning_rate": 9.399000427690735e-05,
|
463 |
+
"loss": 0.0566,
|
464 |
+
"step": 65
|
465 |
+
},
|
466 |
+
{
|
467 |
+
"epoch": 0.4925373134328358,
|
468 |
+
"grad_norm": 0.046466806343920246,
|
469 |
+
"learning_rate": 9.367645460641716e-05,
|
470 |
+
"loss": 0.0609,
|
471 |
+
"step": 66
|
472 |
+
},
|
473 |
+
{
|
474 |
+
"epoch": 0.5,
|
475 |
+
"grad_norm": 0.10263989924117295,
|
476 |
+
"learning_rate": 9.335548317623957e-05,
|
477 |
+
"loss": 0.0718,
|
478 |
+
"step": 67
|
479 |
+
},
|
480 |
+
{
|
481 |
+
"epoch": 0.5074626865671642,
|
482 |
+
"grad_norm": 0.058108824443232655,
|
483 |
+
"learning_rate": 9.302714452772516e-05,
|
484 |
+
"loss": 0.0609,
|
485 |
+
"step": 68
|
486 |
+
},
|
487 |
+
{
|
488 |
+
"epoch": 0.5149253731343284,
|
489 |
+
"grad_norm": 0.060823380041092286,
|
490 |
+
"learning_rate": 9.269149445410545e-05,
|
491 |
+
"loss": 0.064,
|
492 |
+
"step": 69
|
493 |
+
},
|
494 |
+
{
|
495 |
+
"epoch": 0.5223880597014925,
|
496 |
+
"grad_norm": 0.038783477491824724,
|
497 |
+
"learning_rate": 9.234858999101231e-05,
|
498 |
+
"loss": 0.0515,
|
499 |
+
"step": 70
|
500 |
+
},
|
501 |
+
{
|
502 |
+
"epoch": 0.5298507462686567,
|
503 |
+
"grad_norm": 0.05413360115801873,
|
504 |
+
"learning_rate": 9.199848940678606e-05,
|
505 |
+
"loss": 0.0633,
|
506 |
+
"step": 71
|
507 |
+
},
|
508 |
+
{
|
509 |
+
"epoch": 0.5373134328358209,
|
510 |
+
"grad_norm": 0.04527892455539891,
|
511 |
+
"learning_rate": 9.164125219257418e-05,
|
512 |
+
"loss": 0.0557,
|
513 |
+
"step": 72
|
514 |
+
},
|
515 |
+
{
|
516 |
+
"epoch": 0.5447761194029851,
|
517 |
+
"grad_norm": 0.0511013431140608,
|
518 |
+
"learning_rate": 9.127693905222224e-05,
|
519 |
+
"loss": 0.0636,
|
520 |
+
"step": 73
|
521 |
+
},
|
522 |
+
{
|
523 |
+
"epoch": 0.5522388059701493,
|
524 |
+
"grad_norm": 0.05686117150772095,
|
525 |
+
"learning_rate": 9.09056118919587e-05,
|
526 |
+
"loss": 0.065,
|
527 |
+
"step": 74
|
528 |
+
},
|
529 |
+
{
|
530 |
+
"epoch": 0.5597014925373134,
|
531 |
+
"grad_norm": 0.05135823978259575,
|
532 |
+
"learning_rate": 9.052733380987554e-05,
|
533 |
+
"loss": 0.0655,
|
534 |
+
"step": 75
|
535 |
+
},
|
536 |
+
{
|
537 |
+
"epoch": 0.5671641791044776,
|
538 |
+
"grad_norm": 0.054146689118899194,
|
539 |
+
"learning_rate": 9.014216908520618e-05,
|
540 |
+
"loss": 0.0672,
|
541 |
+
"step": 76
|
542 |
+
},
|
543 |
+
{
|
544 |
+
"epoch": 0.5746268656716418,
|
545 |
+
"grad_norm": 0.0436469335582032,
|
546 |
+
"learning_rate": 8.975018316740278e-05,
|
547 |
+
"loss": 0.0471,
|
548 |
+
"step": 77
|
549 |
+
},
|
550 |
+
{
|
551 |
+
"epoch": 0.582089552238806,
|
552 |
+
"grad_norm": 0.048548356045030626,
|
553 |
+
"learning_rate": 8.935144266501469e-05,
|
554 |
+
"loss": 0.0657,
|
555 |
+
"step": 78
|
556 |
+
},
|
557 |
+
{
|
558 |
+
"epoch": 0.5895522388059702,
|
559 |
+
"grad_norm": 0.04229399922006005,
|
560 |
+
"learning_rate": 8.894601533436999e-05,
|
561 |
+
"loss": 0.0587,
|
562 |
+
"step": 79
|
563 |
+
},
|
564 |
+
{
|
565 |
+
"epoch": 0.5970149253731343,
|
566 |
+
"grad_norm": 0.04826920188334625,
|
567 |
+
"learning_rate": 8.853397006806182e-05,
|
568 |
+
"loss": 0.0695,
|
569 |
+
"step": 80
|
570 |
+
},
|
571 |
+
{
|
572 |
+
"epoch": 0.6044776119402985,
|
573 |
+
"grad_norm": 0.04635385316613699,
|
574 |
+
"learning_rate": 8.811537688324188e-05,
|
575 |
+
"loss": 0.0615,
|
576 |
+
"step": 81
|
577 |
+
},
|
578 |
+
{
|
579 |
+
"epoch": 0.6119402985074627,
|
580 |
+
"grad_norm": 0.05001197052013922,
|
581 |
+
"learning_rate": 8.769030690972262e-05,
|
582 |
+
"loss": 0.0736,
|
583 |
+
"step": 82
|
584 |
+
},
|
585 |
+
{
|
586 |
+
"epoch": 0.6194029850746269,
|
587 |
+
"grad_norm": 0.047237913626514194,
|
588 |
+
"learning_rate": 8.725883237789045e-05,
|
589 |
+
"loss": 0.05,
|
590 |
+
"step": 83
|
591 |
+
},
|
592 |
+
{
|
593 |
+
"epoch": 0.6268656716417911,
|
594 |
+
"grad_norm": 0.046414550216027146,
|
595 |
+
"learning_rate": 8.682102660643197e-05,
|
596 |
+
"loss": 0.0547,
|
597 |
+
"step": 84
|
598 |
+
},
|
599 |
+
{
|
600 |
+
"epoch": 0.6343283582089553,
|
601 |
+
"grad_norm": 0.06675177360180477,
|
602 |
+
"learning_rate": 8.637696398987516e-05,
|
603 |
+
"loss": 0.0755,
|
604 |
+
"step": 85
|
605 |
+
},
|
606 |
+
{
|
607 |
+
"epoch": 0.6417910447761194,
|
608 |
+
"grad_norm": 0.04260892791512932,
|
609 |
+
"learning_rate": 8.592671998594794e-05,
|
610 |
+
"loss": 0.0596,
|
611 |
+
"step": 86
|
612 |
+
},
|
613 |
+
{
|
614 |
+
"epoch": 0.6492537313432836,
|
615 |
+
"grad_norm": 0.04140469655494379,
|
616 |
+
"learning_rate": 8.547037110275579e-05,
|
617 |
+
"loss": 0.0498,
|
618 |
+
"step": 87
|
619 |
+
},
|
620 |
+
{
|
621 |
+
"epoch": 0.6567164179104478,
|
622 |
+
"grad_norm": 0.049197123849644295,
|
623 |
+
"learning_rate": 8.50079948857812e-05,
|
624 |
+
"loss": 0.0693,
|
625 |
+
"step": 88
|
626 |
+
},
|
627 |
+
{
|
628 |
+
"epoch": 0.664179104477612,
|
629 |
+
"grad_norm": 0.04353374191129125,
|
630 |
+
"learning_rate": 8.453966990470656e-05,
|
631 |
+
"loss": 0.0667,
|
632 |
+
"step": 89
|
633 |
+
},
|
634 |
+
{
|
635 |
+
"epoch": 0.6716417910447762,
|
636 |
+
"grad_norm": 0.0426226996944138,
|
637 |
+
"learning_rate": 8.406547574006325e-05,
|
638 |
+
"loss": 0.0657,
|
639 |
+
"step": 90
|
640 |
+
},
|
641 |
+
{
|
642 |
+
"epoch": 0.6791044776119403,
|
643 |
+
"grad_norm": 0.04669227970514863,
|
644 |
+
"learning_rate": 8.358549296970876e-05,
|
645 |
+
"loss": 0.062,
|
646 |
+
"step": 91
|
647 |
+
},
|
648 |
+
{
|
649 |
+
"epoch": 0.6865671641791045,
|
650 |
+
"grad_norm": 0.04701963106139751,
|
651 |
+
"learning_rate": 8.309980315513444e-05,
|
652 |
+
"loss": 0.0619,
|
653 |
+
"step": 92
|
654 |
+
},
|
655 |
+
{
|
656 |
+
"epoch": 0.6940298507462687,
|
657 |
+
"grad_norm": 0.04909462370201748,
|
658 |
+
"learning_rate": 8.260848882760615e-05,
|
659 |
+
"loss": 0.0607,
|
660 |
+
"step": 93
|
661 |
+
},
|
662 |
+
{
|
663 |
+
"epoch": 0.7014925373134329,
|
664 |
+
"grad_norm": 0.06109724640086526,
|
665 |
+
"learning_rate": 8.211163347414003e-05,
|
666 |
+
"loss": 0.0641,
|
667 |
+
"step": 94
|
668 |
+
},
|
669 |
+
{
|
670 |
+
"epoch": 0.7089552238805971,
|
671 |
+
"grad_norm": 0.0452019392921164,
|
672 |
+
"learning_rate": 8.160932152331586e-05,
|
673 |
+
"loss": 0.052,
|
674 |
+
"step": 95
|
675 |
+
},
|
676 |
+
{
|
677 |
+
"epoch": 0.7164179104477612,
|
678 |
+
"grad_norm": 0.04995525069883283,
|
679 |
+
"learning_rate": 8.11016383309305e-05,
|
680 |
+
"loss": 0.0642,
|
681 |
+
"step": 96
|
682 |
+
},
|
683 |
+
{
|
684 |
+
"epoch": 0.7238805970149254,
|
685 |
+
"grad_norm": 0.05097665050212965,
|
686 |
+
"learning_rate": 8.058867016549372e-05,
|
687 |
+
"loss": 0.0511,
|
688 |
+
"step": 97
|
689 |
+
},
|
690 |
+
{
|
691 |
+
"epoch": 0.7313432835820896,
|
692 |
+
"grad_norm": 0.052426864602295876,
|
693 |
+
"learning_rate": 8.007050419356899e-05,
|
694 |
+
"loss": 0.0639,
|
695 |
+
"step": 98
|
696 |
+
},
|
697 |
+
{
|
698 |
+
"epoch": 0.7388059701492538,
|
699 |
+
"grad_norm": 0.040293486315801506,
|
700 |
+
"learning_rate": 7.95472284649615e-05,
|
701 |
+
"loss": 0.0459,
|
702 |
+
"step": 99
|
703 |
+
},
|
704 |
+
{
|
705 |
+
"epoch": 0.746268656716418,
|
706 |
+
"grad_norm": 0.04023826231697103,
|
707 |
+
"learning_rate": 7.90189318977564e-05,
|
708 |
+
"loss": 0.0511,
|
709 |
+
"step": 100
|
710 |
+
},
|
711 |
+
{
|
712 |
+
"epoch": 0.753731343283582,
|
713 |
+
"grad_norm": 0.046683930135103635,
|
714 |
+
"learning_rate": 7.848570426320917e-05,
|
715 |
+
"loss": 0.0628,
|
716 |
+
"step": 101
|
717 |
+
},
|
718 |
+
{
|
719 |
+
"epoch": 0.7611940298507462,
|
720 |
+
"grad_norm": 0.059216734501315524,
|
721 |
+
"learning_rate": 7.794763617049124e-05,
|
722 |
+
"loss": 0.0703,
|
723 |
+
"step": 102
|
724 |
+
},
|
725 |
+
{
|
726 |
+
"epoch": 0.7686567164179104,
|
727 |
+
"grad_norm": 0.05141886608418429,
|
728 |
+
"learning_rate": 7.740481905129306e-05,
|
729 |
+
"loss": 0.077,
|
730 |
+
"step": 103
|
731 |
+
},
|
732 |
+
{
|
733 |
+
"epoch": 0.7761194029850746,
|
734 |
+
"grad_norm": 0.054962350231581254,
|
735 |
+
"learning_rate": 7.685734514428766e-05,
|
736 |
+
"loss": 0.0679,
|
737 |
+
"step": 104
|
738 |
+
},
|
739 |
+
{
|
740 |
+
"epoch": 0.7835820895522388,
|
741 |
+
"grad_norm": 0.038468617770978104,
|
742 |
+
"learning_rate": 7.630530747945673e-05,
|
743 |
+
"loss": 0.0437,
|
744 |
+
"step": 105
|
745 |
+
},
|
746 |
+
{
|
747 |
+
"epoch": 0.7910447761194029,
|
748 |
+
"grad_norm": 0.05202447048914299,
|
749 |
+
"learning_rate": 7.574879986228245e-05,
|
750 |
+
"loss": 0.0619,
|
751 |
+
"step": 106
|
752 |
+
},
|
753 |
+
{
|
754 |
+
"epoch": 0.7985074626865671,
|
755 |
+
"grad_norm": 0.05303959137597775,
|
756 |
+
"learning_rate": 7.518791685780768e-05,
|
757 |
+
"loss": 0.0584,
|
758 |
+
"step": 107
|
759 |
+
},
|
760 |
+
{
|
761 |
+
"epoch": 0.8059701492537313,
|
762 |
+
"grad_norm": 0.05249396950922748,
|
763 |
+
"learning_rate": 7.46227537745667e-05,
|
764 |
+
"loss": 0.071,
|
765 |
+
"step": 108
|
766 |
+
},
|
767 |
+
{
|
768 |
+
"epoch": 0.8134328358208955,
|
769 |
+
"grad_norm": 0.09166647782946208,
|
770 |
+
"learning_rate": 7.405340664838993e-05,
|
771 |
+
"loss": 0.0703,
|
772 |
+
"step": 109
|
773 |
+
},
|
774 |
+
{
|
775 |
+
"epoch": 0.8208955223880597,
|
776 |
+
"grad_norm": 0.048179258631028794,
|
777 |
+
"learning_rate": 7.347997222608492e-05,
|
778 |
+
"loss": 0.058,
|
779 |
+
"step": 110
|
780 |
+
},
|
781 |
+
{
|
782 |
+
"epoch": 0.8283582089552238,
|
783 |
+
"grad_norm": 0.03829536928873207,
|
784 |
+
"learning_rate": 7.290254794899664e-05,
|
785 |
+
"loss": 0.0476,
|
786 |
+
"step": 111
|
787 |
+
},
|
788 |
+
{
|
789 |
+
"epoch": 0.835820895522388,
|
790 |
+
"grad_norm": 0.047644186186153,
|
791 |
+
"learning_rate": 7.232123193644957e-05,
|
792 |
+
"loss": 0.0617,
|
793 |
+
"step": 112
|
794 |
+
},
|
795 |
+
{
|
796 |
+
"epoch": 0.8432835820895522,
|
797 |
+
"grad_norm": 0.045695845294737504,
|
798 |
+
"learning_rate": 7.173612296907472e-05,
|
799 |
+
"loss": 0.0557,
|
800 |
+
"step": 113
|
801 |
+
},
|
802 |
+
{
|
803 |
+
"epoch": 0.8507462686567164,
|
804 |
+
"grad_norm": 0.041154907539465366,
|
805 |
+
"learning_rate": 7.114732047202433e-05,
|
806 |
+
"loss": 0.0461,
|
807 |
+
"step": 114
|
808 |
+
},
|
809 |
+
{
|
810 |
+
"epoch": 0.8582089552238806,
|
811 |
+
"grad_norm": 0.04472457421440064,
|
812 |
+
"learning_rate": 7.055492449807684e-05,
|
813 |
+
"loss": 0.0578,
|
814 |
+
"step": 115
|
815 |
+
},
|
816 |
+
{
|
817 |
+
"epoch": 0.8656716417910447,
|
818 |
+
"grad_norm": 0.044003200428972454,
|
819 |
+
"learning_rate": 6.99590357106354e-05,
|
820 |
+
"loss": 0.0559,
|
821 |
+
"step": 116
|
822 |
+
},
|
823 |
+
{
|
824 |
+
"epoch": 0.8731343283582089,
|
825 |
+
"grad_norm": 0.044897401580975174,
|
826 |
+
"learning_rate": 6.935975536662253e-05,
|
827 |
+
"loss": 0.0567,
|
828 |
+
"step": 117
|
829 |
+
},
|
830 |
+
{
|
831 |
+
"epoch": 0.8805970149253731,
|
832 |
+
"grad_norm": 0.06205996022296411,
|
833 |
+
"learning_rate": 6.875718529927405e-05,
|
834 |
+
"loss": 0.0711,
|
835 |
+
"step": 118
|
836 |
+
},
|
837 |
+
{
|
838 |
+
"epoch": 0.8880597014925373,
|
839 |
+
"grad_norm": 0.07334951273655922,
|
840 |
+
"learning_rate": 6.815142790083472e-05,
|
841 |
+
"loss": 0.0638,
|
842 |
+
"step": 119
|
843 |
+
},
|
844 |
+
{
|
845 |
+
"epoch": 0.8955223880597015,
|
846 |
+
"grad_norm": 0.048838297880021586,
|
847 |
+
"learning_rate": 6.75425861051595e-05,
|
848 |
+
"loss": 0.0592,
|
849 |
+
"step": 120
|
850 |
+
},
|
851 |
+
{
|
852 |
+
"epoch": 0.9029850746268657,
|
853 |
+
"grad_norm": 0.039421775418231796,
|
854 |
+
"learning_rate": 6.693076337022211e-05,
|
855 |
+
"loss": 0.0527,
|
856 |
+
"step": 121
|
857 |
+
},
|
858 |
+
{
|
859 |
+
"epoch": 0.9104477611940298,
|
860 |
+
"grad_norm": 0.05254528114345878,
|
861 |
+
"learning_rate": 6.631606366053506e-05,
|
862 |
+
"loss": 0.0619,
|
863 |
+
"step": 122
|
864 |
+
},
|
865 |
+
{
|
866 |
+
"epoch": 0.917910447761194,
|
867 |
+
"grad_norm": 0.03995739666790466,
|
868 |
+
"learning_rate": 6.569859142948328e-05,
|
869 |
+
"loss": 0.0496,
|
870 |
+
"step": 123
|
871 |
+
},
|
872 |
+
{
|
873 |
+
"epoch": 0.9253731343283582,
|
874 |
+
"grad_norm": 0.051970179463065154,
|
875 |
+
"learning_rate": 6.507845160157477e-05,
|
876 |
+
"loss": 0.069,
|
877 |
+
"step": 124
|
878 |
+
},
|
879 |
+
{
|
880 |
+
"epoch": 0.9328358208955224,
|
881 |
+
"grad_norm": 0.04793683722888212,
|
882 |
+
"learning_rate": 6.445574955461134e-05,
|
883 |
+
"loss": 0.0525,
|
884 |
+
"step": 125
|
885 |
+
},
|
886 |
+
{
|
887 |
+
"epoch": 0.9402985074626866,
|
888 |
+
"grad_norm": 0.05379432228035319,
|
889 |
+
"learning_rate": 6.383059110178204e-05,
|
890 |
+
"loss": 0.0652,
|
891 |
+
"step": 126
|
892 |
+
},
|
893 |
+
{
|
894 |
+
"epoch": 0.9477611940298507,
|
895 |
+
"grad_norm": 0.039301722434928965,
|
896 |
+
"learning_rate": 6.320308247368286e-05,
|
897 |
+
"loss": 0.0436,
|
898 |
+
"step": 127
|
899 |
+
},
|
900 |
+
{
|
901 |
+
"epoch": 0.9552238805970149,
|
902 |
+
"grad_norm": 0.054268725778200196,
|
903 |
+
"learning_rate": 6.257333030026538e-05,
|
904 |
+
"loss": 0.0778,
|
905 |
+
"step": 128
|
906 |
+
},
|
907 |
+
{
|
908 |
+
"epoch": 0.9626865671641791,
|
909 |
+
"grad_norm": 0.05347351895550439,
|
910 |
+
"learning_rate": 6.194144159271756e-05,
|
911 |
+
"loss": 0.0674,
|
912 |
+
"step": 129
|
913 |
+
},
|
914 |
+
{
|
915 |
+
"epoch": 0.9701492537313433,
|
916 |
+
"grad_norm": 0.04106166151438287,
|
917 |
+
"learning_rate": 6.130752372527982e-05,
|
918 |
+
"loss": 0.0556,
|
919 |
+
"step": 130
|
920 |
+
},
|
921 |
+
{
|
922 |
+
"epoch": 0.9776119402985075,
|
923 |
+
"grad_norm": 0.042096329064543504,
|
924 |
+
"learning_rate": 6.0671684416999273e-05,
|
925 |
+
"loss": 0.0563,
|
926 |
+
"step": 131
|
927 |
+
},
|
928 |
+
{
|
929 |
+
"epoch": 0.9850746268656716,
|
930 |
+
"grad_norm": 0.042856416255926746,
|
931 |
+
"learning_rate": 6.003403171342563e-05,
|
932 |
+
"loss": 0.0571,
|
933 |
+
"step": 132
|
934 |
+
},
|
935 |
+
{
|
936 |
+
"epoch": 0.9925373134328358,
|
937 |
+
"grad_norm": 0.09639449839659897,
|
938 |
+
"learning_rate": 5.939467396825137e-05,
|
939 |
+
"loss": 0.0688,
|
940 |
+
"step": 133
|
941 |
+
},
|
942 |
+
{
|
943 |
+
"epoch": 1.0,
|
944 |
+
"grad_norm": 0.04602779722879925,
|
945 |
+
"learning_rate": 5.875371982489959e-05,
|
946 |
+
"loss": 0.0656,
|
947 |
+
"step": 134
|
948 |
+
},
|
949 |
+
{
|
950 |
+
"epoch": 1.007462686567164,
|
951 |
+
"grad_norm": 0.040909439202533183,
|
952 |
+
"learning_rate": 5.811127819806277e-05,
|
953 |
+
"loss": 0.0471,
|
954 |
+
"step": 135
|
955 |
+
},
|
956 |
+
{
|
957 |
+
"epoch": 1.0149253731343284,
|
958 |
+
"grad_norm": 0.038911267954833365,
|
959 |
+
"learning_rate": 5.7467458255195384e-05,
|
960 |
+
"loss": 0.0469,
|
961 |
+
"step": 136
|
962 |
+
},
|
963 |
+
{
|
964 |
+
"epoch": 1.0223880597014925,
|
965 |
+
"grad_norm": 0.041384462535256024,
|
966 |
+
"learning_rate": 5.682236939796337e-05,
|
967 |
+
"loss": 0.0474,
|
968 |
+
"step": 137
|
969 |
+
},
|
970 |
+
{
|
971 |
+
"epoch": 1.0298507462686568,
|
972 |
+
"grad_norm": 0.04094166724352337,
|
973 |
+
"learning_rate": 5.61761212436541e-05,
|
974 |
+
"loss": 0.0564,
|
975 |
+
"step": 138
|
976 |
+
},
|
977 |
+
{
|
978 |
+
"epoch": 1.037313432835821,
|
979 |
+
"grad_norm": 0.04242464397313712,
|
980 |
+
"learning_rate": 5.55288236065495e-05,
|
981 |
+
"loss": 0.0562,
|
982 |
+
"step": 139
|
983 |
+
},
|
984 |
+
{
|
985 |
+
"epoch": 1.044776119402985,
|
986 |
+
"grad_norm": 0.04195771321895051,
|
987 |
+
"learning_rate": 5.488058647926577e-05,
|
988 |
+
"loss": 0.0579,
|
989 |
+
"step": 140
|
990 |
+
},
|
991 |
+
{
|
992 |
+
"epoch": 1.0522388059701493,
|
993 |
+
"grad_norm": 0.04417558638478372,
|
994 |
+
"learning_rate": 5.423152001406282e-05,
|
995 |
+
"loss": 0.0491,
|
996 |
+
"step": 141
|
997 |
+
},
|
998 |
+
{
|
999 |
+
"epoch": 1.0597014925373134,
|
1000 |
+
"grad_norm": 0.042202769607083065,
|
1001 |
+
"learning_rate": 5.3581734504126494e-05,
|
1002 |
+
"loss": 0.0422,
|
1003 |
+
"step": 142
|
1004 |
+
},
|
1005 |
+
{
|
1006 |
+
"epoch": 1.0671641791044777,
|
1007 |
+
"grad_norm": 0.0535587333202052,
|
1008 |
+
"learning_rate": 5.293134036482698e-05,
|
1009 |
+
"loss": 0.0685,
|
1010 |
+
"step": 143
|
1011 |
+
},
|
1012 |
+
{
|
1013 |
+
"epoch": 1.0746268656716418,
|
1014 |
+
"grad_norm": 0.055138388599660325,
|
1015 |
+
"learning_rate": 5.2280448114956316e-05,
|
1016 |
+
"loss": 0.0431,
|
1017 |
+
"step": 144
|
1018 |
+
},
|
+    {"epoch": 1.0820895522388059, "grad_norm": 0.04652366823377736, "learning_rate": 5.1629168357948435e-05, "loss": 0.0561, "step": 145},
+    {"epoch": 1.0895522388059702, "grad_norm": 0.04332617542969012, "learning_rate": 5.097761176308471e-05, "loss": 0.0502, "step": 146},
+    {"epoch": 1.0970149253731343, "grad_norm": 0.050237476394082006, "learning_rate": 5.032588904668851e-05, "loss": 0.0532, "step": 147},
+    {"epoch": 1.1044776119402986, "grad_norm": 0.04573637385227261, "learning_rate": 4.967411095331149e-05, "loss": 0.0535, "step": 148},
+    {"epoch": 1.1119402985074627, "grad_norm": 0.052957518437750144, "learning_rate": 4.90223882369153e-05, "loss": 0.0554, "step": 149},
+    {"epoch": 1.1194029850746268, "grad_norm": 0.049130859361772125, "learning_rate": 4.837083164205159e-05, "loss": 0.0513, "step": 150},
+    {"epoch": 1.126865671641791, "grad_norm": 0.0634402040983951, "learning_rate": 4.771955188504371e-05, "loss": 0.0624, "step": 151},
+    {"epoch": 1.1343283582089552, "grad_norm": 0.05147023431572683, "learning_rate": 4.7068659635173026e-05, "loss": 0.0565, "step": 152},
+    {"epoch": 1.1417910447761195, "grad_norm": 0.05221315881539783, "learning_rate": 4.641826549587352e-05, "loss": 0.0547, "step": 153},
+    {"epoch": 1.1492537313432836, "grad_norm": 0.03426733553834016, "learning_rate": 4.57684799859372e-05, "loss": 0.0386, "step": 154},
+    {"epoch": 1.1567164179104479, "grad_norm": 0.06218464776243875, "learning_rate": 4.511941352073424e-05, "loss": 0.0602, "step": 155},
+    {"epoch": 1.164179104477612, "grad_norm": 0.058610888212060334, "learning_rate": 4.447117639345052e-05, "loss": 0.0546, "step": 156},
+    {"epoch": 1.171641791044776, "grad_norm": 0.047137689334926215, "learning_rate": 4.382387875634591e-05, "loss": 0.0585, "step": 157},
+    {"epoch": 1.1791044776119404, "grad_norm": 0.04620501362827189, "learning_rate": 4.317763060203664e-05, "loss": 0.0433, "step": 158},
+    {"epoch": 1.1865671641791045, "grad_norm": 0.047425544118622535, "learning_rate": 4.253254174480462e-05, "loss": 0.0479, "step": 159},
+    {"epoch": 1.1940298507462686, "grad_norm": 0.04524911074502717, "learning_rate": 4.188872180193723e-05, "loss": 0.0455, "step": 160},
+    {"epoch": 1.2014925373134329, "grad_norm": 0.06371288429132108, "learning_rate": 4.124628017510043e-05, "loss": 0.0598, "step": 161},
+    {"epoch": 1.208955223880597, "grad_norm": 0.05407226554415295, "learning_rate": 4.0605326031748645e-05, "loss": 0.0454, "step": 162},
+    {"epoch": 1.2164179104477613, "grad_norm": 0.052130810413710434, "learning_rate": 3.9965968286574376e-05, "loss": 0.0486, "step": 163},
+    {"epoch": 1.2238805970149254, "grad_norm": 0.049882104045062874, "learning_rate": 3.932831558300074e-05, "loss": 0.06, "step": 164},
+    {"epoch": 1.2313432835820897, "grad_norm": 0.047714903046634234, "learning_rate": 3.869247627472021e-05, "loss": 0.0423, "step": 165},
+    {"epoch": 1.2388059701492538, "grad_norm": 0.04738164856167183, "learning_rate": 3.8058558407282464e-05, "loss": 0.0514, "step": 166},
+    {"epoch": 1.2462686567164178, "grad_norm": 0.054171340333948444, "learning_rate": 3.742666969973463e-05, "loss": 0.0506, "step": 167},
+    {"epoch": 1.2537313432835822, "grad_norm": 0.06193913420989349, "learning_rate": 3.6796917526317156e-05, "loss": 0.0607, "step": 168},
+    {"epoch": 1.2611940298507462, "grad_norm": 0.05192825206437601, "learning_rate": 3.616940889821797e-05, "loss": 0.0613, "step": 169},
+    {"epoch": 1.2686567164179103, "grad_norm": 0.04777566009651317, "learning_rate": 3.5544250445388675e-05, "loss": 0.045, "step": 170},
+    {"epoch": 1.2761194029850746, "grad_norm": 0.06626981462090721, "learning_rate": 3.492154839842525e-05, "loss": 0.0484, "step": 171},
+    {"epoch": 1.2835820895522387, "grad_norm": 0.05088197809774847, "learning_rate": 3.430140857051675e-05, "loss": 0.0495, "step": 172},
+    {"epoch": 1.291044776119403, "grad_norm": 0.06694542314545353, "learning_rate": 3.368393633946496e-05, "loss": 0.0565, "step": 173},
+    {"epoch": 1.2985074626865671, "grad_norm": 0.05208668148986899, "learning_rate": 3.306923662977789e-05, "loss": 0.0536, "step": 174},
+    {"epoch": 1.3059701492537314, "grad_norm": 0.05156528214968674, "learning_rate": 3.245741389484052e-05, "loss": 0.0502, "step": 175},
+    {"epoch": 1.3134328358208955, "grad_norm": 0.04744808607095092, "learning_rate": 3.184857209916528e-05, "loss": 0.0432, "step": 176},
+    {"epoch": 1.3208955223880596, "grad_norm": 0.06528967128573542, "learning_rate": 3.124281470072597e-05, "loss": 0.0644, "step": 177},
+    {"epoch": 1.328358208955224, "grad_norm": 0.058894947513357526, "learning_rate": 3.064024463337747e-05, "loss": 0.0484, "step": 178},
+    {"epoch": 1.335820895522388, "grad_norm": 0.05026020412511888, "learning_rate": 3.0040964289364616e-05, "loss": 0.0417, "step": 179},
+    {"epoch": 1.3432835820895521, "grad_norm": 0.06005388252167612, "learning_rate": 2.944507550192318e-05, "loss": 0.0682, "step": 180},
+    {"epoch": 1.3507462686567164, "grad_norm": 0.051085898707810704, "learning_rate": 2.885267952797569e-05, "loss": 0.042, "step": 181},
+    {"epoch": 1.3582089552238805, "grad_norm": 0.05394693029172925, "learning_rate": 2.8263877030925277e-05, "loss": 0.0555, "step": 182},
+    {"epoch": 1.3656716417910448, "grad_norm": 0.053531960618490665, "learning_rate": 2.7678768063550452e-05, "loss": 0.058, "step": 183},
+    {"epoch": 1.373134328358209, "grad_norm": 0.059380977888763835, "learning_rate": 2.7097452051003375e-05, "loss": 0.0565, "step": 184},
+    {"epoch": 1.3805970149253732, "grad_norm": 0.06658133987250674, "learning_rate": 2.6520027773915075e-05, "loss": 0.0347, "step": 185},
+    {"epoch": 1.3880597014925373, "grad_norm": 0.05180468695999146, "learning_rate": 2.5946593351610082e-05, "loss": 0.0455, "step": 186},
+    {"epoch": 1.3955223880597014, "grad_norm": 0.05344942915082364, "learning_rate": 2.5377246225433303e-05, "loss": 0.0538, "step": 187},
+    {"epoch": 1.4029850746268657, "grad_norm": 0.047334027874124045, "learning_rate": 2.4812083142192328e-05, "loss": 0.0431, "step": 188},
+    {"epoch": 1.4104477611940298, "grad_norm": 0.05091860796198701, "learning_rate": 2.4251200137717544e-05, "loss": 0.0528, "step": 189},
+    {"epoch": 1.417910447761194, "grad_norm": 0.07249786708752295, "learning_rate": 2.3694692520543295e-05, "loss": 0.0451, "step": 190},
+    {"epoch": 1.4253731343283582, "grad_norm": 0.04534779714214529, "learning_rate": 2.3142654855712354e-05, "loss": 0.0389, "step": 191},
+    {"epoch": 1.4328358208955223, "grad_norm": 0.043997856969624605, "learning_rate": 2.259518094870693e-05, "loss": 0.0328, "step": 192},
+    {"epoch": 1.4402985074626866, "grad_norm": 0.06299574448257929, "learning_rate": 2.2052363829508775e-05, "loss": 0.0556, "step": 193},
+    {"epoch": 1.4477611940298507, "grad_norm": 0.053125137372870675, "learning_rate": 2.151429573679084e-05, "loss": 0.0504, "step": 194},
+    {"epoch": 1.455223880597015, "grad_norm": 0.06653237383041379, "learning_rate": 2.0981068102243616e-05, "loss": 0.0518, "step": 195},
+    {"epoch": 1.462686567164179, "grad_norm": 0.06313206614449261, "learning_rate": 2.0452771535038518e-05, "loss": 0.0588, "step": 196},
+    {"epoch": 1.4701492537313432, "grad_norm": 0.05503341919363748, "learning_rate": 1.9929495806431025e-05, "loss": 0.0464, "step": 197},
+    {"epoch": 1.4776119402985075, "grad_norm": 0.08475277341464003, "learning_rate": 1.9411329834506286e-05, "loss": 0.0559, "step": 198},
+    {"epoch": 1.4850746268656716, "grad_norm": 0.059758311961399115, "learning_rate": 1.8898361669069497e-05, "loss": 0.0502, "step": 199},
+    {"epoch": 1.4925373134328357, "grad_norm": 0.0566937190251894, "learning_rate": 1.8390678476684142e-05, "loss": 0.045, "step": 200},
+    {"epoch": 1.4925373134328357, "eval_loss": 0.05160621926188469, "eval_runtime": 2.8825, "eval_samples_per_second": 2.082, "eval_steps_per_second": 0.694, "step": 200},
+    {"epoch": 1.5, "grad_norm": 0.05932628824267549, "learning_rate": 1.7888366525859968e-05, "loss": 0.0566, "step": 201},
+    {"epoch": 1.5074626865671643, "grad_norm": 0.06517365948496257, "learning_rate": 1.739151117239385e-05, "loss": 0.0477, "step": 202},
+    {"epoch": 1.5149253731343284, "grad_norm": 0.06025109480656823, "learning_rate": 1.6900196844865573e-05, "loss": 0.0533, "step": 203},
+    {"epoch": 1.5223880597014925, "grad_norm": 0.05776683125652848, "learning_rate": 1.641450703029125e-05, "loss": 0.0519, "step": 204},
+    {"epoch": 1.5298507462686568, "grad_norm": 0.05126422412747507, "learning_rate": 1.5934524259936756e-05, "loss": 0.046, "step": 205},
+    {"epoch": 1.537313432835821, "grad_norm": 0.05499081947502462, "learning_rate": 1.5460330095293447e-05, "loss": 0.0498, "step": 206},
+    {"epoch": 1.544776119402985, "grad_norm": 0.054532441322362145, "learning_rate": 1.4992005114218805e-05, "loss": 0.0497, "step": 207},
+    {"epoch": 1.5522388059701493, "grad_norm": 0.05155352979429458, "learning_rate": 1.4529628897244212e-05, "loss": 0.0495, "step": 208},
+    {"epoch": 1.5597014925373134, "grad_norm": 0.05542559514479037, "learning_rate": 1.4073280014052077e-05, "loss": 0.0514, "step": 209},
+    {"epoch": 1.5671641791044775, "grad_norm": 0.05135507971158835, "learning_rate": 1.3623036010124846e-05, "loss": 0.044, "step": 210},
+    {"epoch": 1.5746268656716418, "grad_norm": 0.058416874316592246, "learning_rate": 1.3178973393568057e-05, "loss": 0.0529, "step": 211},
+    {"epoch": 1.582089552238806, "grad_norm": 0.06608672504247858, "learning_rate": 1.2741167622109556e-05, "loss": 0.047, "step": 212},
+    {"epoch": 1.5895522388059702, "grad_norm": 0.05339647478698718, "learning_rate": 1.230969309027739e-05, "loss": 0.0476, "step": 213},
+    {"epoch": 1.5970149253731343, "grad_norm": 0.05349408967847287, "learning_rate": 1.1884623116758121e-05, "loss": 0.0539, "step": 214},
+    {"epoch": 1.6044776119402986, "grad_norm": 0.06251421298143098, "learning_rate": 1.1466029931938182e-05, "loss": 0.0576, "step": 215},
+    {"epoch": 1.6119402985074627, "grad_norm": 0.05818884547573859, "learning_rate": 1.1053984665630024e-05, "loss": 0.0449, "step": 216},
+    {"epoch": 1.6194029850746268, "grad_norm": 0.06635422601410046, "learning_rate": 1.0648557334985309e-05, "loss": 0.0589, "step": 217},
+    {"epoch": 1.626865671641791, "grad_norm": 0.06199310551396039, "learning_rate": 1.024981683259723e-05, "loss": 0.0549, "step": 218},
+    {"epoch": 1.6343283582089554, "grad_norm": 0.05854052760292475, "learning_rate": 9.857830914793826e-06, "loss": 0.0537, "step": 219},
+    {"epoch": 1.6417910447761193, "grad_norm": 0.05630714698637382, "learning_rate": 9.472666190124457e-06, "loss": 0.0436, "step": 220},
+    {"epoch": 1.6492537313432836, "grad_norm": 0.07031869032406533, "learning_rate": 9.094388108041302e-06, "loss": 0.0571, "step": 221},
+    {"epoch": 1.6567164179104479, "grad_norm": 0.05814344040804166, "learning_rate": 8.723060947777777e-06, "loss": 0.0516, "step": 222},
+    {"epoch": 1.664179104477612, "grad_norm": 0.05546972348565669, "learning_rate": 8.358747807425826e-06, "loss": 0.0501, "step": 223},
+    {"epoch": 1.671641791044776, "grad_norm": 0.05885213108993019, "learning_rate": 8.001510593213946e-06, "loss": 0.0531, "step": 224},
+    {"epoch": 1.6791044776119404, "grad_norm": 0.05781771358261457, "learning_rate": 7.651410008987697e-06, "loss": 0.0564, "step": 225},
+    {"epoch": 1.6865671641791045, "grad_norm": 0.04969672388657159, "learning_rate": 7.308505545894567e-06, "loss": 0.0459, "step": 226},
+    {"epoch": 1.6940298507462686, "grad_norm": 0.06333951352732294, "learning_rate": 6.972855472274853e-06, "loss": 0.0539, "step": 227},
+    {"epoch": 1.7014925373134329, "grad_norm": 0.054243290434551374, "learning_rate": 6.6445168237604385e-06, "loss": 0.0583, "step": 228},
+    {"epoch": 1.7089552238805972, "grad_norm": 0.062381356109196505, "learning_rate": 6.323545393582847e-06, "loss": 0.0526, "step": 229},
+    {"epoch": 1.716417910447761, "grad_norm": 0.05352131312449274, "learning_rate": 6.009995723092654e-06, "loss": 0.051, "step": 230},
+    {"epoch": 1.7238805970149254, "grad_norm": 0.08133118347909825, "learning_rate": 5.703921092491393e-06, "loss": 0.0546, "step": 231},
+    {"epoch": 1.7313432835820897, "grad_norm": 0.061792530859342706, "learning_rate": 5.405373511777939e-06, "loss": 0.0624, "step": 232},
+    {"epoch": 1.7388059701492538, "grad_norm": 0.06261273067356346, "learning_rate": 5.114403711910632e-06, "loss": 0.0565, "step": 233},
+    {"epoch": 1.7462686567164178, "grad_norm": 0.06920687367618827, "learning_rate": 4.8310611361867875e-06, "loss": 0.0569, "step": 234},
+    {"epoch": 1.7537313432835822, "grad_norm": 0.05264649139145527, "learning_rate": 4.555393931841001e-06, "loss": 0.0457, "step": 235},
+    {"epoch": 1.7611940298507462, "grad_norm": 0.05790379077052085, "learning_rate": 4.287448941863692e-06, "loss": 0.0462, "step": 236},
+    {"epoch": 1.7686567164179103, "grad_norm": 0.056254761733788176, "learning_rate": 4.027271697041252e-06, "loss": 0.0497, "step": 237},
+    {"epoch": 1.7761194029850746, "grad_norm": 0.05681655517912698, "learning_rate": 3.7749064082191977e-06, "loss": 0.0432, "step": 238},
+    {"epoch": 1.783582089552239, "grad_norm": 0.05498430562467159, "learning_rate": 3.5303959587895898e-06, "loss": 0.0523, "step": 239},
+    {"epoch": 1.7910447761194028, "grad_norm": 0.06063185430675877, "learning_rate": 3.2937818974040635e-06, "loss": 0.0636, "step": 240},
+    {"epoch": 1.7985074626865671, "grad_norm": 0.058955207729966874, "learning_rate": 3.065104430913601e-06, "loss": 0.058, "step": 241},
+    {"epoch": 1.8059701492537314, "grad_norm": 0.05466396031679419, "learning_rate": 2.844402417536374e-06, "loss": 0.0521, "step": 242},
+    {"epoch": 1.8134328358208955, "grad_norm": 0.05835530864435512, "learning_rate": 2.631713360254734e-06, "loss": 0.0533, "step": 243},
+    {"epoch": 1.8208955223880596, "grad_norm": 0.05292006427906579, "learning_rate": 2.4270734004424643e-06, "loss": 0.043, "step": 244},
+    {"epoch": 1.828358208955224, "grad_norm": 0.06947783795016407, "learning_rate": 2.2305173117234236e-06, "loss": 0.0552, "step": 245},
+    {"epoch": 1.835820895522388, "grad_norm": 0.05437361507622075, "learning_rate": 2.0420784940626157e-06, "loss": 0.0474, "step": 246},
+    {"epoch": 1.8432835820895521, "grad_norm": 0.06127815980853201, "learning_rate": 1.861788968090683e-06, "loss": 0.0511, "step": 247},
+    {"epoch": 1.8507462686567164, "grad_norm": 0.055640283755883826, "learning_rate": 1.68967936966275e-06, "loss": 0.047, "step": 248},
+    {"epoch": 1.8582089552238807, "grad_norm": 0.05893302677913718, "learning_rate": 1.5257789446526172e-06, "loss": 0.046, "step": 249},
+    {"epoch": 1.8656716417910446, "grad_norm": 0.050927982629340215, "learning_rate": 1.3701155439831249e-06, "loss": 0.0457, "step": 250},
+    {"epoch": 1.873134328358209, "grad_norm": 0.05896469429383079, "learning_rate": 1.222715618893555e-06, "loss": 0.0499, "step": 251},
+    {"epoch": 1.8805970149253732, "grad_norm": 0.0477546246707909, "learning_rate": 1.0836042164448945e-06, "loss": 0.044, "step": 252},
+    {"epoch": 1.8880597014925373, "grad_norm": 0.05397513733906539, "learning_rate": 9.528049752636714e-07, "loss": 0.0422, "step": 253},
+    {"epoch": 1.8955223880597014, "grad_norm": 0.06350294630947854, "learning_rate": 8.303401215251583e-07, "loss": 0.0671, "step": 254},
+    {"epoch": 1.9029850746268657, "grad_norm": 0.049892210446187565, "learning_rate": 7.16230465176565e-07, "loss": 0.0405, "step": 255},
+    {"epoch": 1.9104477611940298, "grad_norm": 0.0499500114993745, "learning_rate": 6.104953964008897e-07, "loss": 0.0425, "step": 256},
+    {"epoch": 1.917910447761194, "grad_norm": 0.06303224345852801, "learning_rate": 5.131528823220099e-07, "loss": 0.0518, "step": 257},
+    {"epoch": 1.9253731343283582, "grad_norm": 0.05856885863654038, "learning_rate": 4.242194639516417e-07, "loss": 0.0526, "step": 258},
+    {"epoch": 1.9328358208955225, "grad_norm": 0.05923832528751469, "learning_rate": 3.4371025337855413e-07, "loss": 0.0495, "step": 259},
+    {"epoch": 1.9402985074626866, "grad_norm": 0.04422044691408464, "learning_rate": 2.7163893120066285e-07, "loss": 0.036, "step": 260},
+    {"epoch": 1.9477611940298507, "grad_norm": 0.05697295897119153, "learning_rate": 2.0801774420031173e-07, "loss": 0.0519, "step": 261},
+    {"epoch": 1.955223880597015, "grad_norm": 0.052672848618579474, "learning_rate": 1.5285750326325954e-07, "loss": 0.0447, "step": 262},
+    {"epoch": 1.962686567164179, "grad_norm": 0.06192118854388749, "learning_rate": 1.0616758154161632e-07, "loss": 0.0567, "step": 263},
+    {"epoch": 1.9701492537313432, "grad_norm": 0.05915043537671657, "learning_rate": 6.795591286109515e-08, "loss": 0.0612, "step": 264},
+    {"epoch": 1.9776119402985075, "grad_norm": 0.05763945739833529, "learning_rate": 3.822899037286276e-08, "loss": 0.0481, "step": 265},
+    {"epoch": 1.9850746268656716, "grad_norm": 0.06233545894964188, "learning_rate": 1.6991865450188827e-08, "loss": 0.0534, "step": 266},
+    {"epoch": 1.9925373134328357, "grad_norm": 0.06099418986675789, "learning_rate": 4.248146830060362e-09, "loss": 0.0647, "step": 267},
+    {"epoch": 2.0, "grad_norm": 0.04973680972569267, "learning_rate": 0.0, "loss": 0.0434, "step": 268},
+    {"epoch": 2.0, "step": 268, "total_flos": 509043416236032.0, "train_loss": 0.058696881270230705, "train_runtime": 998.1148, "train_samples_per_second": 1.068, "train_steps_per_second": 0.269}
+  ],
+  "logging_steps": 1,
+  "max_steps": 268,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 2,
+  "save_steps": 300,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 509043416236032.0,
+  "train_batch_size": 1,
+  "trial_name": null,
+  "trial_params": null
+}
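The tail of the log traces the cosine schedule decaying to a learning rate of 0.0 at the final step 268, with training loss settling around 0.05 and a single evaluation at step 200 (eval_loss 0.0516). As a convenience, here is a minimal sketch, not part of the repo, of how one might re-plot the curves behind training_loss.png and training_eval_loss.png from this file; it assumes the entries above sit under the Trainer's standard `log_history` key and that matplotlib is installed:

```python
# Minimal sketch: re-plot the loss curves from trainer_state.json.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training logs carry a "loss" key; the evaluation entry
# (step 200 above) carries "eval_loss" instead.
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

plt.plot(*zip(*train), label="train loss")
if evals:
    plt.plot(*zip(*evals), "o", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.savefig("training_loss_replot.png")
```

Note that the final summary entry uses `train_loss` rather than `loss`, so the filter above skips it automatically.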
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7d39e7a6abc5bf7f75ea9a4af04367ad04f0fac9bd08928b72afd285b3541c5
+size 7224
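Like the other binary artifacts, training_args.bin is stored as a Git LFS pointer: the repository tracks only the spec version, the SHA-256 object ID, and the byte size, while the blob itself lives in LFS storage (cloning without git-lfs leaves you with this three-line pointer text instead of the binary). A minimal sketch, not repo tooling, for checking a fully downloaded copy against the pointer above:

```python
# Minimal sketch: verify a downloaded training_args.bin against its
# LFS pointer (SHA-256 must equal "oid", length must equal "size").
import hashlib
from pathlib import Path

data = Path("training_args.bin").read_bytes()
assert len(data) == 7224, "unexpected file size"
digest = hashlib.sha256(data).hexdigest()
assert digest == "b7d39e7a6abc5bf7f75ea9a4af04367ad04f0fac9bd08928b72afd285b3541c5"
print("pointer matches:", digest)
```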
training_eval_loss.png
ADDED
(binary image: evaluation-loss curve over training; not rendered in the diff)
training_loss.png
ADDED
(binary image: training-loss curve; not rendered in the diff)
vocab.json
ADDED
The diff for this file is too large to render. See raw diff.
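Although too large for the diff viewer, vocab.json is plain JSON; for byte-level BPE tokenizers like this one it is typically a flat token-to-id map. A quick sketch, assuming that layout, for inspecting it locally:

```python
# Minimal sketch: inspect the tokenizer vocabulary, assuming
# vocab.json is a flat {token: id} mapping.
import json

with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)

print(len(vocab), "tokens")  # vocabulary size
# A few of the lowest-id entries, for a feel of the token inventory.
print(sorted(vocab.items(), key=lambda kv: kv[1])[:5])
```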