Philhoon commited on
Commit
18ed226
·
verified ·
1 Parent(s): 5120cd3

Upload folder using huggingface_hub

Browse files
config.json ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "/workspace/philhoon/models/kv-fusion/tqa-kv-llama3.1-base",
3
+ "architectures": [
4
+ "LlamaForEncoderDecoder2"
5
+ ],
6
+ "attention_bias": false,
7
+ "attention_dropout": 0.0,
8
+ "bos_token_id": 128000,
9
+ "eos_token_id": 128001,
10
+ "head_dim": 128,
11
+ "hidden_act": "silu",
12
+ "hidden_size": 4096,
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 14336,
15
+ "max_position_embeddings": 131072,
16
+ "mlp_bias": false,
17
+ "model_type": "llama",
18
+ "num_attention_heads": 32,
19
+ "num_hidden_layers": 32,
20
+ "num_key_value_heads": 8,
21
+ "pretraining_tp": 1,
22
+ "rms_norm_eps": 1e-05,
23
+ "rope_scaling": {
24
+ "factor": 8.0,
25
+ "high_freq_factor": 4.0,
26
+ "low_freq_factor": 1.0,
27
+ "original_max_position_embeddings": 8192,
28
+ "rope_type": "llama3"
29
+ },
30
+ "rope_theta": 500000.0,
31
+ "tie_word_embeddings": false,
32
+ "torch_dtype": "bfloat16",
33
+ "transformers_version": "4.45.0.dev0",
34
+ "use_cache": true,
35
+ "vocab_size": 128257
36
+ }
model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f7285f423d51c18a4421ee370bbae108323c05b94ad51a6ace18633b9b1bdbed
3
+ size 4976707520
model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:75ca2c523e6061e2df25b30aa7135256f632f216f71430172690f757513a7bbd
3
+ size 4999803552
model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bfcf774a75f3296b74f9bc535baf303394f20b6f1e013e7f5cad25c29ef342de
3
+ size 4915916976
model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:72e4259fbe4a9f14aa1e493c671a609215282892314eaed01841e84a62c2dad5
3
+ size 4920065288
model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:329669675b5f66a5c0225b2fd9f139ace96d9352820eaa6e37672bad5dc65814
3
+ size 4915916144
model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f7c779dcc2e3eefb7e902859603bda94e830be01458d3367122a8dd476d462d1
3
+ size 4915916176
model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3f119a497de9119a0d24f321aebdcca69d5adbb572ca0799fcb847d0b93df81
3
+ size 2476822152
model.safetensors.index.json ADDED
@@ -0,0 +1,589 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "metadata": {
3
+ "total_size": 32121077760
4
+ },
5
+ "weight_map": {
6
+ "encoder.lm_head.weight": "model-00004-of-00007.safetensors",
7
+ "encoder.model.embed_tokens.weight": "model-00001-of-00007.safetensors",
8
+ "encoder.model.layers.0.input_layernorm.weight": "model-00001-of-00007.safetensors",
9
+ "encoder.model.layers.0.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
10
+ "encoder.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
11
+ "encoder.model.layers.0.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
12
+ "encoder.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
13
+ "encoder.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
14
+ "encoder.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
15
+ "encoder.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
16
+ "encoder.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
17
+ "encoder.model.layers.1.input_layernorm.weight": "model-00001-of-00007.safetensors",
18
+ "encoder.model.layers.1.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
19
+ "encoder.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
20
+ "encoder.model.layers.1.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
21
+ "encoder.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
22
+ "encoder.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
23
+ "encoder.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
24
+ "encoder.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
25
+ "encoder.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
26
+ "encoder.model.layers.10.input_layernorm.weight": "model-00002-of-00007.safetensors",
27
+ "encoder.model.layers.10.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
28
+ "encoder.model.layers.10.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
29
+ "encoder.model.layers.10.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
30
+ "encoder.model.layers.10.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
31
+ "encoder.model.layers.10.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
32
+ "encoder.model.layers.10.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
33
+ "encoder.model.layers.10.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
34
+ "encoder.model.layers.10.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
35
+ "encoder.model.layers.11.input_layernorm.weight": "model-00002-of-00007.safetensors",
36
+ "encoder.model.layers.11.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
37
+ "encoder.model.layers.11.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
38
+ "encoder.model.layers.11.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
39
+ "encoder.model.layers.11.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
40
+ "encoder.model.layers.11.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
41
+ "encoder.model.layers.11.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
42
+ "encoder.model.layers.11.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
43
+ "encoder.model.layers.11.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
44
+ "encoder.model.layers.12.input_layernorm.weight": "model-00002-of-00007.safetensors",
45
+ "encoder.model.layers.12.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
46
+ "encoder.model.layers.12.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
47
+ "encoder.model.layers.12.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
48
+ "encoder.model.layers.12.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
49
+ "encoder.model.layers.12.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
50
+ "encoder.model.layers.12.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
51
+ "encoder.model.layers.12.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
52
+ "encoder.model.layers.12.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
53
+ "encoder.model.layers.13.input_layernorm.weight": "model-00002-of-00007.safetensors",
54
+ "encoder.model.layers.13.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
55
+ "encoder.model.layers.13.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
56
+ "encoder.model.layers.13.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
57
+ "encoder.model.layers.13.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
58
+ "encoder.model.layers.13.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
59
+ "encoder.model.layers.13.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
60
+ "encoder.model.layers.13.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
61
+ "encoder.model.layers.13.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
62
+ "encoder.model.layers.14.input_layernorm.weight": "model-00002-of-00007.safetensors",
63
+ "encoder.model.layers.14.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
64
+ "encoder.model.layers.14.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
65
+ "encoder.model.layers.14.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
66
+ "encoder.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
67
+ "encoder.model.layers.14.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
68
+ "encoder.model.layers.14.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
69
+ "encoder.model.layers.14.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
70
+ "encoder.model.layers.14.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
71
+ "encoder.model.layers.15.input_layernorm.weight": "model-00002-of-00007.safetensors",
72
+ "encoder.model.layers.15.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
73
+ "encoder.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
74
+ "encoder.model.layers.15.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
75
+ "encoder.model.layers.15.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
76
+ "encoder.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
77
+ "encoder.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
78
+ "encoder.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
79
+ "encoder.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
80
+ "encoder.model.layers.16.input_layernorm.weight": "model-00002-of-00007.safetensors",
81
+ "encoder.model.layers.16.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
82
+ "encoder.model.layers.16.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
83
+ "encoder.model.layers.16.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
84
+ "encoder.model.layers.16.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
85
+ "encoder.model.layers.16.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
86
+ "encoder.model.layers.16.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
87
+ "encoder.model.layers.16.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
88
+ "encoder.model.layers.16.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
89
+ "encoder.model.layers.17.input_layernorm.weight": "model-00002-of-00007.safetensors",
90
+ "encoder.model.layers.17.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
91
+ "encoder.model.layers.17.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
92
+ "encoder.model.layers.17.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
93
+ "encoder.model.layers.17.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
94
+ "encoder.model.layers.17.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
95
+ "encoder.model.layers.17.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
96
+ "encoder.model.layers.17.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
97
+ "encoder.model.layers.17.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
98
+ "encoder.model.layers.18.input_layernorm.weight": "model-00002-of-00007.safetensors",
99
+ "encoder.model.layers.18.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
100
+ "encoder.model.layers.18.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
101
+ "encoder.model.layers.18.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
102
+ "encoder.model.layers.18.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
103
+ "encoder.model.layers.18.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
104
+ "encoder.model.layers.18.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
105
+ "encoder.model.layers.18.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
106
+ "encoder.model.layers.18.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
107
+ "encoder.model.layers.19.input_layernorm.weight": "model-00002-of-00007.safetensors",
108
+ "encoder.model.layers.19.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
109
+ "encoder.model.layers.19.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
110
+ "encoder.model.layers.19.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
111
+ "encoder.model.layers.19.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
112
+ "encoder.model.layers.19.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
113
+ "encoder.model.layers.19.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
114
+ "encoder.model.layers.19.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
115
+ "encoder.model.layers.19.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
116
+ "encoder.model.layers.2.input_layernorm.weight": "model-00001-of-00007.safetensors",
117
+ "encoder.model.layers.2.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
118
+ "encoder.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
119
+ "encoder.model.layers.2.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
120
+ "encoder.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
121
+ "encoder.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
122
+ "encoder.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
123
+ "encoder.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
124
+ "encoder.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
125
+ "encoder.model.layers.20.input_layernorm.weight": "model-00003-of-00007.safetensors",
126
+ "encoder.model.layers.20.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
127
+ "encoder.model.layers.20.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
128
+ "encoder.model.layers.20.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
129
+ "encoder.model.layers.20.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
130
+ "encoder.model.layers.20.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
131
+ "encoder.model.layers.20.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
132
+ "encoder.model.layers.20.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
133
+ "encoder.model.layers.20.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
134
+ "encoder.model.layers.21.input_layernorm.weight": "model-00003-of-00007.safetensors",
135
+ "encoder.model.layers.21.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
136
+ "encoder.model.layers.21.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
137
+ "encoder.model.layers.21.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
138
+ "encoder.model.layers.21.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
139
+ "encoder.model.layers.21.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
140
+ "encoder.model.layers.21.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
141
+ "encoder.model.layers.21.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
142
+ "encoder.model.layers.21.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
143
+ "encoder.model.layers.22.input_layernorm.weight": "model-00003-of-00007.safetensors",
144
+ "encoder.model.layers.22.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
145
+ "encoder.model.layers.22.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
146
+ "encoder.model.layers.22.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
147
+ "encoder.model.layers.22.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
148
+ "encoder.model.layers.22.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
149
+ "encoder.model.layers.22.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
150
+ "encoder.model.layers.22.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
151
+ "encoder.model.layers.22.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
152
+ "encoder.model.layers.23.input_layernorm.weight": "model-00003-of-00007.safetensors",
153
+ "encoder.model.layers.23.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
154
+ "encoder.model.layers.23.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
155
+ "encoder.model.layers.23.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
156
+ "encoder.model.layers.23.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
157
+ "encoder.model.layers.23.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
158
+ "encoder.model.layers.23.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
159
+ "encoder.model.layers.23.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
160
+ "encoder.model.layers.23.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
161
+ "encoder.model.layers.24.input_layernorm.weight": "model-00003-of-00007.safetensors",
162
+ "encoder.model.layers.24.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
163
+ "encoder.model.layers.24.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
164
+ "encoder.model.layers.24.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
165
+ "encoder.model.layers.24.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
166
+ "encoder.model.layers.24.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
167
+ "encoder.model.layers.24.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
168
+ "encoder.model.layers.24.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
169
+ "encoder.model.layers.24.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
170
+ "encoder.model.layers.25.input_layernorm.weight": "model-00003-of-00007.safetensors",
171
+ "encoder.model.layers.25.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
172
+ "encoder.model.layers.25.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
173
+ "encoder.model.layers.25.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
174
+ "encoder.model.layers.25.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
175
+ "encoder.model.layers.25.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
176
+ "encoder.model.layers.25.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
177
+ "encoder.model.layers.25.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
178
+ "encoder.model.layers.25.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
179
+ "encoder.model.layers.26.input_layernorm.weight": "model-00003-of-00007.safetensors",
180
+ "encoder.model.layers.26.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
181
+ "encoder.model.layers.26.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
182
+ "encoder.model.layers.26.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
183
+ "encoder.model.layers.26.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
184
+ "encoder.model.layers.26.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
185
+ "encoder.model.layers.26.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
186
+ "encoder.model.layers.26.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
187
+ "encoder.model.layers.26.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
188
+ "encoder.model.layers.27.input_layernorm.weight": "model-00003-of-00007.safetensors",
189
+ "encoder.model.layers.27.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
190
+ "encoder.model.layers.27.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
191
+ "encoder.model.layers.27.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
192
+ "encoder.model.layers.27.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
193
+ "encoder.model.layers.27.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
194
+ "encoder.model.layers.27.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
195
+ "encoder.model.layers.27.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
196
+ "encoder.model.layers.27.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
197
+ "encoder.model.layers.28.input_layernorm.weight": "model-00003-of-00007.safetensors",
198
+ "encoder.model.layers.28.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
199
+ "encoder.model.layers.28.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
200
+ "encoder.model.layers.28.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
201
+ "encoder.model.layers.28.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
202
+ "encoder.model.layers.28.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
203
+ "encoder.model.layers.28.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
204
+ "encoder.model.layers.28.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
205
+ "encoder.model.layers.28.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
206
+ "encoder.model.layers.29.input_layernorm.weight": "model-00003-of-00007.safetensors",
207
+ "encoder.model.layers.29.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
208
+ "encoder.model.layers.29.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
209
+ "encoder.model.layers.29.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
210
+ "encoder.model.layers.29.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
211
+ "encoder.model.layers.29.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
212
+ "encoder.model.layers.29.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
213
+ "encoder.model.layers.29.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
214
+ "encoder.model.layers.29.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
215
+ "encoder.model.layers.3.input_layernorm.weight": "model-00001-of-00007.safetensors",
216
+ "encoder.model.layers.3.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
217
+ "encoder.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
218
+ "encoder.model.layers.3.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
219
+ "encoder.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
220
+ "encoder.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
221
+ "encoder.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
222
+ "encoder.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
223
+ "encoder.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
224
+ "encoder.model.layers.30.input_layernorm.weight": "model-00003-of-00007.safetensors",
225
+ "encoder.model.layers.30.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
226
+ "encoder.model.layers.30.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
227
+ "encoder.model.layers.30.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
228
+ "encoder.model.layers.30.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
229
+ "encoder.model.layers.30.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
230
+ "encoder.model.layers.30.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
231
+ "encoder.model.layers.30.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
232
+ "encoder.model.layers.30.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
233
+ "encoder.model.layers.31.input_layernorm.weight": "model-00004-of-00007.safetensors",
234
+ "encoder.model.layers.31.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
235
+ "encoder.model.layers.31.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
236
+ "encoder.model.layers.31.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
237
+ "encoder.model.layers.31.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
238
+ "encoder.model.layers.31.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
239
+ "encoder.model.layers.31.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
240
+ "encoder.model.layers.31.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
241
+ "encoder.model.layers.31.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
242
+ "encoder.model.layers.4.input_layernorm.weight": "model-00001-of-00007.safetensors",
243
+ "encoder.model.layers.4.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
244
+ "encoder.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
245
+ "encoder.model.layers.4.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
246
+ "encoder.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
247
+ "encoder.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
248
+ "encoder.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
249
+ "encoder.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
250
+ "encoder.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
251
+ "encoder.model.layers.5.input_layernorm.weight": "model-00001-of-00007.safetensors",
252
+ "encoder.model.layers.5.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
253
+ "encoder.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
254
+ "encoder.model.layers.5.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
255
+ "encoder.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
256
+ "encoder.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
257
+ "encoder.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
258
+ "encoder.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
259
+ "encoder.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
260
+ "encoder.model.layers.6.input_layernorm.weight": "model-00001-of-00007.safetensors",
261
+ "encoder.model.layers.6.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
262
+ "encoder.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
263
+ "encoder.model.layers.6.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
264
+ "encoder.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
265
+ "encoder.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
266
+ "encoder.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
267
+ "encoder.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
268
+ "encoder.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
269
+ "encoder.model.layers.7.input_layernorm.weight": "model-00001-of-00007.safetensors",
270
+ "encoder.model.layers.7.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
271
+ "encoder.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
272
+ "encoder.model.layers.7.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
273
+ "encoder.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
274
+ "encoder.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
275
+ "encoder.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
276
+ "encoder.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
277
+ "encoder.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
278
+ "encoder.model.layers.8.input_layernorm.weight": "model-00001-of-00007.safetensors",
279
+ "encoder.model.layers.8.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
280
+ "encoder.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
281
+ "encoder.model.layers.8.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
282
+ "encoder.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
283
+ "encoder.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
284
+ "encoder.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
285
+ "encoder.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
286
+ "encoder.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
287
+ "encoder.model.layers.9.input_layernorm.weight": "model-00002-of-00007.safetensors",
288
+ "encoder.model.layers.9.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
289
+ "encoder.model.layers.9.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
290
+ "encoder.model.layers.9.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
291
+ "encoder.model.layers.9.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
292
+ "encoder.model.layers.9.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
293
+ "encoder.model.layers.9.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
294
+ "encoder.model.layers.9.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
295
+ "encoder.model.layers.9.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
296
+ "encoder.model.norm.weight": "model-00004-of-00007.safetensors",
297
+ "lm_head.weight": "model-00007-of-00007.safetensors",
298
+ "model.embed_tokens.weight": "model-00004-of-00007.safetensors",
299
+ "model.layers.0.input_layernorm.weight": "model-00004-of-00007.safetensors",
300
+ "model.layers.0.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
301
+ "model.layers.0.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
302
+ "model.layers.0.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
303
+ "model.layers.0.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
304
+ "model.layers.0.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
305
+ "model.layers.0.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
306
+ "model.layers.0.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
307
+ "model.layers.0.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
308
+ "model.layers.1.input_layernorm.weight": "model-00004-of-00007.safetensors",
309
+ "model.layers.1.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
310
+ "model.layers.1.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
311
+ "model.layers.1.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
312
+ "model.layers.1.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
313
+ "model.layers.1.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
314
+ "model.layers.1.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
315
+ "model.layers.1.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
316
+ "model.layers.1.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
317
+ "model.layers.10.input_layernorm.weight": "model-00005-of-00007.safetensors",
318
+ "model.layers.10.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
319
+ "model.layers.10.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
320
+ "model.layers.10.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
321
+ "model.layers.10.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
322
+ "model.layers.10.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
323
+ "model.layers.10.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
324
+ "model.layers.10.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
325
+ "model.layers.10.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
326
+ "model.layers.11.input_layernorm.weight": "model-00005-of-00007.safetensors",
327
+ "model.layers.11.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
328
+ "model.layers.11.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
329
+ "model.layers.11.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
330
+ "model.layers.11.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
331
+ "model.layers.11.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
332
+ "model.layers.11.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
333
+ "model.layers.11.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
334
+ "model.layers.11.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
335
+ "model.layers.12.input_layernorm.weight": "model-00005-of-00007.safetensors",
336
+ "model.layers.12.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
337
+ "model.layers.12.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
338
+ "model.layers.12.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
339
+ "model.layers.12.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
340
+ "model.layers.12.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
341
+ "model.layers.12.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
342
+ "model.layers.12.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
343
+ "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
344
+ "model.layers.13.input_layernorm.weight": "model-00005-of-00007.safetensors",
345
+ "model.layers.13.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
346
+ "model.layers.13.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
347
+ "model.layers.13.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
348
+ "model.layers.13.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
349
+ "model.layers.13.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
350
+ "model.layers.13.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
351
+ "model.layers.13.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
352
+ "model.layers.13.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
353
+ "model.layers.14.input_layernorm.weight": "model-00005-of-00007.safetensors",
354
+ "model.layers.14.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
355
+ "model.layers.14.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
356
+ "model.layers.14.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
357
+ "model.layers.14.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
358
+ "model.layers.14.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
359
+ "model.layers.14.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
360
+ "model.layers.14.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
361
+ "model.layers.14.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
362
+ "model.layers.15.input_layernorm.weight": "model-00005-of-00007.safetensors",
363
+ "model.layers.15.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
364
+ "model.layers.15.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
365
+ "model.layers.15.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
366
+ "model.layers.15.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
367
+ "model.layers.15.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
368
+ "model.layers.15.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
369
+ "model.layers.15.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
370
+ "model.layers.15.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
371
+ "model.layers.16.input_layernorm.weight": "model-00005-of-00007.safetensors",
372
+ "model.layers.16.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
373
+ "model.layers.16.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
374
+ "model.layers.16.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
375
+ "model.layers.16.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
376
+ "model.layers.16.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
377
+ "model.layers.16.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
378
+ "model.layers.16.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
379
+ "model.layers.16.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
380
+ "model.layers.17.input_layernorm.weight": "model-00006-of-00007.safetensors",
381
+ "model.layers.17.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
382
+ "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
383
+ "model.layers.17.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
384
+ "model.layers.17.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
385
+ "model.layers.17.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
386
+ "model.layers.17.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
387
+ "model.layers.17.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
388
+ "model.layers.17.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
389
+ "model.layers.18.input_layernorm.weight": "model-00006-of-00007.safetensors",
390
+ "model.layers.18.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
391
+ "model.layers.18.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
392
+ "model.layers.18.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
393
+ "model.layers.18.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
394
+ "model.layers.18.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
395
+ "model.layers.18.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
396
+ "model.layers.18.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
397
+ "model.layers.18.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
398
+ "model.layers.19.input_layernorm.weight": "model-00006-of-00007.safetensors",
399
+ "model.layers.19.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
400
+ "model.layers.19.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
401
+ "model.layers.19.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
402
+ "model.layers.19.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
403
+ "model.layers.19.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
404
+ "model.layers.19.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
405
+ "model.layers.19.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
406
+ "model.layers.19.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
407
+ "model.layers.2.input_layernorm.weight": "model-00004-of-00007.safetensors",
408
+ "model.layers.2.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
409
+ "model.layers.2.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
410
+ "model.layers.2.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
411
+ "model.layers.2.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
412
+ "model.layers.2.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
413
+ "model.layers.2.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
414
+ "model.layers.2.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
415
+ "model.layers.2.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
416
+ "model.layers.20.input_layernorm.weight": "model-00006-of-00007.safetensors",
417
+ "model.layers.20.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
418
+ "model.layers.20.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
419
+ "model.layers.20.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
420
+ "model.layers.20.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
421
+ "model.layers.20.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
422
+ "model.layers.20.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
423
+ "model.layers.20.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
424
+ "model.layers.20.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
425
+ "model.layers.21.input_layernorm.weight": "model-00006-of-00007.safetensors",
426
+ "model.layers.21.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
427
+ "model.layers.21.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
428
+ "model.layers.21.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
429
+ "model.layers.21.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
430
+ "model.layers.21.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
431
+ "model.layers.21.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
432
+ "model.layers.21.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
433
+ "model.layers.21.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
434
+ "model.layers.22.input_layernorm.weight": "model-00006-of-00007.safetensors",
435
+ "model.layers.22.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
436
+ "model.layers.22.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
437
+ "model.layers.22.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
438
+ "model.layers.22.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
439
+ "model.layers.22.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
440
+ "model.layers.22.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
441
+ "model.layers.22.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
442
+ "model.layers.22.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
443
+ "model.layers.23.input_layernorm.weight": "model-00006-of-00007.safetensors",
444
+ "model.layers.23.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
445
+ "model.layers.23.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
446
+ "model.layers.23.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
447
+ "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
448
+ "model.layers.23.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
449
+ "model.layers.23.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
450
+ "model.layers.23.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
451
+ "model.layers.23.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
452
+ "model.layers.24.input_layernorm.weight": "model-00006-of-00007.safetensors",
453
+ "model.layers.24.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
454
+ "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
455
+ "model.layers.24.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
456
+ "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
457
+ "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
458
+ "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
459
+ "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
460
+ "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
461
+ "model.layers.25.input_layernorm.weight": "model-00006-of-00007.safetensors",
462
+ "model.layers.25.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
463
+ "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
464
+ "model.layers.25.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
465
+ "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
466
+ "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
467
+ "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
468
+ "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
469
+ "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
470
+ "model.layers.26.input_layernorm.weight": "model-00006-of-00007.safetensors",
471
+ "model.layers.26.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
472
+ "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
473
+ "model.layers.26.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
474
+ "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
475
+ "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
476
+ "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
477
+ "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
478
+ "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
479
+ "model.layers.27.input_layernorm.weight": "model-00006-of-00007.safetensors",
480
+ "model.layers.27.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
481
+ "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
482
+ "model.layers.27.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
483
+ "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
484
+ "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
485
+ "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
486
+ "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
487
+ "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
488
+ "model.layers.28.input_layernorm.weight": "model-00007-of-00007.safetensors",
489
+ "model.layers.28.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
490
+ "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
491
+ "model.layers.28.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
492
+ "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
493
+ "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
494
+ "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
495
+ "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
496
+ "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
497
+ "model.layers.29.input_layernorm.weight": "model-00007-of-00007.safetensors",
498
+ "model.layers.29.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
499
+ "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00007.safetensors",
500
+ "model.layers.29.mlp.up_proj.weight": "model-00007-of-00007.safetensors",
501
+ "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
502
+ "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00007.safetensors",
503
+ "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00007.safetensors",
504
+ "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00007.safetensors",
505
+ "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00007.safetensors",
506
+ "model.layers.3.input_layernorm.weight": "model-00004-of-00007.safetensors",
507
+ "model.layers.3.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
508
+ "model.layers.3.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
509
+ "model.layers.3.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
510
+ "model.layers.3.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
511
+ "model.layers.3.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
512
+ "model.layers.3.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
513
+ "model.layers.3.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
514
+ "model.layers.3.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
515
+ "model.layers.30.input_layernorm.weight": "model-00007-of-00007.safetensors",
516
+ "model.layers.30.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
517
+ "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00007.safetensors",
518
+ "model.layers.30.mlp.up_proj.weight": "model-00007-of-00007.safetensors",
519
+ "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
520
+ "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00007.safetensors",
521
+ "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00007.safetensors",
522
+ "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00007.safetensors",
523
+ "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00007.safetensors",
524
+ "model.layers.31.input_layernorm.weight": "model-00007-of-00007.safetensors",
525
+ "model.layers.31.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
526
+ "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00007.safetensors",
527
+ "model.layers.31.mlp.up_proj.weight": "model-00007-of-00007.safetensors",
528
+ "model.layers.31.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
529
+ "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00007.safetensors",
530
+ "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00007.safetensors",
531
+ "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00007.safetensors",
532
+ "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00007.safetensors",
533
+ "model.layers.4.input_layernorm.weight": "model-00004-of-00007.safetensors",
534
+ "model.layers.4.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
535
+ "model.layers.4.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
536
+ "model.layers.4.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
537
+ "model.layers.4.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
538
+ "model.layers.4.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
539
+ "model.layers.4.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
540
+ "model.layers.4.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
541
+ "model.layers.4.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
542
+ "model.layers.5.input_layernorm.weight": "model-00004-of-00007.safetensors",
543
+ "model.layers.5.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
544
+ "model.layers.5.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
545
+ "model.layers.5.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
546
+ "model.layers.5.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
547
+ "model.layers.5.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
548
+ "model.layers.5.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
549
+ "model.layers.5.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
550
+ "model.layers.5.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
551
+ "model.layers.6.input_layernorm.weight": "model-00005-of-00007.safetensors",
552
+ "model.layers.6.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
553
+ "model.layers.6.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
554
+ "model.layers.6.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
555
+ "model.layers.6.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
556
+ "model.layers.6.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
557
+ "model.layers.6.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
558
+ "model.layers.6.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
559
+ "model.layers.6.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
560
+ "model.layers.7.input_layernorm.weight": "model-00005-of-00007.safetensors",
561
+ "model.layers.7.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
562
+ "model.layers.7.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
563
+ "model.layers.7.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
564
+ "model.layers.7.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
565
+ "model.layers.7.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
566
+ "model.layers.7.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
567
+ "model.layers.7.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
568
+ "model.layers.7.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
569
+ "model.layers.8.input_layernorm.weight": "model-00005-of-00007.safetensors",
570
+ "model.layers.8.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
571
+ "model.layers.8.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
572
+ "model.layers.8.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
573
+ "model.layers.8.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
574
+ "model.layers.8.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
575
+ "model.layers.8.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
576
+ "model.layers.8.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
577
+ "model.layers.8.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
578
+ "model.layers.9.input_layernorm.weight": "model-00005-of-00007.safetensors",
579
+ "model.layers.9.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
580
+ "model.layers.9.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
581
+ "model.layers.9.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
582
+ "model.layers.9.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
583
+ "model.layers.9.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
584
+ "model.layers.9.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
585
+ "model.layers.9.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
586
+ "model.layers.9.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
587
+ "model.norm.weight": "model-00007-of-00007.safetensors"
588
+ }
589
+ }
rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:92cc13315f24c28015d695b6cde08bb1cd6fea4cbc435998485ed6fbe4c91285
3
+ size 15024
rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f4c154b6a63e0b1f98f7d2847944398f99f1657d35e8eddf7fdf0ae2c24b0552
3
+ size 15024
rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f784c6a9507b51189f2caffbd178ea9882103b75852e31c15f47fdae6a43af1d
3
+ size 15024
rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:34b023e05bc2d12b91dc436d4922b990d50ec8dc56d40dc3e36b3bb34fc81341
3
+ size 15024
scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:754972bd89a0ad01d3e86f4bc7920b352b519a155dc9755a815c98aa76c268ae
3
+ size 1064
trainer_state.json ADDED
@@ -0,0 +1,1671 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 1.9968,
5
+ "eval_steps": 500,
6
+ "global_step": 936,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.008533333333333334,
13
+ "grad_norm": 2.515625,
14
+ "learning_rate": 1.6000000000000001e-06,
15
+ "loss": 0.2639,
16
+ "step": 4
17
+ },
18
+ {
19
+ "epoch": 0.017066666666666667,
20
+ "grad_norm": 1.96875,
21
+ "learning_rate": 3.2000000000000003e-06,
22
+ "loss": 0.2509,
23
+ "step": 8
24
+ },
25
+ {
26
+ "epoch": 0.0256,
27
+ "grad_norm": 1.9296875,
28
+ "learning_rate": 4.800000000000001e-06,
29
+ "loss": 0.2595,
30
+ "step": 12
31
+ },
32
+ {
33
+ "epoch": 0.034133333333333335,
34
+ "grad_norm": 2.046875,
35
+ "learning_rate": 6.4000000000000006e-06,
36
+ "loss": 0.2416,
37
+ "step": 16
38
+ },
39
+ {
40
+ "epoch": 0.042666666666666665,
41
+ "grad_norm": 2.546875,
42
+ "learning_rate": 8.000000000000001e-06,
43
+ "loss": 0.2478,
44
+ "step": 20
45
+ },
46
+ {
47
+ "epoch": 0.0512,
48
+ "grad_norm": 2.296875,
49
+ "learning_rate": 9.600000000000001e-06,
50
+ "loss": 0.2367,
51
+ "step": 24
52
+ },
53
+ {
54
+ "epoch": 0.05973333333333333,
55
+ "grad_norm": 2.03125,
56
+ "learning_rate": 1.1200000000000001e-05,
57
+ "loss": 0.2397,
58
+ "step": 28
59
+ },
60
+ {
61
+ "epoch": 0.06826666666666667,
62
+ "grad_norm": 1.8359375,
63
+ "learning_rate": 1.2800000000000001e-05,
64
+ "loss": 0.2416,
65
+ "step": 32
66
+ },
67
+ {
68
+ "epoch": 0.0768,
69
+ "grad_norm": 1.9609375,
70
+ "learning_rate": 1.4400000000000001e-05,
71
+ "loss": 0.2421,
72
+ "step": 36
73
+ },
74
+ {
75
+ "epoch": 0.08533333333333333,
76
+ "grad_norm": 2.0,
77
+ "learning_rate": 1.6000000000000003e-05,
78
+ "loss": 0.2392,
79
+ "step": 40
80
+ },
81
+ {
82
+ "epoch": 0.09386666666666667,
83
+ "grad_norm": 2.046875,
84
+ "learning_rate": 1.76e-05,
85
+ "loss": 0.2356,
86
+ "step": 44
87
+ },
88
+ {
89
+ "epoch": 0.1024,
90
+ "grad_norm": 2.203125,
91
+ "learning_rate": 1.9200000000000003e-05,
92
+ "loss": 0.2367,
93
+ "step": 48
94
+ },
95
+ {
96
+ "epoch": 0.11093333333333333,
97
+ "grad_norm": 2.09375,
98
+ "learning_rate": 1.999974854488333e-05,
99
+ "loss": 0.2418,
100
+ "step": 52
101
+ },
102
+ {
103
+ "epoch": 0.11946666666666667,
104
+ "grad_norm": 1.921875,
105
+ "learning_rate": 1.9997736979824944e-05,
106
+ "loss": 0.2379,
107
+ "step": 56
108
+ },
109
+ {
110
+ "epoch": 0.128,
111
+ "grad_norm": 1.9140625,
112
+ "learning_rate": 1.999371425435775e-05,
113
+ "loss": 0.241,
114
+ "step": 60
115
+ },
116
+ {
117
+ "epoch": 0.13653333333333334,
118
+ "grad_norm": 1.9453125,
119
+ "learning_rate": 1.9987681177699486e-05,
120
+ "loss": 0.2509,
121
+ "step": 64
122
+ },
123
+ {
124
+ "epoch": 0.14506666666666668,
125
+ "grad_norm": 1.8828125,
126
+ "learning_rate": 1.9979638963473294e-05,
127
+ "loss": 0.2412,
128
+ "step": 68
129
+ },
130
+ {
131
+ "epoch": 0.1536,
132
+ "grad_norm": 1.8125,
133
+ "learning_rate": 1.996958922946357e-05,
134
+ "loss": 0.2501,
135
+ "step": 72
136
+ },
137
+ {
138
+ "epoch": 0.16213333333333332,
139
+ "grad_norm": 2.6875,
140
+ "learning_rate": 1.9957533997290524e-05,
141
+ "loss": 0.2482,
142
+ "step": 76
143
+ },
144
+ {
145
+ "epoch": 0.17066666666666666,
146
+ "grad_norm": 1.8359375,
147
+ "learning_rate": 1.9943475692003514e-05,
148
+ "loss": 0.2371,
149
+ "step": 80
150
+ },
151
+ {
152
+ "epoch": 0.1792,
153
+ "grad_norm": 1.9765625,
154
+ "learning_rate": 1.992741714159322e-05,
155
+ "loss": 0.2459,
156
+ "step": 84
157
+ },
158
+ {
159
+ "epoch": 0.18773333333333334,
160
+ "grad_norm": 2.671875,
161
+ "learning_rate": 1.990936157642277e-05,
162
+ "loss": 0.2448,
163
+ "step": 88
164
+ },
165
+ {
166
+ "epoch": 0.19626666666666667,
167
+ "grad_norm": 1.8359375,
168
+ "learning_rate": 1.9889312628577887e-05,
169
+ "loss": 0.2515,
170
+ "step": 92
171
+ },
172
+ {
173
+ "epoch": 0.2048,
174
+ "grad_norm": 2.15625,
175
+ "learning_rate": 1.9867274331136276e-05,
176
+ "loss": 0.2417,
177
+ "step": 96
178
+ },
179
+ {
180
+ "epoch": 0.21333333333333335,
181
+ "grad_norm": 1.921875,
182
+ "learning_rate": 1.984325111735633e-05,
183
+ "loss": 0.2506,
184
+ "step": 100
185
+ },
186
+ {
187
+ "epoch": 0.22186666666666666,
188
+ "grad_norm": 2.734375,
189
+ "learning_rate": 1.9817247819785303e-05,
190
+ "loss": 0.2513,
191
+ "step": 104
192
+ },
193
+ {
194
+ "epoch": 0.2304,
195
+ "grad_norm": 1.8671875,
196
+ "learning_rate": 1.9789269669287212e-05,
197
+ "loss": 0.2456,
198
+ "step": 108
199
+ },
200
+ {
201
+ "epoch": 0.23893333333333333,
202
+ "grad_norm": 2.15625,
203
+ "learning_rate": 1.975932229399057e-05,
204
+ "loss": 0.2572,
205
+ "step": 112
206
+ },
207
+ {
208
+ "epoch": 0.24746666666666667,
209
+ "grad_norm": 1.9609375,
210
+ "learning_rate": 1.972741171815623e-05,
211
+ "loss": 0.2476,
212
+ "step": 116
213
+ },
214
+ {
215
+ "epoch": 0.256,
216
+ "grad_norm": 1.9609375,
217
+ "learning_rate": 1.9693544360965548e-05,
218
+ "loss": 0.2442,
219
+ "step": 120
220
+ },
221
+ {
222
+ "epoch": 0.26453333333333334,
223
+ "grad_norm": 1.734375,
224
+ "learning_rate": 1.9657727035229066e-05,
225
+ "loss": 0.2655,
226
+ "step": 124
227
+ },
228
+ {
229
+ "epoch": 0.2730666666666667,
230
+ "grad_norm": 1.9140625,
231
+ "learning_rate": 1.9619966946016054e-05,
232
+ "loss": 0.248,
233
+ "step": 128
234
+ },
235
+ {
236
+ "epoch": 0.2816,
237
+ "grad_norm": 2.203125,
238
+ "learning_rate": 1.958027168920512e-05,
239
+ "loss": 0.2478,
240
+ "step": 132
241
+ },
242
+ {
243
+ "epoch": 0.29013333333333335,
244
+ "grad_norm": 1.65625,
245
+ "learning_rate": 1.953864924995621e-05,
246
+ "loss": 0.2534,
247
+ "step": 136
248
+ },
249
+ {
250
+ "epoch": 0.2986666666666667,
251
+ "grad_norm": 2.015625,
252
+ "learning_rate": 1.9495108001104312e-05,
253
+ "loss": 0.2453,
254
+ "step": 140
255
+ },
256
+ {
257
+ "epoch": 0.3072,
258
+ "grad_norm": 1.9453125,
259
+ "learning_rate": 1.9449656701475147e-05,
260
+ "loss": 0.2313,
261
+ "step": 144
262
+ },
263
+ {
264
+ "epoch": 0.3157333333333333,
265
+ "grad_norm": 1.8828125,
266
+ "learning_rate": 1.940230449412324e-05,
267
+ "loss": 0.2412,
268
+ "step": 148
269
+ },
270
+ {
271
+ "epoch": 0.32426666666666665,
272
+ "grad_norm": 1.9453125,
273
+ "learning_rate": 1.9353060904492694e-05,
274
+ "loss": 0.2615,
275
+ "step": 152
276
+ },
277
+ {
278
+ "epoch": 0.3328,
279
+ "grad_norm": 2.234375,
280
+ "learning_rate": 1.930193583850102e-05,
281
+ "loss": 0.2571,
282
+ "step": 156
283
+ },
284
+ {
285
+ "epoch": 0.3413333333333333,
286
+ "grad_norm": 2.078125,
287
+ "learning_rate": 1.9248939580546453e-05,
288
+ "loss": 0.2753,
289
+ "step": 160
290
+ },
291
+ {
292
+ "epoch": 0.34986666666666666,
293
+ "grad_norm": 1.6953125,
294
+ "learning_rate": 1.9194082791439146e-05,
295
+ "loss": 0.2462,
296
+ "step": 164
297
+ },
298
+ {
299
+ "epoch": 0.3584,
300
+ "grad_norm": 1.8828125,
301
+ "learning_rate": 1.91373765062566e-05,
302
+ "loss": 0.2574,
303
+ "step": 168
304
+ },
305
+ {
306
+ "epoch": 0.36693333333333333,
307
+ "grad_norm": 1.7734375,
308
+ "learning_rate": 1.9078832132123833e-05,
309
+ "loss": 0.2394,
310
+ "step": 172
311
+ },
312
+ {
313
+ "epoch": 0.37546666666666667,
314
+ "grad_norm": 1.8671875,
315
+ "learning_rate": 1.9018461445918727e-05,
316
+ "loss": 0.252,
317
+ "step": 176
318
+ },
319
+ {
320
+ "epoch": 0.384,
321
+ "grad_norm": 1.8046875,
322
+ "learning_rate": 1.895627659190294e-05,
323
+ "loss": 0.2529,
324
+ "step": 180
325
+ },
326
+ {
327
+ "epoch": 0.39253333333333335,
328
+ "grad_norm": 1.84375,
329
+ "learning_rate": 1.889229007927897e-05,
330
+ "loss": 0.2576,
331
+ "step": 184
332
+ },
333
+ {
334
+ "epoch": 0.4010666666666667,
335
+ "grad_norm": 2.015625,
336
+ "learning_rate": 1.8826514779673792e-05,
337
+ "loss": 0.2541,
338
+ "step": 188
339
+ },
340
+ {
341
+ "epoch": 0.4096,
342
+ "grad_norm": 1.8046875,
343
+ "learning_rate": 1.875896392454955e-05,
344
+ "loss": 0.2659,
345
+ "step": 192
346
+ },
347
+ {
348
+ "epoch": 0.41813333333333336,
349
+ "grad_norm": 1.6875,
350
+ "learning_rate": 1.8689651102541915e-05,
351
+ "loss": 0.2551,
352
+ "step": 196
353
+ },
354
+ {
355
+ "epoch": 0.4266666666666667,
356
+ "grad_norm": 1.9296875,
357
+ "learning_rate": 1.8618590256726587e-05,
358
+ "loss": 0.2597,
359
+ "step": 200
360
+ },
361
+ {
362
+ "epoch": 0.4352,
363
+ "grad_norm": 1.9140625,
364
+ "learning_rate": 1.854579568181446e-05,
365
+ "loss": 0.2627,
366
+ "step": 204
367
+ },
368
+ {
369
+ "epoch": 0.4437333333333333,
370
+ "grad_norm": 1.9296875,
371
+ "learning_rate": 1.8471282021276073e-05,
372
+ "loss": 0.2557,
373
+ "step": 208
374
+ },
375
+ {
376
+ "epoch": 0.45226666666666665,
377
+ "grad_norm": 1.9609375,
378
+ "learning_rate": 1.8395064264395945e-05,
379
+ "loss": 0.2573,
380
+ "step": 212
381
+ },
382
+ {
383
+ "epoch": 0.4608,
384
+ "grad_norm": 1.75,
385
+ "learning_rate": 1.831715774325726e-05,
386
+ "loss": 0.2515,
387
+ "step": 216
388
+ },
389
+ {
390
+ "epoch": 0.4693333333333333,
391
+ "grad_norm": 1.75,
392
+ "learning_rate": 1.8237578129657664e-05,
393
+ "loss": 0.2501,
394
+ "step": 220
395
+ },
396
+ {
397
+ "epoch": 0.47786666666666666,
398
+ "grad_norm": 1.6640625,
399
+ "learning_rate": 1.8156341431956706e-05,
400
+ "loss": 0.2521,
401
+ "step": 224
402
+ },
403
+ {
404
+ "epoch": 0.4864,
405
+ "grad_norm": 1.9296875,
406
+ "learning_rate": 1.8073463991855562e-05,
407
+ "loss": 0.2427,
408
+ "step": 228
409
+ },
410
+ {
411
+ "epoch": 0.49493333333333334,
412
+ "grad_norm": 1.8671875,
413
+ "learning_rate": 1.7988962481109716e-05,
414
+ "loss": 0.2639,
415
+ "step": 232
416
+ },
417
+ {
418
+ "epoch": 0.5034666666666666,
419
+ "grad_norm": 1.8203125,
420
+ "learning_rate": 1.7902853898175244e-05,
421
+ "loss": 0.2537,
422
+ "step": 236
423
+ },
424
+ {
425
+ "epoch": 0.512,
426
+ "grad_norm": 1.9375,
427
+ "learning_rate": 1.7815155564789374e-05,
428
+ "loss": 0.2611,
429
+ "step": 240
430
+ },
431
+ {
432
+ "epoch": 0.5205333333333333,
433
+ "grad_norm": 1.921875,
434
+ "learning_rate": 1.772588512248602e-05,
435
+ "loss": 0.2648,
436
+ "step": 244
437
+ },
438
+ {
439
+ "epoch": 0.5290666666666667,
440
+ "grad_norm": 2.03125,
441
+ "learning_rate": 1.7635060529046994e-05,
442
+ "loss": 0.2848,
443
+ "step": 248
444
+ },
445
+ {
446
+ "epoch": 0.5376,
447
+ "grad_norm": 1.75,
448
+ "learning_rate": 1.7542700054889572e-05,
449
+ "loss": 0.2695,
450
+ "step": 252
451
+ },
452
+ {
453
+ "epoch": 0.5461333333333334,
454
+ "grad_norm": 2.078125,
455
+ "learning_rate": 1.7448822279391204e-05,
456
+ "loss": 0.2653,
457
+ "step": 256
458
+ },
459
+ {
460
+ "epoch": 0.5546666666666666,
461
+ "grad_norm": 1.765625,
462
+ "learning_rate": 1.7353446087152038e-05,
463
+ "loss": 0.249,
464
+ "step": 260
465
+ },
466
+ {
467
+ "epoch": 0.5632,
468
+ "grad_norm": 1.75,
469
+ "learning_rate": 1.72565906641961e-05,
470
+ "loss": 0.2462,
471
+ "step": 264
472
+ },
473
+ {
474
+ "epoch": 0.5717333333333333,
475
+ "grad_norm": 1.6328125,
476
+ "learning_rate": 1.7158275494111763e-05,
477
+ "loss": 0.2373,
478
+ "step": 268
479
+ },
480
+ {
481
+ "epoch": 0.5802666666666667,
482
+ "grad_norm": 2.0625,
483
+ "learning_rate": 1.705852035413242e-05,
484
+ "loss": 0.2728,
485
+ "step": 272
486
+ },
487
+ {
488
+ "epoch": 0.5888,
489
+ "grad_norm": 1.671875,
490
+ "learning_rate": 1.6957345311158066e-05,
491
+ "loss": 0.2409,
492
+ "step": 276
493
+ },
494
+ {
495
+ "epoch": 0.5973333333333334,
496
+ "grad_norm": 2.015625,
497
+ "learning_rate": 1.6854770717718587e-05,
498
+ "loss": 0.254,
499
+ "step": 280
500
+ },
501
+ {
502
+ "epoch": 0.6058666666666667,
503
+ "grad_norm": 1.75,
504
+ "learning_rate": 1.6750817207879655e-05,
505
+ "loss": 0.2462,
506
+ "step": 284
507
+ },
508
+ {
509
+ "epoch": 0.6144,
510
+ "grad_norm": 1.6640625,
511
+ "learning_rate": 1.6645505693091897e-05,
512
+ "loss": 0.2561,
513
+ "step": 288
514
+ },
515
+ {
516
+ "epoch": 0.6229333333333333,
517
+ "grad_norm": 2.25,
518
+ "learning_rate": 1.6538857357984358e-05,
519
+ "loss": 0.2647,
520
+ "step": 292
521
+ },
522
+ {
523
+ "epoch": 0.6314666666666666,
524
+ "grad_norm": 1.9140625,
525
+ "learning_rate": 1.6430893656102942e-05,
526
+ "loss": 0.2398,
527
+ "step": 296
528
+ },
529
+ {
530
+ "epoch": 0.64,
531
+ "grad_norm": 2.078125,
532
+ "learning_rate": 1.6321636305594784e-05,
533
+ "loss": 0.2575,
534
+ "step": 300
535
+ },
536
+ {
537
+ "epoch": 0.6485333333333333,
538
+ "grad_norm": 1.8125,
539
+ "learning_rate": 1.6211107284839417e-05,
540
+ "loss": 0.2436,
541
+ "step": 304
542
+ },
543
+ {
544
+ "epoch": 0.6570666666666667,
545
+ "grad_norm": 1.984375,
546
+ "learning_rate": 1.609932882802753e-05,
547
+ "loss": 0.2427,
548
+ "step": 308
549
+ },
550
+ {
551
+ "epoch": 0.6656,
552
+ "grad_norm": 1.8671875,
553
+ "learning_rate": 1.5986323420688335e-05,
554
+ "loss": 0.2391,
555
+ "step": 312
556
+ },
557
+ {
558
+ "epoch": 0.6741333333333334,
559
+ "grad_norm": 2.0,
560
+ "learning_rate": 1.5872113795166337e-05,
561
+ "loss": 0.2543,
562
+ "step": 316
563
+ },
564
+ {
565
+ "epoch": 0.6826666666666666,
566
+ "grad_norm": 1.875,
567
+ "learning_rate": 1.575672292604844e-05,
568
+ "loss": 0.2541,
569
+ "step": 320
570
+ },
571
+ {
572
+ "epoch": 0.6912,
573
+ "grad_norm": 1.671875,
574
+ "learning_rate": 1.564017402554237e-05,
575
+ "loss": 0.253,
576
+ "step": 324
577
+ },
578
+ {
579
+ "epoch": 0.6997333333333333,
580
+ "grad_norm": 2.078125,
581
+ "learning_rate": 1.5522490538807248e-05,
582
+ "loss": 0.2565,
583
+ "step": 328
584
+ },
585
+ {
586
+ "epoch": 0.7082666666666667,
587
+ "grad_norm": 1.7265625,
588
+ "learning_rate": 1.5403696139237338e-05,
589
+ "loss": 0.2587,
590
+ "step": 332
591
+ },
592
+ {
593
+ "epoch": 0.7168,
594
+ "grad_norm": 1.90625,
595
+ "learning_rate": 1.5283814723699877e-05,
596
+ "loss": 0.2609,
597
+ "step": 336
598
+ },
599
+ {
600
+ "epoch": 0.7253333333333334,
601
+ "grad_norm": 1.8359375,
602
+ "learning_rate": 1.5162870407727922e-05,
603
+ "loss": 0.2522,
604
+ "step": 340
605
+ },
606
+ {
607
+ "epoch": 0.7338666666666667,
608
+ "grad_norm": 1.7109375,
609
+ "learning_rate": 1.5040887520669245e-05,
610
+ "loss": 0.2532,
611
+ "step": 344
612
+ },
613
+ {
614
+ "epoch": 0.7424,
615
+ "grad_norm": 1.859375,
616
+ "learning_rate": 1.4917890600792215e-05,
617
+ "loss": 0.2434,
618
+ "step": 348
619
+ },
620
+ {
621
+ "epoch": 0.7509333333333333,
622
+ "grad_norm": 1.890625,
623
+ "learning_rate": 1.4793904390349618e-05,
624
+ "loss": 0.2491,
625
+ "step": 352
626
+ },
627
+ {
628
+ "epoch": 0.7594666666666666,
629
+ "grad_norm": 1.7890625,
630
+ "learning_rate": 1.4668953830601473e-05,
631
+ "loss": 0.2336,
632
+ "step": 356
633
+ },
634
+ {
635
+ "epoch": 0.768,
636
+ "grad_norm": 1.765625,
637
+ "learning_rate": 1.4543064056797826e-05,
638
+ "loss": 0.2665,
639
+ "step": 360
640
+ },
641
+ {
642
+ "epoch": 0.7765333333333333,
643
+ "grad_norm": 1.78125,
644
+ "learning_rate": 1.4416260393122487e-05,
645
+ "loss": 0.2538,
646
+ "step": 364
647
+ },
648
+ {
649
+ "epoch": 0.7850666666666667,
650
+ "grad_norm": 2.0625,
651
+ "learning_rate": 1.4288568347598777e-05,
652
+ "loss": 0.2551,
653
+ "step": 368
654
+ },
655
+ {
656
+ "epoch": 0.7936,
657
+ "grad_norm": 2.28125,
658
+ "learning_rate": 1.4160013606958303e-05,
659
+ "loss": 0.2349,
660
+ "step": 372
661
+ },
662
+ {
663
+ "epoch": 0.8021333333333334,
664
+ "grad_norm": 1.7734375,
665
+ "learning_rate": 1.403062203147377e-05,
666
+ "loss": 0.2347,
667
+ "step": 376
668
+ },
669
+ {
670
+ "epoch": 0.8106666666666666,
671
+ "grad_norm": 1.8359375,
672
+ "learning_rate": 1.3900419649756895e-05,
673
+ "loss": 0.2491,
674
+ "step": 380
675
+ },
676
+ {
677
+ "epoch": 0.8192,
678
+ "grad_norm": 1.8125,
679
+ "learning_rate": 1.3769432653522436e-05,
680
+ "loss": 0.2424,
681
+ "step": 384
682
+ },
683
+ {
684
+ "epoch": 0.8277333333333333,
685
+ "grad_norm": 1.828125,
686
+ "learning_rate": 1.3637687392319443e-05,
687
+ "loss": 0.2477,
688
+ "step": 388
689
+ },
690
+ {
691
+ "epoch": 0.8362666666666667,
692
+ "grad_norm": 1.7265625,
693
+ "learning_rate": 1.3505210368230723e-05,
694
+ "loss": 0.249,
695
+ "step": 392
696
+ },
697
+ {
698
+ "epoch": 0.8448,
699
+ "grad_norm": 1.7578125,
700
+ "learning_rate": 1.3372028230541658e-05,
701
+ "loss": 0.2325,
702
+ "step": 396
703
+ },
704
+ {
705
+ "epoch": 0.8533333333333334,
706
+ "grad_norm": 1.640625,
707
+ "learning_rate": 1.3238167770379384e-05,
708
+ "loss": 0.2358,
709
+ "step": 400
710
+ },
711
+ {
712
+ "epoch": 0.8618666666666667,
713
+ "grad_norm": 1.921875,
714
+ "learning_rate": 1.3103655915323444e-05,
715
+ "loss": 0.2371,
716
+ "step": 404
717
+ },
718
+ {
719
+ "epoch": 0.8704,
720
+ "grad_norm": 1.8359375,
721
+ "learning_rate": 1.2968519723988994e-05,
722
+ "loss": 0.234,
723
+ "step": 408
724
+ },
725
+ {
726
+ "epoch": 0.8789333333333333,
727
+ "grad_norm": 1.7734375,
728
+ "learning_rate": 1.2832786380583664e-05,
729
+ "loss": 0.256,
730
+ "step": 412
731
+ },
732
+ {
733
+ "epoch": 0.8874666666666666,
734
+ "grad_norm": 1.8515625,
735
+ "learning_rate": 1.2696483189439113e-05,
736
+ "loss": 0.2462,
737
+ "step": 416
738
+ },
739
+ {
740
+ "epoch": 0.896,
741
+ "grad_norm": 1.84375,
742
+ "learning_rate": 1.2559637569518472e-05,
743
+ "loss": 0.2554,
744
+ "step": 420
745
+ },
746
+ {
747
+ "epoch": 0.9045333333333333,
748
+ "grad_norm": 1.9296875,
749
+ "learning_rate": 1.2422277048900694e-05,
750
+ "loss": 0.2495,
751
+ "step": 424
752
+ },
753
+ {
754
+ "epoch": 0.9130666666666667,
755
+ "grad_norm": 1.765625,
756
+ "learning_rate": 1.2284429259242958e-05,
757
+ "loss": 0.2407,
758
+ "step": 428
759
+ },
760
+ {
761
+ "epoch": 0.9216,
762
+ "grad_norm": 1.8671875,
763
+ "learning_rate": 1.2146121930222241e-05,
764
+ "loss": 0.2532,
765
+ "step": 432
766
+ },
767
+ {
768
+ "epoch": 0.9301333333333334,
769
+ "grad_norm": 1.8046875,
770
+ "learning_rate": 1.2007382883957186e-05,
771
+ "loss": 0.2342,
772
+ "step": 436
773
+ },
774
+ {
775
+ "epoch": 0.9386666666666666,
776
+ "grad_norm": 1.984375,
777
+ "learning_rate": 1.1868240029411351e-05,
778
+ "loss": 0.243,
779
+ "step": 440
780
+ },
781
+ {
782
+ "epoch": 0.9472,
783
+ "grad_norm": 1.8671875,
784
+ "learning_rate": 1.1728721356778994e-05,
785
+ "loss": 0.2406,
786
+ "step": 444
787
+ },
788
+ {
789
+ "epoch": 0.9557333333333333,
790
+ "grad_norm": 1.8359375,
791
+ "learning_rate": 1.158885493185453e-05,
792
+ "loss": 0.2393,
793
+ "step": 448
794
+ },
795
+ {
796
+ "epoch": 0.9642666666666667,
797
+ "grad_norm": 1.984375,
798
+ "learning_rate": 1.1448668890386765e-05,
799
+ "loss": 0.2434,
800
+ "step": 452
801
+ },
802
+ {
803
+ "epoch": 0.9728,
804
+ "grad_norm": 1.96875,
805
+ "learning_rate": 1.1308191432419078e-05,
806
+ "loss": 0.2312,
807
+ "step": 456
808
+ },
809
+ {
810
+ "epoch": 0.9813333333333333,
811
+ "grad_norm": 1.6484375,
812
+ "learning_rate": 1.1167450816616639e-05,
813
+ "loss": 0.2402,
814
+ "step": 460
815
+ },
816
+ {
817
+ "epoch": 0.9898666666666667,
818
+ "grad_norm": 2.09375,
819
+ "learning_rate": 1.102647535458186e-05,
820
+ "loss": 0.2479,
821
+ "step": 464
822
+ },
823
+ {
824
+ "epoch": 0.9984,
825
+ "grad_norm": 1.8515625,
826
+ "learning_rate": 1.0885293405159196e-05,
827
+ "loss": 0.2402,
828
+ "step": 468
829
+ },
830
+ {
831
+ "epoch": 1.0069333333333332,
832
+ "grad_norm": 1.28125,
833
+ "learning_rate": 1.0743933368730417e-05,
834
+ "loss": 0.1636,
835
+ "step": 472
836
+ },
837
+ {
838
+ "epoch": 1.0154666666666667,
839
+ "grad_norm": 1.8125,
840
+ "learning_rate": 1.0602423681501564e-05,
841
+ "loss": 0.1315,
842
+ "step": 476
843
+ },
844
+ {
845
+ "epoch": 1.024,
846
+ "grad_norm": 2.265625,
847
+ "learning_rate": 1.0460792809782659e-05,
848
+ "loss": 0.1396,
849
+ "step": 480
850
+ },
851
+ {
852
+ "epoch": 1.0325333333333333,
853
+ "grad_norm": 1.453125,
854
+ "learning_rate": 1.031906924426139e-05,
855
+ "loss": 0.1295,
856
+ "step": 484
857
+ },
858
+ {
859
+ "epoch": 1.0410666666666666,
860
+ "grad_norm": 1.484375,
861
+ "learning_rate": 1.0177281494271873e-05,
862
+ "loss": 0.1258,
863
+ "step": 488
864
+ },
865
+ {
866
+ "epoch": 1.0496,
867
+ "grad_norm": 1.9140625,
868
+ "learning_rate": 1.0035458082059672e-05,
869
+ "loss": 0.1304,
870
+ "step": 492
871
+ },
872
+ {
873
+ "epoch": 1.0581333333333334,
874
+ "grad_norm": 2.0,
875
+ "learning_rate": 9.893627537044223e-06,
876
+ "loss": 0.1347,
877
+ "step": 496
878
+ },
879
+ {
880
+ "epoch": 1.0666666666666667,
881
+ "grad_norm": 1.75,
882
+ "learning_rate": 9.751818390079805e-06,
883
+ "loss": 0.1332,
884
+ "step": 500
885
+ },
886
+ {
887
+ "epoch": 1.0752,
888
+ "grad_norm": 1.6953125,
889
+ "learning_rate": 9.61005916771623e-06,
890
+ "loss": 0.1265,
891
+ "step": 504
892
+ },
893
+ {
894
+ "epoch": 1.0837333333333334,
895
+ "grad_norm": 1.7578125,
896
+ "learning_rate": 9.468378386460406e-06,
897
+ "loss": 0.1304,
898
+ "step": 508
899
+ },
900
+ {
901
+ "epoch": 1.0922666666666667,
902
+ "grad_norm": 1.8515625,
903
+ "learning_rate": 9.326804547039894e-06,
904
+ "loss": 0.1346,
905
+ "step": 512
906
+ },
907
+ {
908
+ "epoch": 1.1008,
909
+ "grad_norm": 1.7421875,
910
+ "learning_rate": 9.185366128669682e-06,
911
+ "loss": 0.1264,
912
+ "step": 516
913
+ },
914
+ {
915
+ "epoch": 1.1093333333333333,
916
+ "grad_norm": 2.078125,
917
+ "learning_rate": 9.044091583323231e-06,
918
+ "loss": 0.1206,
919
+ "step": 520
920
+ },
921
+ {
922
+ "epoch": 1.1178666666666666,
923
+ "grad_norm": 1.6328125,
924
+ "learning_rate": 8.903009330009063e-06,
925
+ "loss": 0.1307,
926
+ "step": 524
927
+ },
928
+ {
929
+ "epoch": 1.1264,
930
+ "grad_norm": 2.15625,
931
+ "learning_rate": 8.762147749053928e-06,
932
+ "loss": 0.1338,
933
+ "step": 528
934
+ },
935
+ {
936
+ "epoch": 1.1349333333333333,
937
+ "grad_norm": 1.5625,
938
+ "learning_rate": 8.621535176393776e-06,
939
+ "loss": 0.1212,
940
+ "step": 532
941
+ },
942
+ {
943
+ "epoch": 1.1434666666666666,
944
+ "grad_norm": 1.5078125,
945
+ "learning_rate": 8.481199897873667e-06,
946
+ "loss": 0.1301,
947
+ "step": 536
948
+ },
949
+ {
950
+ "epoch": 1.152,
951
+ "grad_norm": 1.8203125,
952
+ "learning_rate": 8.341170143557733e-06,
953
+ "loss": 0.1252,
954
+ "step": 540
955
+ },
956
+ {
957
+ "epoch": 1.1605333333333334,
958
+ "grad_norm": 1.6953125,
959
+ "learning_rate": 8.201474082050376e-06,
960
+ "loss": 0.1176,
961
+ "step": 544
962
+ },
963
+ {
964
+ "epoch": 1.1690666666666667,
965
+ "grad_norm": 1.6875,
966
+ "learning_rate": 8.062139814829839e-06,
967
+ "loss": 0.1164,
968
+ "step": 548
969
+ },
970
+ {
971
+ "epoch": 1.1776,
972
+ "grad_norm": 1.703125,
973
+ "learning_rate": 7.92319537059525e-06,
974
+ "loss": 0.1326,
975
+ "step": 552
976
+ },
977
+ {
978
+ "epoch": 1.1861333333333333,
979
+ "grad_norm": 1.9296875,
980
+ "learning_rate": 7.784668699628345e-06,
981
+ "loss": 0.1334,
982
+ "step": 556
983
+ },
984
+ {
985
+ "epoch": 1.1946666666666665,
986
+ "grad_norm": 1.6796875,
987
+ "learning_rate": 7.64658766817093e-06,
988
+ "loss": 0.1253,
989
+ "step": 560
990
+ },
991
+ {
992
+ "epoch": 1.2032,
993
+ "grad_norm": 1.75,
994
+ "learning_rate": 7.508980052819274e-06,
995
+ "loss": 0.1327,
996
+ "step": 564
997
+ },
998
+ {
999
+ "epoch": 1.2117333333333333,
1000
+ "grad_norm": 1.5390625,
1001
+ "learning_rate": 7.371873534936522e-06,
1002
+ "loss": 0.1318,
1003
+ "step": 568
1004
+ },
1005
+ {
1006
+ "epoch": 1.2202666666666666,
1007
+ "grad_norm": 1.6640625,
1008
+ "learning_rate": 7.235295695084259e-06,
1009
+ "loss": 0.1266,
1010
+ "step": 572
1011
+ },
1012
+ {
1013
+ "epoch": 1.2288000000000001,
1014
+ "grad_norm": 1.7890625,
1015
+ "learning_rate": 7.0992740074743835e-06,
1016
+ "loss": 0.1275,
1017
+ "step": 576
1018
+ },
1019
+ {
1020
+ "epoch": 1.2373333333333334,
1021
+ "grad_norm": 1.8671875,
1022
+ "learning_rate": 6.963835834442336e-06,
1023
+ "loss": 0.1215,
1024
+ "step": 580
1025
+ },
1026
+ {
1027
+ "epoch": 1.2458666666666667,
1028
+ "grad_norm": 1.8046875,
1029
+ "learning_rate": 6.829008420942842e-06,
1030
+ "loss": 0.1287,
1031
+ "step": 584
1032
+ },
1033
+ {
1034
+ "epoch": 1.2544,
1035
+ "grad_norm": 2.03125,
1036
+ "learning_rate": 6.694818889069294e-06,
1037
+ "loss": 0.1206,
1038
+ "step": 588
1039
+ },
1040
+ {
1041
+ "epoch": 1.2629333333333332,
1042
+ "grad_norm": 1.6015625,
1043
+ "learning_rate": 6.561294232597817e-06,
1044
+ "loss": 0.1237,
1045
+ "step": 592
1046
+ },
1047
+ {
1048
+ "epoch": 1.2714666666666667,
1049
+ "grad_norm": 1.6875,
1050
+ "learning_rate": 6.428461311557159e-06,
1051
+ "loss": 0.1244,
1052
+ "step": 596
1053
+ },
1054
+ {
1055
+ "epoch": 1.28,
1056
+ "grad_norm": 1.828125,
1057
+ "learning_rate": 6.29634684682549e-06,
1058
+ "loss": 0.1266,
1059
+ "step": 600
1060
+ },
1061
+ {
1062
+ "epoch": 1.2885333333333333,
1063
+ "grad_norm": 1.5703125,
1064
+ "learning_rate": 6.1649774147551755e-06,
1065
+ "loss": 0.1249,
1066
+ "step": 604
1067
+ },
1068
+ {
1069
+ "epoch": 1.2970666666666666,
1070
+ "grad_norm": 1.703125,
1071
+ "learning_rate": 6.034379441826659e-06,
1072
+ "loss": 0.1211,
1073
+ "step": 608
1074
+ },
1075
+ {
1076
+ "epoch": 1.3056,
1077
+ "grad_norm": 1.6796875,
1078
+ "learning_rate": 5.904579199332443e-06,
1079
+ "loss": 0.122,
1080
+ "step": 612
1081
+ },
1082
+ {
1083
+ "epoch": 1.3141333333333334,
1084
+ "grad_norm": 2.28125,
1085
+ "learning_rate": 5.775602798092335e-06,
1086
+ "loss": 0.1299,
1087
+ "step": 616
1088
+ },
1089
+ {
1090
+ "epoch": 1.3226666666666667,
1091
+ "grad_norm": 1.6484375,
1092
+ "learning_rate": 5.6474761832009554e-06,
1093
+ "loss": 0.1238,
1094
+ "step": 620
1095
+ },
1096
+ {
1097
+ "epoch": 1.3312,
1098
+ "grad_norm": 1.6484375,
1099
+ "learning_rate": 5.520225128808555e-06,
1100
+ "loss": 0.1185,
1101
+ "step": 624
1102
+ },
1103
+ {
1104
+ "epoch": 1.3397333333333332,
1105
+ "grad_norm": 1.703125,
1106
+ "learning_rate": 5.393875232936283e-06,
1107
+ "loss": 0.1267,
1108
+ "step": 628
1109
+ },
1110
+ {
1111
+ "epoch": 1.3482666666666667,
1112
+ "grad_norm": 1.734375,
1113
+ "learning_rate": 5.2684519123268155e-06,
1114
+ "loss": 0.1272,
1115
+ "step": 632
1116
+ },
1117
+ {
1118
+ "epoch": 1.3568,
1119
+ "grad_norm": 4.125,
1120
+ "learning_rate": 5.143980397331512e-06,
1121
+ "loss": 0.1348,
1122
+ "step": 636
1123
+ },
1124
+ {
1125
+ "epoch": 1.3653333333333333,
1126
+ "grad_norm": 1.828125,
1127
+ "learning_rate": 5.02048572683502e-06,
1128
+ "loss": 0.1269,
1129
+ "step": 640
1130
+ },
1131
+ {
1132
+ "epoch": 1.3738666666666668,
1133
+ "grad_norm": 1.5625,
1134
+ "learning_rate": 4.897992743218419e-06,
1135
+ "loss": 0.1178,
1136
+ "step": 644
1137
+ },
1138
+ {
1139
+ "epoch": 1.3824,
1140
+ "grad_norm": 1.9375,
1141
+ "learning_rate": 4.776526087361896e-06,
1142
+ "loss": 0.1215,
1143
+ "step": 648
1144
+ },
1145
+ {
1146
+ "epoch": 1.3909333333333334,
1147
+ "grad_norm": 1.671875,
1148
+ "learning_rate": 4.656110193687925e-06,
1149
+ "loss": 0.1246,
1150
+ "step": 652
1151
+ },
1152
+ {
1153
+ "epoch": 1.3994666666666666,
1154
+ "grad_norm": 1.8359375,
1155
+ "learning_rate": 4.536769285246033e-06,
1156
+ "loss": 0.129,
1157
+ "step": 656
1158
+ },
1159
+ {
1160
+ "epoch": 1.408,
1161
+ "grad_norm": 1.6015625,
1162
+ "learning_rate": 4.4185273688400274e-06,
1163
+ "loss": 0.126,
1164
+ "step": 660
1165
+ },
1166
+ {
1167
+ "epoch": 1.4165333333333332,
1168
+ "grad_norm": 1.71875,
1169
+ "learning_rate": 4.301408230198763e-06,
1170
+ "loss": 0.1203,
1171
+ "step": 664
1172
+ },
1173
+ {
1174
+ "epoch": 1.4250666666666667,
1175
+ "grad_norm": 1.890625,
1176
+ "learning_rate": 4.1854354291913594e-06,
1177
+ "loss": 0.126,
1178
+ "step": 668
1179
+ },
1180
+ {
1181
+ "epoch": 1.4336,
1182
+ "grad_norm": 1.7421875,
1183
+ "learning_rate": 4.070632295087863e-06,
1184
+ "loss": 0.1243,
1185
+ "step": 672
1186
+ },
1187
+ {
1188
+ "epoch": 1.4421333333333333,
1189
+ "grad_norm": 2.015625,
1190
+ "learning_rate": 3.957021921866301e-06,
1191
+ "loss": 0.1246,
1192
+ "step": 676
1193
+ },
1194
+ {
1195
+ "epoch": 1.4506666666666668,
1196
+ "grad_norm": 1.8984375,
1197
+ "learning_rate": 3.844627163567059e-06,
1198
+ "loss": 0.128,
1199
+ "step": 680
1200
+ },
1201
+ {
1202
+ "epoch": 1.4592,
1203
+ "grad_norm": 1.6640625,
1204
+ "learning_rate": 3.7334706296955093e-06,
1205
+ "loss": 0.115,
1206
+ "step": 684
1207
+ },
1208
+ {
1209
+ "epoch": 1.4677333333333333,
1210
+ "grad_norm": 1.6328125,
1211
+ "learning_rate": 3.623574680673879e-06,
1212
+ "loss": 0.1265,
1213
+ "step": 688
1214
+ },
1215
+ {
1216
+ "epoch": 1.4762666666666666,
1217
+ "grad_norm": 1.8046875,
1218
+ "learning_rate": 3.5149614233431616e-06,
1219
+ "loss": 0.1273,
1220
+ "step": 692
1221
+ },
1222
+ {
1223
+ "epoch": 1.4848,
1224
+ "grad_norm": 1.921875,
1225
+ "learning_rate": 3.4076527065160914e-06,
1226
+ "loss": 0.1287,
1227
+ "step": 696
1228
+ },
1229
+ {
1230
+ "epoch": 1.4933333333333334,
1231
+ "grad_norm": 1.625,
1232
+ "learning_rate": 3.3016701165819943e-06,
1233
+ "loss": 0.1268,
1234
+ "step": 700
1235
+ },
1236
+ {
1237
+ "epoch": 1.5018666666666667,
1238
+ "grad_norm": 2.015625,
1239
+ "learning_rate": 3.197034973164429e-06,
1240
+ "loss": 0.1173,
1241
+ "step": 704
1242
+ },
1243
+ {
1244
+ "epoch": 1.5104,
1245
+ "grad_norm": 1.7578125,
1246
+ "learning_rate": 3.0937683248325133e-06,
1247
+ "loss": 0.1213,
1248
+ "step": 708
1249
+ },
1250
+ {
1251
+ "epoch": 1.5189333333333335,
1252
+ "grad_norm": 1.8828125,
1253
+ "learning_rate": 2.991890944866752e-06,
1254
+ "loss": 0.1234,
1255
+ "step": 712
1256
+ },
1257
+ {
1258
+ "epoch": 1.5274666666666668,
1259
+ "grad_norm": 1.6875,
1260
+ "learning_rate": 2.891423327080246e-06,
1261
+ "loss": 0.1347,
1262
+ "step": 716
1263
+ },
1264
+ {
1265
+ "epoch": 1.536,
1266
+ "grad_norm": 1.7578125,
1267
+ "learning_rate": 2.792385681696138e-06,
1268
+ "loss": 0.127,
1269
+ "step": 720
1270
+ },
1271
+ {
1272
+ "epoch": 1.5445333333333333,
1273
+ "grad_norm": 1.7265625,
1274
+ "learning_rate": 2.6947979312820825e-06,
1275
+ "loss": 0.1193,
1276
+ "step": 724
1277
+ },
1278
+ {
1279
+ "epoch": 1.5530666666666666,
1280
+ "grad_norm": 1.71875,
1281
+ "learning_rate": 2.5986797067425972e-06,
1282
+ "loss": 0.1336,
1283
+ "step": 728
1284
+ },
1285
+ {
1286
+ "epoch": 1.5615999999999999,
1287
+ "grad_norm": 1.46875,
1288
+ "learning_rate": 2.5040503433700702e-06,
1289
+ "loss": 0.1153,
1290
+ "step": 732
1291
+ },
1292
+ {
1293
+ "epoch": 1.5701333333333334,
1294
+ "grad_norm": 1.484375,
1295
+ "learning_rate": 2.4109288769552518e-06,
1296
+ "loss": 0.1195,
1297
+ "step": 736
1298
+ },
1299
+ {
1300
+ "epoch": 1.5786666666666667,
1301
+ "grad_norm": 1.8359375,
1302
+ "learning_rate": 2.3193340399579865e-06,
1303
+ "loss": 0.1188,
1304
+ "step": 740
1305
+ },
1306
+ {
1307
+ "epoch": 1.5872000000000002,
1308
+ "grad_norm": 1.6484375,
1309
+ "learning_rate": 2.229284257738946e-06,
1310
+ "loss": 0.1203,
1311
+ "step": 744
1312
+ },
1313
+ {
1314
+ "epoch": 1.5957333333333334,
1315
+ "grad_norm": 1.6953125,
1316
+ "learning_rate": 2.1407976448531776e-06,
1317
+ "loss": 0.1249,
1318
+ "step": 748
1319
+ },
1320
+ {
1321
+ "epoch": 1.6042666666666667,
1322
+ "grad_norm": 1.7734375,
1323
+ "learning_rate": 2.053892001406136e-06,
1324
+ "loss": 0.1181,
1325
+ "step": 752
1326
+ },
1327
+ {
1328
+ "epoch": 1.6128,
1329
+ "grad_norm": 1.7265625,
1330
+ "learning_rate": 1.9685848094729853e-06,
1331
+ "loss": 0.129,
1332
+ "step": 756
1333
+ },
1334
+ {
1335
+ "epoch": 1.6213333333333333,
1336
+ "grad_norm": 1.75,
1337
+ "learning_rate": 1.8848932295818945e-06,
1338
+ "loss": 0.1213,
1339
+ "step": 760
1340
+ },
1341
+ {
1342
+ "epoch": 1.6298666666666666,
1343
+ "grad_norm": 1.5859375,
1344
+ "learning_rate": 1.802834097261975e-06,
1345
+ "loss": 0.1248,
1346
+ "step": 764
1347
+ },
1348
+ {
1349
+ "epoch": 1.6383999999999999,
1350
+ "grad_norm": 1.8125,
1351
+ "learning_rate": 1.7224239196566395e-06,
1352
+ "loss": 0.1313,
1353
+ "step": 768
1354
+ },
1355
+ {
1356
+ "epoch": 1.6469333333333334,
1357
+ "grad_norm": 2.0,
1358
+ "learning_rate": 1.6436788722029906e-06,
1359
+ "loss": 0.1283,
1360
+ "step": 772
1361
+ },
1362
+ {
1363
+ "epoch": 1.6554666666666666,
1364
+ "grad_norm": 1.578125,
1365
+ "learning_rate": 1.5666147953779376e-06,
1366
+ "loss": 0.1176,
1367
+ "step": 776
1368
+ },
1369
+ {
1370
+ "epoch": 1.6640000000000001,
1371
+ "grad_norm": 1.6875,
1372
+ "learning_rate": 1.4912471915117189e-06,
1373
+ "loss": 0.1203,
1374
+ "step": 780
1375
+ },
1376
+ {
1377
+ "epoch": 1.6725333333333334,
1378
+ "grad_norm": 1.765625,
1379
+ "learning_rate": 1.417591221669412e-06,
1380
+ "loss": 0.1218,
1381
+ "step": 784
1382
+ },
1383
+ {
1384
+ "epoch": 1.6810666666666667,
1385
+ "grad_norm": 1.578125,
1386
+ "learning_rate": 1.3456617026011233e-06,
1387
+ "loss": 0.1226,
1388
+ "step": 788
1389
+ },
1390
+ {
1391
+ "epoch": 1.6896,
1392
+ "grad_norm": 1.734375,
1393
+ "learning_rate": 1.2754731037614122e-06,
1394
+ "loss": 0.1265,
1395
+ "step": 792
1396
+ },
1397
+ {
1398
+ "epoch": 1.6981333333333333,
1399
+ "grad_norm": 1.8046875,
1400
+ "learning_rate": 1.207039544398607e-06,
1401
+ "loss": 0.12,
1402
+ "step": 796
1403
+ },
1404
+ {
1405
+ "epoch": 1.7066666666666666,
1406
+ "grad_norm": 1.8828125,
1407
+ "learning_rate": 1.1403747907145546e-06,
1408
+ "loss": 0.1325,
1409
+ "step": 800
1410
+ },
1411
+ {
1412
+ "epoch": 1.7151999999999998,
1413
+ "grad_norm": 1.6796875,
1414
+ "learning_rate": 1.0754922530953737e-06,
1415
+ "loss": 0.125,
1416
+ "step": 804
1417
+ },
1418
+ {
1419
+ "epoch": 1.7237333333333333,
1420
+ "grad_norm": 1.625,
1421
+ "learning_rate": 1.0124049834138205e-06,
1422
+ "loss": 0.1161,
1423
+ "step": 808
1424
+ },
1425
+ {
1426
+ "epoch": 1.7322666666666666,
1427
+ "grad_norm": 1.5,
1428
+ "learning_rate": 9.511256724037443e-07,
1429
+ "loss": 0.1244,
1430
+ "step": 812
1431
+ },
1432
+ {
1433
+ "epoch": 1.7408000000000001,
1434
+ "grad_norm": 1.765625,
1435
+ "learning_rate": 8.916666471071922e-07,
1436
+ "loss": 0.1267,
1437
+ "step": 816
1438
+ },
1439
+ {
1440
+ "epoch": 1.7493333333333334,
1441
+ "grad_norm": 1.6640625,
1442
+ "learning_rate": 8.340398683947004e-07,
1443
+ "loss": 0.1303,
1444
+ "step": 820
1445
+ },
1446
+ {
1447
+ "epoch": 1.7578666666666667,
1448
+ "grad_norm": 1.875,
1449
+ "learning_rate": 7.78256928559209e-07,
1450
+ "loss": 0.133,
1451
+ "step": 824
1452
+ },
1453
+ {
1454
+ "epoch": 1.7664,
1455
+ "grad_norm": 1.5859375,
1456
+ "learning_rate": 7.243290489841493e-07,
1457
+ "loss": 0.1188,
1458
+ "step": 828
1459
+ },
1460
+ {
1461
+ "epoch": 1.7749333333333333,
1462
+ "grad_norm": 1.7578125,
1463
+ "learning_rate": 6.722670778861284e-07,
1464
+ "loss": 0.1247,
1465
+ "step": 832
1466
+ },
1467
+ {
1468
+ "epoch": 1.7834666666666665,
1469
+ "grad_norm": 1.671875,
1470
+ "learning_rate": 6.22081488132682e-07,
1471
+ "loss": 0.1215,
1472
+ "step": 836
1473
+ },
1474
+ {
1475
+ "epoch": 1.792,
1476
+ "grad_norm": 1.53125,
1477
+ "learning_rate": 5.737823751355465e-07,
1478
+ "loss": 0.1203,
1479
+ "step": 840
1480
+ },
1481
+ {
1482
+ "epoch": 1.8005333333333333,
1483
+ "grad_norm": 1.6953125,
1484
+ "learning_rate": 5.273794548198374e-07,
1485
+ "loss": 0.1283,
1486
+ "step": 844
1487
+ },
1488
+ {
1489
+ "epoch": 1.8090666666666668,
1490
+ "grad_norm": 1.671875,
1491
+ "learning_rate": 4.828820616695873e-07,
1492
+ "loss": 0.1262,
1493
+ "step": 848
1494
+ },
1495
+ {
1496
+ "epoch": 1.8176,
1497
+ "grad_norm": 1.7265625,
1498
+ "learning_rate": 4.4029914685000176e-07,
1499
+ "loss": 0.1111,
1500
+ "step": 852
1501
+ },
1502
+ {
1503
+ "epoch": 1.8261333333333334,
1504
+ "grad_norm": 1.875,
1505
+ "learning_rate": 3.9963927640683243e-07,
1506
+ "loss": 0.1228,
1507
+ "step": 856
1508
+ },
1509
+ {
1510
+ "epoch": 1.8346666666666667,
1511
+ "grad_norm": 1.59375,
1512
+ "learning_rate": 3.6091062954321634e-07,
1513
+ "loss": 0.1293,
1514
+ "step": 860
1515
+ },
1516
+ {
1517
+ "epoch": 1.8432,
1518
+ "grad_norm": 1.7421875,
1519
+ "learning_rate": 3.241209969743353e-07,
1520
+ "loss": 0.1201,
1521
+ "step": 864
1522
+ },
1523
+ {
1524
+ "epoch": 1.8517333333333332,
1525
+ "grad_norm": 2.390625,
1526
+ "learning_rate": 2.892777793602175e-07,
1527
+ "loss": 0.1187,
1528
+ "step": 868
1529
+ },
1530
+ {
1531
+ "epoch": 1.8602666666666665,
1532
+ "grad_norm": 1.7265625,
1533
+ "learning_rate": 2.563879858170215e-07,
1534
+ "loss": 0.1276,
1535
+ "step": 872
1536
+ },
1537
+ {
1538
+ "epoch": 1.8688,
1539
+ "grad_norm": 1.765625,
1540
+ "learning_rate": 2.2545823250705867e-07,
1541
+ "loss": 0.1334,
1542
+ "step": 876
1543
+ },
1544
+ {
1545
+ "epoch": 1.8773333333333333,
1546
+ "grad_norm": 1.96875,
1547
+ "learning_rate": 1.9649474130788438e-07,
1548
+ "loss": 0.114,
1549
+ "step": 880
1550
+ },
1551
+ {
1552
+ "epoch": 1.8858666666666668,
1553
+ "grad_norm": 1.765625,
1554
+ "learning_rate": 1.6950333856069369e-07,
1555
+ "loss": 0.1221,
1556
+ "step": 884
1557
+ },
1558
+ {
1559
+ "epoch": 1.8944,
1560
+ "grad_norm": 1.8046875,
1561
+ "learning_rate": 1.4448945389827772e-07,
1562
+ "loss": 0.1243,
1563
+ "step": 888
1564
+ },
1565
+ {
1566
+ "epoch": 1.9029333333333334,
1567
+ "grad_norm": 1.765625,
1568
+ "learning_rate": 1.2145811915280414e-07,
1569
+ "loss": 0.1174,
1570
+ "step": 892
1571
+ },
1572
+ {
1573
+ "epoch": 1.9114666666666666,
1574
+ "grad_norm": 1.9921875,
1575
+ "learning_rate": 1.004139673435922e-07,
1576
+ "loss": 0.1165,
1577
+ "step": 896
1578
+ },
1579
+ {
1580
+ "epoch": 1.92,
1581
+ "grad_norm": 2.921875,
1582
+ "learning_rate": 8.136123174513843e-08,
1583
+ "loss": 0.1351,
1584
+ "step": 900
1585
+ },
1586
+ {
1587
+ "epoch": 1.9285333333333332,
1588
+ "grad_norm": 1.640625,
1589
+ "learning_rate": 6.430374503553439e-08,
1590
+ "loss": 0.1208,
1591
+ "step": 904
1592
+ },
1593
+ {
1594
+ "epoch": 1.9370666666666667,
1595
+ "grad_norm": 1.6640625,
1596
+ "learning_rate": 4.924493852549006e-08,
1597
+ "loss": 0.1178,
1598
+ "step": 908
1599
+ },
1600
+ {
1601
+ "epoch": 1.9456,
1602
+ "grad_norm": 1.78125,
1603
+ "learning_rate": 3.618784146807497e-08,
1604
+ "loss": 0.1184,
1605
+ "step": 912
1606
+ },
1607
+ {
1608
+ "epoch": 1.9541333333333335,
1609
+ "grad_norm": 1.7578125,
1610
+ "learning_rate": 2.513508044935775e-08,
1611
+ "loss": 0.1235,
1612
+ "step": 916
1613
+ },
1614
+ {
1615
+ "epoch": 1.9626666666666668,
1616
+ "grad_norm": 1.6171875,
1617
+ "learning_rate": 1.6088878860032187e-08,
1618
+ "loss": 0.1231,
1619
+ "step": 920
1620
+ },
1621
+ {
1622
+ "epoch": 1.9712,
1623
+ "grad_norm": 2.03125,
1624
+ "learning_rate": 9.051056448160511e-09,
1625
+ "loss": 0.1263,
1626
+ "step": 924
1627
+ },
1628
+ {
1629
+ "epoch": 1.9797333333333333,
1630
+ "grad_norm": 1.75,
1631
+ "learning_rate": 4.023028953106245e-09,
1632
+ "loss": 0.1271,
1633
+ "step": 928
1634
+ },
1635
+ {
1636
+ "epoch": 1.9882666666666666,
1637
+ "grad_norm": 1.6875,
1638
+ "learning_rate": 1.0058078207453303e-09,
1639
+ "loss": 0.1201,
1640
+ "step": 932
1641
+ },
1642
+ {
1643
+ "epoch": 1.9968,
1644
+ "grad_norm": 1.953125,
1645
+ "learning_rate": 0.0,
1646
+ "loss": 0.1226,
1647
+ "step": 936
1648
+ }
1649
+ ],
1650
+ "logging_steps": 4,
1651
+ "max_steps": 936,
1652
+ "num_input_tokens_seen": 0,
1653
+ "num_train_epochs": 2,
1654
+ "save_steps": 500,
1655
+ "stateful_callbacks": {
1656
+ "TrainerControl": {
1657
+ "args": {
1658
+ "should_epoch_stop": false,
1659
+ "should_evaluate": false,
1660
+ "should_log": false,
1661
+ "should_save": true,
1662
+ "should_training_stop": true
1663
+ },
1664
+ "attributes": {}
1665
+ }
1666
+ },
1667
+ "total_flos": 6.214928345734316e+19,
1668
+ "train_batch_size": 1,
1669
+ "trial_name": null,
1670
+ "trial_params": null
1671
+ }
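trainer_state.json records the full training trajectory: 936 optimizer steps over roughly two epochs with batch size 1 per device, loss logged every 4 steps, a learning rate that warms up to about 2e-05 and then decays to zero, and a training loss that falls from around 0.26 in the first epoch to around 0.12 in the second. A minimal sketch for pulling the loss curve out of log_history (assumes the file has been downloaded to the working directory):

import json

with open("trainer_state.json") as f:
    state = json.load(f)

logs = [e for e in state["log_history"] if "loss" in e]   # keep only entries that carry a loss
steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
print(f"{len(logs)} logged points; final loss {losses[-1]:.4f} at step {steps[-1]}")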
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a6bcd441fcd08e9f847dfca832eddb3952efa99e41aac13b551227030ee3795d
3
+ size 5432
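training_args.bin is the pickled TrainingArguments object for this run. A hedged way to inspect it (requires a transformers version compatible with the one used to save it, and weights_only=False because the file is a plain pickle; the printed attributes are standard TrainingArguments fields, not values confirmed by this upload):

import torch

args = torch.load("training_args.bin", map_location="cpu", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)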