JW17 committed on
Commit 99f7d7b · verified · 1 parent: ba6d8ea

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes.
Files changed (50):
  1. .gitattributes +23 -0
  2. checkpoint-1000/training_args.bin +3 -0
  3. checkpoint-10000/model.safetensors +3 -0
  4. checkpoint-100000/trainer_state.json +3 -0
  5. checkpoint-11000/config.json +33 -0
  6. checkpoint-11000/generation_config.json +6 -0
  7. checkpoint-11000/special_tokens_map.json +16 -0
  8. checkpoint-11000/tokenizer.json +0 -0
  9. checkpoint-11000/tokenizer_config.json +240 -0
  10. checkpoint-11000/trainer_state.json +0 -0
  11. checkpoint-11000/training_args.bin +3 -0
  12. checkpoint-14000/model.safetensors +3 -0
  13. checkpoint-15000/config.json +33 -0
  14. checkpoint-15000/generation_config.json +6 -0
  15. checkpoint-15000/special_tokens_map.json +16 -0
  16. checkpoint-15000/tokenizer.json +0 -0
  17. checkpoint-15000/tokenizer_config.json +240 -0
  18. checkpoint-15000/trainer_state.json +0 -0
  19. checkpoint-15000/training_args.bin +3 -0
  20. checkpoint-16000/model.safetensors +3 -0
  21. checkpoint-17000/model.safetensors +3 -0
  22. checkpoint-20000/training_args.bin +3 -0
  23. checkpoint-21000/model.safetensors +3 -0
  24. checkpoint-22000/model.safetensors +3 -0
  25. checkpoint-25000/model.safetensors +3 -0
  26. checkpoint-25000/training_args.bin +3 -0
  27. checkpoint-26000/model.safetensors +3 -0
  28. checkpoint-27000/model.safetensors +3 -0
  29. checkpoint-28000/model.safetensors +3 -0
  30. checkpoint-29000/model.safetensors +3 -0
  31. checkpoint-30000/model.safetensors +3 -0
  32. checkpoint-30000/training_args.bin +3 -0
  33. checkpoint-31000/model.safetensors +3 -0
  34. checkpoint-32000/model.safetensors +3 -0
  35. checkpoint-33000/model.safetensors +3 -0
  36. checkpoint-34000/model.safetensors +3 -0
  37. checkpoint-35000/model.safetensors +3 -0
  38. checkpoint-35000/training_args.bin +3 -0
  39. checkpoint-38000/model.safetensors +3 -0
  40. checkpoint-39000/model.safetensors +3 -0
  41. checkpoint-41000/model.safetensors +3 -0
  42. checkpoint-43000/model.safetensors +3 -0
  43. checkpoint-44000/model.safetensors +3 -0
  44. checkpoint-45000/model.safetensors +3 -0
  45. checkpoint-46000/model.safetensors +3 -0
  46. checkpoint-48000/model.safetensors +3 -0
  47. checkpoint-50000/model.safetensors +3 -0
  48. checkpoint-50000/training_args.bin +3 -0
  49. checkpoint-52000/config.json +33 -0
  50. checkpoint-52000/generation_config.json +6 -0
.gitattributes CHANGED
@@ -39,3 +39,26 @@ checkpoint-99000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
 checkpoint-94000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
 checkpoint-85000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
 checkpoint-87000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-84000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-73000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-97000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-79000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-74000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-83000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-76000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-89000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-93000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-88000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-100000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-86000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-98000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-72000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-90000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-82000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-78000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-81000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-92000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-91000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-77000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-96000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-95000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
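The 23 added rules above route each large trainer_state.json through Git LFS, so the repository stores a small pointer file in place of the multi-megabyte JSON. As a rough illustration (not part of this commit; the helper name is made up, and fnmatch only approximates Git's gitignore-style pattern matching), a path can be checked against these rules like this:

```python
# Sketch: decide whether .gitattributes routes a path through Git LFS.
# Standard library only; `is_lfs_tracked` is a hypothetical helper.
from fnmatch import fnmatch
from pathlib import Path

def is_lfs_tracked(path: str, gitattributes: str = ".gitattributes") -> bool:
    for line in Path(gitattributes).read_text().splitlines():
        parts = line.split()
        # An LFS rule looks like: <pattern> filter=lfs diff=lfs merge=lfs -text
        if len(parts) >= 2 and "filter=lfs" in parts[1:] and fnmatch(path, parts[0]):
            return True
    return False

print(is_lfs_tracked("checkpoint-84000/trainer_state.json"))  # True after this commit
```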
checkpoint-1000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
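Every binary file added in this commit is stored as a three-line Git LFS pointer like the one above: the spec version, a sha256 object id, and the blob's byte size. A minimal sketch (an assumed helper, not from the repo) that parses a pointer and verifies a separately fetched blob against it:

```python
# Sketch: parse a Git LFS pointer file and verify a downloaded blob
# against its recorded oid and size. Paths are assumptions.
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    fields = dict(line.split(" ", 1) for line in Path(pointer_path).read_text().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def verify_blob(blob_path: str, pointer: dict) -> bool:
    data = Path(blob_path).read_bytes()
    return len(data) == pointer["size"] and hashlib.sha256(data).hexdigest() == pointer["oid"]

ptr = parse_lfs_pointer("checkpoint-1000/training_args.bin")  # the 3-line pointer above
print(ptr)  # {'oid': '16204b05...', 'size': 5368}
```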
checkpoint-10000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:20bcd790dab74ba0af68bfe3f4ba68a0fd0ac77ffe3140b743535a0d8a01d0f1
+size 57791008
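Each model.safetensors pointer in this commit records the same 57,791,008-byte size, consistent with roughly 14M float32 parameters and the SmolLM-14m base model named in the configs below. Once the real blob has been fetched (for example via `git lfs pull`), the weights load as a plain tensor dict; a sketch assuming the checkpoint directory is local:

```python
# Sketch: load the materialized safetensors blob behind the pointer above.
from safetensors.torch import load_file

state_dict = load_file("checkpoint-10000/model.safetensors")
n_params = sum(t.numel() for t in state_dict.values())
print(f"{n_params:,} parameters")  # ~14M float32 params, matching the 57,791,008-byte file
```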
checkpoint-100000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7304a36eed7a4322e86bab45304dba1b0ac67271ea3be4367d83ce612631cde6
+size 14919597
checkpoint-11000/config.json ADDED
@@ -0,0 +1,33 @@
+{
+  "_name_or_path": "JW17/SmolLM-14m-v0.1",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "flash_attn": true,
+  "head_dim": 32,
+  "hidden_act": "silu",
+  "hidden_size": 128,
+  "initializer_range": 0.02,
+  "intermediate_size": 512,
+  "is_llama_config": true,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 4,
+  "num_hidden_layers": 6,
+  "num_key_value_heads": 4,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_interleaved": false,
+  "rope_scaling": null,
+  "rope_theta": 100000,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.1",
+  "use_cache": true,
+  "vocab_size": 50280
+}
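The config describes a small 6-layer Llama-style model (hidden size 128, 4 attention heads, vocabulary of 50,280) initialized from JW17/SmolLM-14m-v0.1. A sketch of loading one checkpoint directory with transformers, assuming the LFS blobs have been materialized locally:

```python
# Sketch: load a checkpoint directory from this commit with transformers.
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("checkpoint-11000")
assert config.model_type == "llama" and config.num_hidden_layers == 6

model = AutoModelForCausalLM.from_pretrained("checkpoint-11000")  # LlamaForCausalLM
print(sum(p.numel() for p in model.parameters()))                 # ~14M parameters
```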
checkpoint-11000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "transformers_version": "4.48.1"
+}
checkpoint-11000/special_tokens_map.json ADDED
@@ -0,0 +1,16 @@
+{
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|padding|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
checkpoint-11000/tokenizer.json ADDED
The diff for this file is too large to render.
checkpoint-11000/tokenizer_config.json ADDED
@@ -0,0 +1,240 @@
+{
+  "add_bos_token": false,
+  "add_eos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "|||IP_ADDRESS|||",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "1": {
+      "content": "<|padding|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "50254": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50255": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50256": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50257": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50258": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50259": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50260": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50261": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50262": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50263": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50264": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50265": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50266": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50267": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50268": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50269": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50270": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50271": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50272": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50273": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50274": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50275": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50276": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50277": {
+      "content": "|||EMAIL_ADDRESS|||",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50278": {
+      "content": "|||PHONE_NUMBER|||",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50279": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": null,
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "extra_special_tokens": {},
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|padding|>",
+  "tokenizer_class": "GPTNeoXTokenizer",
+  "unk_token": null
+}
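Besides the added-token table, the tokenizer config ships a Zephyr-style chat_template built around <|user|>, <|system|>, and <|assistant|> markers, with <|endoftext|> as the turn terminator. A sketch of rendering it (checkpoint path assumed local):

```python
# Sketch: render the chat template shipped in this tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("checkpoint-11000")
messages = [{"role": "user", "content": "Hello!"}]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)  # roughly: "<|user|>\nHello!<|endoftext|>\n<|assistant|>"
```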
checkpoint-11000/trainer_state.json ADDED
The diff for this file is too large to render.
checkpoint-11000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
checkpoint-14000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d51a162f642cfcdb8d324a7701269730330216bc111349518d2c8829764d1e2c
+size 57791008
checkpoint-15000/config.json ADDED
@@ -0,0 +1,33 @@
+{
+  "_name_or_path": "JW17/SmolLM-14m-v0.1",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "flash_attn": true,
+  "head_dim": 32,
+  "hidden_act": "silu",
+  "hidden_size": 128,
+  "initializer_range": 0.02,
+  "intermediate_size": 512,
+  "is_llama_config": true,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 4,
+  "num_hidden_layers": 6,
+  "num_key_value_heads": 4,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_interleaved": false,
+  "rope_scaling": null,
+  "rope_theta": 100000,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.1",
+  "use_cache": true,
+  "vocab_size": 50280
+}
checkpoint-15000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "transformers_version": "4.48.1"
+}
checkpoint-15000/special_tokens_map.json ADDED
@@ -0,0 +1,16 @@
+{
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|padding|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
checkpoint-15000/tokenizer.json ADDED
The diff for this file is too large to render.
checkpoint-15000/tokenizer_config.json ADDED
@@ -0,0 +1,240 @@
+{
+  "add_bos_token": false,
+  "add_eos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "|||IP_ADDRESS|||",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "1": {
+      "content": "<|padding|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "50254": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50255": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50256": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50257": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50258": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50259": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50260": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50261": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50262": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50263": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50264": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50265": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50266": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50267": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50268": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50269": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50270": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50271": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50272": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50273": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50274": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50275": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50276": {
+      "content": " ",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50277": {
+      "content": "|||EMAIL_ADDRESS|||",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50278": {
+      "content": "|||PHONE_NUMBER|||",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "50279": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": null,
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "extra_special_tokens": {},
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|padding|>",
+  "tokenizer_class": "GPTNeoXTokenizer",
+  "unk_token": null
+}
checkpoint-15000/trainer_state.json ADDED
The diff for this file is too large to render.
checkpoint-15000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
checkpoint-16000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:00218322afd086f93ca2249f820f606c4537e3d40c1de98deb3f1e7fd97ce1bb
+size 57791008
checkpoint-17000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:236ac7d677ee0880fb243951059cced7b35b88c20153815670e7a5ad0d9a6983
+size 57791008
checkpoint-20000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
checkpoint-21000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2dc82d0aefba1729b6a67e4273b85d6afec56a2a3e57b80f7de2514262876820
+size 57791008
checkpoint-22000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8aed4ebf1809752f8b3e423d65dd7fea7f2171930dd708cf982f9aa8b24ed053
+size 57791008
checkpoint-25000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:75c47b9bf31cea231dc91d88f79ec57de2aad6c9fe318ba64f70539f3531651a
+size 57791008
checkpoint-25000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
checkpoint-26000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9d1d466717dd11d49fabdc7bc768c11cc122dfe7cd5dd56679ac12932672b5f
+size 57791008
checkpoint-27000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:60d19587c45bf7894cfce1bf0ab42e408cc44de580fe69cff382b9c3f128fab4
+size 57791008
checkpoint-28000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40680c43b4171031784e83cc286e8374f2ceeb798efe779ae2309ad7f6f7fa9d
+size 57791008
checkpoint-29000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92ddc7b1ed099c0c156d52698deaa47b2f95fe7b29d2be40f14fbe9f70aeeeb0
+size 57791008
checkpoint-30000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4964148b8a74ef242d37c73c76dfd36d33cb7a92614ffddffacd723cb937447b
+size 57791008
checkpoint-30000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
checkpoint-31000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97d8bc3c0eb967c9099da9a4c1b0d8196fcb1f1e03fa1685cedb0b7b9dd3cfd5
+size 57791008
checkpoint-32000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8df93f6c140255dca515604d78c1d1968aeec9ca56c003024351a2eed5cc1351
+size 57791008
checkpoint-33000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2959eab33f264b7c74deae221814cb8e9d1afa27198accd0f1bc801728933d6
+size 57791008
checkpoint-34000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7dbcfa1c7743ba120b023e0f5d271031ccf4c9a1ac195a42ef4b8ee720a1b53c
+size 57791008
checkpoint-35000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:573c046d18a580b611fb7462e76627ce66f25a13143283cce5fbeba9f8de0e65
+size 57791008
checkpoint-35000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
checkpoint-38000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:692ad9ed05b5ce549ab4b86c4d4b539cea26c97117e54bcd83b84e3fd45dbec8
+size 57791008
checkpoint-39000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec9b6a9c5e016b77d176454139ffcedc632832661afbcb2d959b947ed21a953e
+size 57791008
checkpoint-41000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c1d64e3c9fced5d1b4eaeeb6b00335f2e18e299a75ef35d534f129ac0d8c02d0
+size 57791008
checkpoint-43000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53011c6196ae4257463fea4e0c38a49ff307912bc01bb91a7c5938403a3d80fa
+size 57791008
checkpoint-44000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be0d1cd245c62f33d3d7bcaa3da7906d80f8b86a4c2d8d744df958155c73ba37
+size 57791008
checkpoint-45000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a60cee81ca573e2221bc4840426b84197e84881abbc5db5faf9c04ef6868fa48
+size 57791008
checkpoint-46000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11aa0e1f589e02b24341773474c81d945bfef25b84358102ff65dce2991267db
+size 57791008
checkpoint-48000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5bad0f28978ee9247df482efac376f0d3fc7418289717f45c373f7bb54841528
+size 57791008
checkpoint-50000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:80debfe102f928aaee371cebab34228c21a048f4d366fcef7b8bbc78c3472fb3
+size 57791008
checkpoint-50000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16204b05d2a9d34e94fb9daf7e1142eb825aaf16c8c84d8db978053ba172d2c9
+size 5368
checkpoint-52000/config.json ADDED
@@ -0,0 +1,33 @@
+{
+  "_name_or_path": "JW17/SmolLM-14m-v0.1",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "flash_attn": true,
+  "head_dim": 32,
+  "hidden_act": "silu",
+  "hidden_size": 128,
+  "initializer_range": 0.02,
+  "intermediate_size": 512,
+  "is_llama_config": true,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 4,
+  "num_hidden_layers": 6,
+  "num_key_value_heads": 4,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_interleaved": false,
+  "rope_scaling": null,
+  "rope_theta": 100000,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.1",
+  "use_cache": true,
+  "vocab_size": 50280
+}
checkpoint-52000/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "transformers_version": "4.48.1"
+}