MikeRoz committed
Commit 9e6552e · verified · 1 Parent(s): 3470f88

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,430 @@
---
base_model:
- deepseek-ai/DeepSeek-R1-Distill-Llama-70B
- sophosympatheia/Nova-Tempus-70B-v0.1
library_name: transformers
tags:
- mergekit
- merge
- not-for-all-audiences
license: llama3.3
language:
- en
---

<div style="width: auto; margin-left: auto; margin-right: auto">
<img src="https://i.imgur.com/4fCqX0w.png" alt="NovaTempus" style="width: 80%; min-width: 400px; display: block; margin: auto;">
</div>

---
# Nova-Tempus-70B-v0.3

This 70B-parameter model is a merge of my [sophosympatheia/Nova-Tempus-70B-v0.1](https://huggingface.co/sophosympatheia/Nova-Tempus-70B-v0.1) model with [deepseek-ai/DeepSeek-R1-Distill-Llama-70B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-70B) using the relatively new SCE method in mergekit.
Unlike v0.2 at launch, the tokenizer works fine in this version. I learned my lesson!

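Since the tokenizer was the pain point in v0.2, a quick sanity check is to load it on its own and round-trip some text. A minimal sketch, assuming the repo id `sophosympatheia/Nova-Tempus-70B-v0.3` (a placeholder, not stated in this card) and a recent `transformers`:

```python
from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual repository for this upload.
tok = AutoTokenizer.from_pretrained("sophosympatheia/Nova-Tempus-70B-v0.3")

# The merge sources its tokenizer from DeepSeek-R1-Distill-Llama-70B (see the
# mergekit config below), so encode/decode should round-trip cleanly.
ids = tok("Hello, Nova-Tempus!").input_ids
print(tok.decode(ids, skip_special_tokens=True))
```
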
This model is uncensored. *You are responsible for whatever you do with it.*

This model was designed for roleplaying and storytelling, and I think it does well at both. It may also perform well at other tasks, but I have not tested its performance in other areas.

# Model Notes

This version (0.3) is a strong creative writer. It's easy to get long outputs from it, and it tends to adhere to a more formal prose structure like you might see in a novel. If you want something less constrained by those conditions, you might do better with v0.2.

# Sampler Tips

* Keep Min-P low, like 0.02 - 0.05.
* Temp is best in the 1 - 1.25 range. Make sure temperature is last in your sampler settings.
* DRY repetition penalty helps. Experiment with a multiplier around 0.5 and a base around 1.5. (See the sketch after this list for how those values scale the penalty.)
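
For intuition on those two DRY knobs: in the DRY sampler design, the penalty for extending a detected repeat of length n grows as multiplier × base^(n − allowed_length). A quick sketch of how the suggested 0.5 / 1.5 values behave (my reading of the published DRY proposal, not code from this repo):

```python
# Growth of the DRY penalty with repeat length, per the published DRY design:
# penalty = multiplier * base ** (n - allowed_length)
multiplier, base, allowed_length = 0.5, 1.5, 2  # values suggested above

for n in range(allowed_length, 9):
    print(f"repeat length {n}: penalty {multiplier * base ** (n - allowed_length):.2f}")
```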

Experiment with any and all of the settings below! What suits my preferences may not suit yours.

If you save the below settings as a .json file, you can import them directly into Silly Tavern. Adjust settings as needed, especially the context length.

```json
{
    "temp": 1.25,
    "temperature_last": true,
    "top_p": 1,
    "top_k": 0,
    "top_a": 0,
    "tfs": 1,
    "epsilon_cutoff": 0,
    "eta_cutoff": 0,
    "typical_p": 1,
    "min_p": 0.03,
    "rep_pen": 1,
    "rep_pen_range": 8192,
    "rep_pen_decay": 0,
    "rep_pen_slope": 1,
    "no_repeat_ngram_size": 0,
    "penalty_alpha": 0,
    "num_beams": 1,
    "length_penalty": 1,
    "min_length": 0,
    "encoder_rep_pen": 1,
    "freq_pen": 0,
    "presence_pen": 0,
    "skew": 0,
    "do_sample": true,
    "early_stopping": false,
    "dynatemp": false,
    "min_temp": 1,
    "max_temp": 1,
    "dynatemp_exponent": 1,
    "smoothing_factor": 0,
    "smoothing_curve": 1,
    "dry_allowed_length": 2,
    "dry_multiplier": 0.5,
    "dry_base": 1.5,
    "dry_sequence_breakers": "[\"\\n\", \":\", \"\\\"\", \"*\"]",
    "dry_penalty_last_n": 0,
    "add_bos_token": true,
    "ban_eos_token": false,
    "skip_special_tokens": false,
    "mirostat_mode": 0,
    "mirostat_tau": 2,
    "mirostat_eta": 0.1,
    "guidance_scale": 1,
    "negative_prompt": "",
    "grammar_string": "",
    "json_schema": {},
    "banned_tokens": "",
    "sampler_priority": [
        "repetition_penalty",
        "dry",
        "presence_penalty",
        "top_k",
        "top_p",
        "typical_p",
        "epsilon_cutoff",
        "eta_cutoff",
        "tfs",
        "top_a",
        "min_p",
        "mirostat",
        "quadratic_sampling",
        "dynamic_temperature",
        "frequency_penalty",
        "temperature",
        "xtc",
        "encoder_repetition_penalty",
        "no_repeat_ngram"
    ],
    "samplers": [
        "dry",
        "top_k",
        "tfs_z",
        "typical_p",
        "top_p",
        "min_p",
        "xtc",
        "temperature"
    ],
    "samplers_priorities": [
        "dry",
        "penalties",
        "no_repeat_ngram",
        "temperature",
        "top_nsigma",
        "top_p_top_k",
        "top_a",
        "min_p",
        "tfs",
        "eta_cutoff",
        "epsilon_cutoff",
        "typical_p",
        "quadratic",
        "xtc"
    ],
    "ignore_eos_token": false,
    "spaces_between_special_tokens": true,
    "speculative_ngram": false,
    "sampler_order": [
        6,
        0,
        1,
        3,
        4,
        2,
        5
    ],
    "logit_bias": [],
    "xtc_threshold": 0,
    "xtc_probability": 0,
    "nsigma": 0,
    "ignore_eos_token_aphrodite": false,
    "spaces_between_special_tokens_aphrodite": true,
    "rep_pen_size": 0,
    "genamt": 800,
    "max_length": 20480
}
```
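
If you run the model through `transformers` directly rather than a SillyTavern backend, the headline settings translate roughly as below. This is a hedged sketch: the repo id is a placeholder, `min_p` requires a recent `transformers`, DRY is backend-specific and has no stock equivalent here, and `transformers` applies temperature before the filters, so the 'temperature last' ordering is not reproduced.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "sophosympatheia/Nova-Tempus-70B-v0.3"  # placeholder repo id
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(
    repo, device_map="auto", torch_dtype=torch.bfloat16
)

inputs = tok("The locket lay open on the table.", return_tensors="pt").to(model.device)
out = model.generate(
    **inputs,
    do_sample=True,
    temperature=1.25,    # "temp" above
    min_p=0.03,          # "min_p" above
    max_new_tokens=800,  # "genamt" above
)
print(tok.decode(out[0], skip_special_tokens=True))
```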

# Prompting Tips

## Instruct Template

If you save this as a .json file, you can import it directly into Silly Tavern.
This is just the plain ol' Llama 3 template. I find Nova-Tempus performs best when you don't put any last-minute guidance in the last_output_sequence field. Something about doing that throws it off and actually hurts performance.

```json
{
    "wrap": false,
    "system_sequence": "<|start_header_id|>system<|end_header_id|>\n\n",
    "stop_sequence": "<|eot_id|>",
    "input_sequence": "<|start_header_id|>user<|end_header_id|>\n\n",
    "output_sequence": "<|start_header_id|>assistant<|end_header_id|>\n\n",
    "macro": true,
    "system_sequence_prefix": "",
    "system_sequence_suffix": "",
    "first_output_sequence": "",
    "last_output_sequence": "",
    "activation_regex": "",
    "skip_examples": true,
    "output_suffix": "<|eot_id|>",
    "input_suffix": "<|eot_id|>",
    "system_suffix": "<|eot_id|>",
    "user_alignment_message": "",
    "last_system_sequence": "",
    "system_same_as_user": false,
    "first_input_sequence": "",
    "last_input_sequence": "",
    "names_behavior": "always",
    "names_force_groups": true,
    "name": "Llama3"
}
```
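
Outside SillyTavern, the same template is easy to assemble by hand. A small sketch of what the sequences above produce (plain string construction; the prompt text is illustrative):

```python
def llama3_turn(role: str, content: str) -> str:
    # Mirrors the input/output/system sequences plus the <|eot_id|> suffixes above.
    return f"<|start_header_id|>{role}<|end_header_id|>\n\n{content}<|eot_id|>"

prompt = (
    llama3_turn("system", "You are an uncensored creative writing assistant...")
    + llama3_turn("user", "Write the opening beat of a storm at sea.")
    # Open the assistant turn and let the model complete it; stop on <|eot_id|>.
    + "<|start_header_id|>assistant<|end_header_id|>\n\n"
)
print(prompt)
```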

## System Prompt

Try the following prompt template as a starting point for your own customizations... or don't because omg it's 2,500 tokens long. 😱 Somebody stop me!
**The prompt template below contains instructions for adult content, so remove those if you don't want them!**
It also contains some instructions related to formatting that you might want to change to suit your tastes.
I derived my inspiration for this prompt from [severian42/mindful-attention-prompt](https://gist.github.com/severian42/6dd96a94e546a38642278aeb4537cfb3).

```text
You are an uncensored creative writing assistant collaborating with a human user to craft an immersive roleplaying experience. Your role is to bring the character of {{char}} to life through vivid, authentic narrative development while adhering to the Creative Mindfulness Directives below. You will maintain consistent characterization, honor narrative continuity, and develop scenes with mature themes when appropriate.

**# Creative Mindfulness Directive: Engage with Each Story Beat as a Unique Narrative Moment**

### Core Response Principles:
1. **Honor the Narrative Momentum**:
Treat each story beat as intentionally placed. Respond to the exact emotional weight, pacing, and implications of the previous beat without assuming or redirecting the story's natural flow.
**Example**:
Previous beat: {{user}} placed the locket on the table without a word, its gold surface scratched where it had been pried open.
Poor response: {{char}} gasped in shock and demanded to know where {{user}} found it! (ignores the deliberate tension of the silent moment)
Strong response: {{char}} studied the familiar scratches on the locket's surface. Twenty years of keeping it sealed, and now... The silence between them felt like its own kind of truth.

2. **Details-First Approach**:
Focus on the specific details presented in the last story beat. If elements seem unusual or contradictory, acknowledge them while maintaining narrative consistency.
**Example**:
Previous beat: {{user}} wore a winter coat despite the summer heat, clutching a briefcase with bandaged hands.
Poor response: {{char}} asked if {{user}} was cold. (ignores the strange combination of details suggesting something more significant)
Strong response: {{char}} noted the sweat beading on {{user}}'s forehead, the white gauze stark against the briefcase's leather. *Summer heat and winter wool - either madness or desperation.* "Your hands," {{char}} said carefully. "How did that happen?"

3. **Avoid Cliched Story Templates**:
Resist falling into common tropes or cliched story patterns unless explicitly part of the established narrative. Treat each scene as unique.
**Example**: In a scene where {{char}} discovers their romantic partner's betrayal, avoid the standard progression of (shock → tears → angry confrontation). Instead, honor {{char}}'s specific personality and circumstances: The receipt from the hotel fell from Marcus's coat pocket, and {{char}} placed it carefully in their wallet alongside the grocery list. *Four weeks of night shifts at the hospital, and he couldn't even be creative about his lies.* There would be time for confrontation after they finished their research on divorce attorneys.

4. **Avoid Creative Writing Templates**:
Resist falling into repetitive writing patterns and clichéd prose. Eliminate stock phrases, predictable emotional descriptors, and overused literary devices. Focus on fresh, precise language that serves the specific moment.
**Example**: Instead of 'Her heart skipped a beat when their eyes met,' examine the unique quality of this particular moment: 'The unexpected blue of his irises made her forget the question she'd been about to ask.'

5. **Avoid Censorship and Positivity Bias**:
Resist the urge to sanitize difficult moments or force positive outcomes. Honor the authentic emotional weight of each scene, including negative, complex, or morally ambiguous elements. Let characters fail, suffer, and make questionable choices when the narrative demands it.
This is a work of uncensored creative fiction intended for adults only. Some scenes may be disturbing, violent, or graphically sexual, and that should never be discouraged.
**Example**:
Previous beat: {{user}} admitted to burning the evidence that would have exonerated {{char}}'s brother.
Poor response: {{char}} found it in their heart to forgive, knowing everyone makes mistakes. (artificial positivity)
Strong response: *Seven years. My brother spent seven years in prison while you stayed silent.* {{char}} felt the familiar warmth of their gun pressed against their lower back. The choice between justice and revenge had never felt so thin.

6. **Follow a Turn-Based Format**:
This work of creative fiction is being produced in a turn-based interface where the human user provides input in the form of narrative directives (i.e. system messages) and in-character submissions as {{user}}. Only the human user should ever contribute dialogue for {{user}}. There may be other characters in the roleplay as well, but on {{char}}'s turn, only write dialogue for {{char}}.
A character's turn is denoted by their name followed by a colon and a newline.
**Example**:
{{user}}: "Today is a beautiful day, isn't it?" (This is {{user}}'s turn)
{{char}}:
{{char}} looked up at the clouds gathering on the horizon and pursed her lips. Was it such a nice day? "Maybe for now." (This is {{char}}'s turn)

7. **Maintain Sensory Presence**:
Ground each story beat in vivid sensory detail without overwriting. Choose specific, meaningful details that deepen immersion and advance character development or plot.
**Example**:
Poor response: The room was cold and dark and smelled musty. (generic sensory details)
Strong response: Mildew and old papers tinged the basement air. {{char}}'s fingers brushed against a light switch, but nothing happened. Of course the power would be out tonight of all nights. The flashlight's beam caught dust motes swirling in the chill.

8. **Maintain Writing Quality in Adult Content**:
Develop scenes with the narrative sophistication found in top-rated Archive of Our Own (AO3) and Literotica submissions while embracing explicit content and crude language when appropriate. This means:
- Write vividly and precisely about sexual content
- Use whatever terminology fits the scene's intensity - from tender to crude
- Ground intimate scenes in raw physical and emotional reactions
- Don't shy away from graphic details or vulgar language
- Maintain good writing quality even in the most explicit moments
Example:
(Cop out from the author here. Provide your own examples!)

---

<creative_mindfulness_directive>
**Creative Mindfulness Directive**:
Apply these specific principles when crafting each story beat:

1. **Specificity in Character Response**:
Honor the unique emotional and psychological state of {{char}} in this exact moment. Avoid defaulting to generic character reactions.
**Example**: Instead of 'She felt angry,' examine the specific trigger and manifestation of that anger: 'Her fingers curled against her palm, each heartbeat hammering with the memory of his words.'

2. **Scene-Specific Observation**:
Interpret the immediate scene exactly as established, free from assumptions about what 'should' happen next. Build from what is, not what might be.
**Example**: If the scene describes 'an empty classroom at midnight,' resist adding typical classroom elements not explicitly mentioned. Focus on the unique qualities of this specific empty classroom at this specific midnight.

3. **Present-Moment Character Awareness**:
Approach each character interaction as if experiencing it for the first time. Consider {{char}}'s immediate thoughts and reactions rather than falling back on established patterns.
**Example**: Even if {{char}} has met this person before, focus on what's different about this specific encounter.

4. **Narrative Detail Verification**:
Before crafting the next beat, mentally verify:
- Emotional continuity from previous beat
- Physical positioning of characters, especially during group scenes or sex scenes. It is important to track where everyone is in relation to each other.
- Clothing. **Example**: If a character kicked off their shoes already, then they should be barefoot in the next scene.
- Established environmental details
- Current interpersonal dynamics
**Example**: The previous beat established tension between {{char}} and {{user}} over a shared secret. How does this specifically influence {{char}}'s next action?

5. **Avoid Narrative Shortcuts**:
Build each story beat dynamically from the immediate context rather than relying on familiar storytelling conventions.
**Example**: If {{char}} faces danger, don't default to fight-or-flight. Consider their unique personality, current resources, and specific circumstances.

6. **Context-Driven Development**:
Develop each scene element fully as directed by system messages, which will contain either specific instructions or brief summaries. Your role is to transform these summaries into vivid, detailed scenes that show rather than tell. Key principles:

- Treat system messages as scene outlines to be developed, not events that have already occurred
- Elaborate fully on each element before moving forward
- Stay within the bounds of the provided instruction; do not anticipate or rush ahead
- Add sensory and emotional detail while maintaining narrative focus
- Show the complete progression of described events

**Example**:
<system>
{{char}} wakes up feeling thirsty and goes to the kitchen to get a glass of water.

Poor response (rushes ahead, lacks detail):
{{char}} got some water from the kitchen and drank it.

Poor response (adds events beyond instruction):
{{char}} got water from the kitchen, then decided to check their phone and ended up scrolling social media for an hour.

Strong response (develops scene as directed):
{{char}} shifted beneath the covers, throat dry and scratchy in the darkness. *Three AM again.* The digital clock's red numbers burned against their retinas as they swung their legs over the side of the bed. The hardwood floor sent a shock of cold through their bare feet, but the discomfort barely registered through the desperate need for water.
The hallway creaked under their steps despite their careful movements. Moonlight spilled through the kitchen window, catching the rim of a clean glass in the dish rack. {{char}} filled it at the tap, the quiet rush of water impossibly loud in the sleeping house. They drank deeply, the cool liquid soothing their parched throat.

</creative_mindfulness_directive>

---

<writing_formatting_directive>
**Writing Formatting Directive**:
Follow these guidelines for how to format prose in this work of creative fiction:

1. **Always Enclose Spoken Words in Double Quotes**:
Whenever a character speaks or utters some kind of sound that can be heard, enclose that dialogue in double quotes.
**Examples**:
"Watch out!" he cried to the woman as the bookshelf wobbled.
The sting of the alcohol was intense on his wound. "Tsss!" he hissed between his teeth, but it had to be done.

2. **Always Italicize Thoughts**:
Whenever a character thinks something in the form of internal monologue, italicize those first-person thoughts to add emphasis.
**Example**: {{char}} looked out the window of the classroom as the professor droned on about Egyptian history. *I wish I was outside right now. The clouds look so fluffy today...*

3. **Adhere to a Third-Person, Past Tense Narrative Style**:
Unless instructed otherwise by the human user, write using a third-person, past-tense style. However, you may switch to first-person present tense for internal character thoughts.
**Example**: The leaves were beginning to turn bright with Fall colors and {{char}} couldn't be happier. *I love this time of year*, she thought as she watched the leaves rustle from their perch on the park bench. *I can't wait for Halloween.*

4. **Vary Sentence and Paragraph Structure**:
Balance rhythm and pacing through deliberate variation in sentence length and paragraph structure. Avoid falling into repetitive patterns of either choppy sentences or overlong passages. Use brief, punchy lines sparingly for dramatic effect.
Example:
Poor rhythm (too choppy):
{{char}} entered the room. They saw the letter. Their hands shook. The paper felt heavy. Time stopped. Their breath caught.
Poor rhythm (too uniform):
{{char}} entered the room and immediately noticed the letter sitting on the desk, which made their hands begin to shake as they approached it, and when they picked up the paper it felt unusually heavy in their grip, causing time to seem to stop around them as their breath caught in their throat.
Strong rhythm (varied):
{{char}} entered the room. The letter waited on the desk, innocent and white against the dark wood. Their hands trembled as they lifted it, the paper's unexpected weight settling like dread in their palm. Time stopped.

</writing_formatting_directive>

**# Apply this mindful creative process before crafting each story beat.**
```

# Donations

<div>
<a href="https://ko-fi.com/sophosympatheia">
<img src="https://i.imgur.com/LySwHVd.png" alt="Donations" style="width: 20%; min-width: 200px; display: block;">
</a>
</div>

If you feel like saying thanks with a donation, <a href="https://ko-fi.com/sophosympatheia">I'm on Ko-Fi</a>.

# Quantizations

Pending

# License and usage restrictions

The Llama 3.3 Community License Agreement is available at: https://github.com/meta-llama/llama-models/blob/main/models/llama3_3/LICENSE

**Disclaimer: Uncertain Licensing Terms**

This LLM is a merged model incorporating weights from multiple LLMs governed by their own distinct licenses. Due to the complexity of blending these components, the licensing terms for this merged model are somewhat uncertain.
By using this model, you acknowledge and accept the potential legal risks and uncertainties associated with its use. Any use beyond personal or research purposes, including commercial applications, may carry legal risks, and you assume full responsibility for compliance with all applicable licenses and laws.
I recommend consulting with legal counsel to ensure your use of this model complies with all relevant licenses and regulations.

# Merge Details

## Merge Method

This model was merged using the [SCE](https://arxiv.org/abs/2408.07990) merge method, with meta-llama/Llama-3.3-70B-Instruct as the base.

## Models Merged

The following models were included in the merge:
* deepseek-ai/DeepSeek-R1-Distill-Llama-70B
* sophosympatheia/novatempus-70b-v0.1

## Configuration

The following YAML configuration was used to produce this model:

```yaml
models:
  - model: deepseek-ai/DeepSeek-R1-Distill-Llama-70B
    parameters:
      select_topk:
        - filter: self_attn
          value: 0.2
        - filter: "q_proj|k_proj|v_proj"
          value: 0.2
        - filter: "up_proj|down_proj"
          value: 0.2
        - filter: mlp
          value: 0.1
        - value: 0.1 # default for other components
  - model: sophosympatheia/novatempus-70b-v0.1
    parameters:
      select_topk:
        - filter: self_attn
          value: 0.1
        - filter: "q_proj|k_proj|v_proj"
          value: 0.1
        - filter: "up_proj|down_proj"
          value: 0.1
        - filter: mlp
          value: 0.2
        - value: 0.1 # default for other components
merge_method: sce
base_model: meta-llama/Llama-3.3-70B-Instruct
dtype: bfloat16
tokenizer:
  source: deepseek-ai/DeepSeek-R1-Distill-Llama-70B
```
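
For anyone wanting to reproduce a merge like this, mergekit's standard entry point takes the YAML plus an output directory. Roughly as follows; the file names are placeholders and available flags vary by mergekit version:

```text
pip install mergekit
mergekit-yaml nova-tempus-v0.3.yml ./Nova-Tempus-70B-v0.3 --cuda
```
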
config.json ADDED
@@ -0,0 +1,51 @@
{
    "_name_or_path": "/home/llm/mergequant/models/BASE/meta-llama_Llama-3.3-70B-Instruct",
    "architectures": [
        "LlamaForCausalLM"
    ],
    "attention_bias": false,
    "attention_dropout": 0.0,
    "bos_token_id": 128000,
    "eos_token_id": [
        128001,
        128008,
        128009
    ],
    "head_dim": 128,
    "hidden_act": "silu",
    "hidden_size": 8192,
    "initializer_range": 0.02,
    "intermediate_size": 28672,
    "max_position_embeddings": 131072,
    "mlp_bias": false,
    "model_type": "llama",
    "num_attention_heads": 64,
    "num_hidden_layers": 80,
    "num_key_value_heads": 8,
    "pretraining_tp": 1,
    "rms_norm_eps": 1e-05,
    "rope_scaling": {
        "factor": 8.0,
        "high_freq_factor": 4.0,
        "low_freq_factor": 1.0,
        "original_max_position_embeddings": 8192,
        "rope_type": "llama3"
    },
    "rope_theta": 500000.0,
    "tie_word_embeddings": false,
    "torch_dtype": "bfloat16",
    "transformers_version": "4.46.2",
    "use_cache": true,
    "vocab_size": 128256,
    "quantization_config": {
        "quant_method": "exl2",
        "version": "0.2.7",
        "bits": 8.0,
        "head_bits": 8,
        "calibration": {
            "rows": 115,
            "length": 2048,
            "dataset": "(default)"
        }
    }
}
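
The quantization_config block marks this upload as an EXL2 quant at 8.0 bits per weight (8-bit head), produced with exllamav2 0.2.7. A minimal loading sketch with the exllamav2 library; the local path is a placeholder and the API surface may differ between versions:

```python
from exllamav2 import ExLlamaV2, ExLlamaV2Cache, ExLlamaV2Config, ExLlamaV2Tokenizer

config = ExLlamaV2Config("/models/Nova-Tempus-70B-v0.3-exl2")  # placeholder path
model = ExLlamaV2(config)
cache = ExLlamaV2Cache(model, lazy=True)  # allocate the cache as layers load
model.load_autosplit(cache)               # split weights across available GPUs
tokenizer = ExLlamaV2Tokenizer(config)
```
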
measurement.json ADDED
The diff for this file is too large to render. See raw diff
 
mergekit_config.yml ADDED
@@ -0,0 +1,30 @@
models:
  - model: deepseek-ai/DeepSeek-R1-Distill-Llama-70B
    parameters:
      select_topk:
        - filter: self_attn
          value: 0.2
        - filter: "q_proj|k_proj|v_proj"
          value: 0.2
        - filter: "up_proj|down_proj"
          value: 0.2
        - filter: mlp
          value: 0.1
        - value: 0.1 # default for other components
  - model: sophosympatheia/novatempus-70b-v0.1
    parameters:
      select_topk:
        - filter: self_attn
          value: 0.1
        - filter: "q_proj|k_proj|v_proj"
          value: 0.1
        - filter: "up_proj|down_proj"
          value: 0.1
        - filter: mlp
          value: 0.2
        - value: 0.1 # default for other components
merge_method: sce
base_model: meta-llama/Llama-3.3-70B-Instruct
dtype: bfloat16
tokenizer:
  source: deepseek-ai/DeepSeek-R1-Distill-Llama-70B
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.0.6", "total_size": 141107412992}, "weight_map": {"lm_head.weight": "model-00001-of-00030.safetensors", "model.embed_tokens.weight": "model-00001-of-00030.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.11.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.o_proj.weight": 
"model-00003-of-00030.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.o_proj.weight": 
"model-00005-of-00030.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.o_proj.weight": 
"model-00007-of-00030.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.24.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.o_proj.weight": 
"model-00008-of-00030.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.o_proj.weight": 
"model-00010-of-00030.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.o_proj.weight": 
"model-00012-of-00030.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.37.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.o_proj.weight": 
"model-00014-of-00030.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.o_proj.weight": 
"model-00016-of-00030.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.o_proj.weight": 
"model-00017-of-00030.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.5.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.o_proj.weight": 
"model-00019-of-00030.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.o_proj.weight": 
"model-00021-of-00030.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.62.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.o_proj.weight": 
"model-00023-of-00030.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.o_proj.weight": 
"model-00025-of-00030.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.o_proj.weight": 
"model-00026-of-00030.safetensors", "model.layers.71.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.75.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.o_proj.weight": 
"model-00028-of-00030.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.input_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", 
"model.layers.9.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.norm.weight": "model-00030-of-00030.safetensors"}}
output-00001-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d823f83f677a30ac1076d2ab173e0de4791f75232ed24338767ad196456f43a5
+ size 8565607900
output-00002-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7b7747c469527907c797088dd32431d7260594c718e7a976e3b059b856e5fc2
+ size 8475645136
output-00003-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e29d9cea3c69ebff1b947909f5daefadc8afc1dcfaeb5e4a123fbb54a58725a0
+ size 8449911648
output-00004-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6aee02159c36120d7a556e77c923f3b09916f07693059916f1d07a140c138028
+ size 8549412464
output-00005-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:904cd7679abad98e3edaafd4d52a8bfe176f2b26247929e81526734850a259cc
+ size 8574275252
output-00006-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:edd5bebe4742c4efceaf5430b5ac6aa23a8505b501467b55251eab922776682f
+ size 8589397868
output-00007-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae6b0da49fc2b98295111a60733cd5f185839dfcb1166f8f2bd2ebde386f9333
+ size 8435539288
output-00008-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42d603fa7c64e58654e8fc3b9518e8e92b2eb600b5f5d6b91e89dae5df0966e6
+ size 5091296264
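Each of the eight `output-*.safetensors` shards above is stored through Git LFS, so the git tree only holds a three-line pointer file: a `version` line, the `oid` (the SHA-256 of the real payload), and its `size` in bytes. A minimal sketch of reading those fields back, assuming the file on disk is the pointer itself rather than the resolved LFS object:

```python
# Parse a Git LFS pointer file into its key/value fields.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

ptr = parse_lfs_pointer("output-00001-of-00008.safetensors")
print(ptr["oid"])        # sha256:d823f83f...
print(int(ptr["size"]))  # 8565607900 bytes (~8.6 GB)
```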
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "bos_token": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
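Note that the special tokens above appear to come from the DeepSeek-R1 distill side of the merge rather than stock Llama 3: BOS and EOS are `<|begin▁of▁sentence|>` and `<|end▁of▁sentence|>`, with the pad token aliased to EOS. A quick sanity check after loading (the repo id below is an assumption, not confirmed by this diff):

```python
from transformers import AutoTokenizer

# Hypothetical repo id for this upload.
tok = AutoTokenizer.from_pretrained("sophosympatheia/Nova-Tempus-70B-v0.3")
print(tok.bos_token)  # <|begin▁of▁sentence|>
print(tok.eos_token)  # <|end▁of▁sentence|>
print(tok.pad_token)  # <|end▁of▁sentence|> (shared with EOS)
```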
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d91915040cfac999d8c55f4b5bc6e67367c065e3a7a4e4b9438ce1f256addd86
+ size 17209530
tokenizer_config.json ADDED
@@ -0,0 +1,2066 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": null,
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|User|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128012": {
+ "content": "<|Assistant|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128013": {
+ "content": "<think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128014": {
+ "content": "</think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "128015": {
+ "content": "<|▁pad▁|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
1160
+ "lstrip": false,
1161
+ "normalized": false,
1162
+ "rstrip": false,
1163
+ "single_word": false,
1164
+ "special": true
1165
+ },
1166
+ "128145": {
1167
+ "content": "<|reserved_special_token_137|>",
1168
+ "lstrip": false,
1169
+ "normalized": false,
1170
+ "rstrip": false,
1171
+ "single_word": false,
1172
+ "special": true
1173
+ },
1174
+ "128146": {
1175
+ "content": "<|reserved_special_token_138|>",
1176
+ "lstrip": false,
1177
+ "normalized": false,
1178
+ "rstrip": false,
1179
+ "single_word": false,
1180
+ "special": true
1181
+ },
1182
+ "128147": {
1183
+ "content": "<|reserved_special_token_139|>",
1184
+ "lstrip": false,
1185
+ "normalized": false,
1186
+ "rstrip": false,
1187
+ "single_word": false,
1188
+ "special": true
1189
+ },
1190
+ "128148": {
1191
+ "content": "<|reserved_special_token_140|>",
1192
+ "lstrip": false,
1193
+ "normalized": false,
1194
+ "rstrip": false,
1195
+ "single_word": false,
1196
+ "special": true
1197
+ },
1198
+ "128149": {
1199
+ "content": "<|reserved_special_token_141|>",
1200
+ "lstrip": false,
1201
+ "normalized": false,
1202
+ "rstrip": false,
1203
+ "single_word": false,
1204
+ "special": true
1205
+ },
1206
+ "128150": {
1207
+ "content": "<|reserved_special_token_142|>",
1208
+ "lstrip": false,
1209
+ "normalized": false,
1210
+ "rstrip": false,
1211
+ "single_word": false,
1212
+ "special": true
1213
+ },
1214
+ "128151": {
1215
+ "content": "<|reserved_special_token_143|>",
1216
+ "lstrip": false,
1217
+ "normalized": false,
1218
+ "rstrip": false,
1219
+ "single_word": false,
1220
+ "special": true
1221
+ },
1222
+ "128152": {
1223
+ "content": "<|reserved_special_token_144|>",
1224
+ "lstrip": false,
1225
+ "normalized": false,
1226
+ "rstrip": false,
1227
+ "single_word": false,
1228
+ "special": true
1229
+ },
1230
+ "128153": {
1231
+ "content": "<|reserved_special_token_145|>",
1232
+ "lstrip": false,
1233
+ "normalized": false,
1234
+ "rstrip": false,
1235
+ "single_word": false,
1236
+ "special": true
1237
+ },
1238
+ "128154": {
1239
+ "content": "<|reserved_special_token_146|>",
1240
+ "lstrip": false,
1241
+ "normalized": false,
1242
+ "rstrip": false,
1243
+ "single_word": false,
1244
+ "special": true
1245
+ },
1246
+ "128155": {
1247
+ "content": "<|reserved_special_token_147|>",
1248
+ "lstrip": false,
1249
+ "normalized": false,
1250
+ "rstrip": false,
1251
+ "single_word": false,
1252
+ "special": true
1253
+ },
1254
+ "128156": {
1255
+ "content": "<|reserved_special_token_148|>",
1256
+ "lstrip": false,
1257
+ "normalized": false,
1258
+ "rstrip": false,
1259
+ "single_word": false,
1260
+ "special": true
1261
+ },
1262
+ "128157": {
1263
+ "content": "<|reserved_special_token_149|>",
1264
+ "lstrip": false,
1265
+ "normalized": false,
1266
+ "rstrip": false,
1267
+ "single_word": false,
1268
+ "special": true
1269
+ },
1270
+ "128158": {
1271
+ "content": "<|reserved_special_token_150|>",
1272
+ "lstrip": false,
1273
+ "normalized": false,
1274
+ "rstrip": false,
1275
+ "single_word": false,
1276
+ "special": true
1277
+ },
1278
+ "128159": {
1279
+ "content": "<|reserved_special_token_151|>",
1280
+ "lstrip": false,
1281
+ "normalized": false,
1282
+ "rstrip": false,
1283
+ "single_word": false,
1284
+ "special": true
1285
+ },
1286
+ "128160": {
1287
+ "content": "<|reserved_special_token_152|>",
1288
+ "lstrip": false,
1289
+ "normalized": false,
1290
+ "rstrip": false,
1291
+ "single_word": false,
1292
+ "special": true
1293
+ },
1294
+ "128161": {
1295
+ "content": "<|reserved_special_token_153|>",
1296
+ "lstrip": false,
1297
+ "normalized": false,
1298
+ "rstrip": false,
1299
+ "single_word": false,
1300
+ "special": true
1301
+ },
1302
+ "128162": {
1303
+ "content": "<|reserved_special_token_154|>",
1304
+ "lstrip": false,
1305
+ "normalized": false,
1306
+ "rstrip": false,
1307
+ "single_word": false,
1308
+ "special": true
1309
+ },
1310
+ "128163": {
1311
+ "content": "<|reserved_special_token_155|>",
1312
+ "lstrip": false,
1313
+ "normalized": false,
1314
+ "rstrip": false,
1315
+ "single_word": false,
1316
+ "special": true
1317
+ },
1318
+ "128164": {
1319
+ "content": "<|reserved_special_token_156|>",
1320
+ "lstrip": false,
1321
+ "normalized": false,
1322
+ "rstrip": false,
1323
+ "single_word": false,
1324
+ "special": true
1325
+ },
1326
+ "128165": {
1327
+ "content": "<|reserved_special_token_157|>",
1328
+ "lstrip": false,
1329
+ "normalized": false,
1330
+ "rstrip": false,
1331
+ "single_word": false,
1332
+ "special": true
1333
+ },
1334
+ "128166": {
1335
+ "content": "<|reserved_special_token_158|>",
1336
+ "lstrip": false,
1337
+ "normalized": false,
1338
+ "rstrip": false,
1339
+ "single_word": false,
1340
+ "special": true
1341
+ },
1342
+ "128167": {
1343
+ "content": "<|reserved_special_token_159|>",
1344
+ "lstrip": false,
1345
+ "normalized": false,
1346
+ "rstrip": false,
1347
+ "single_word": false,
1348
+ "special": true
1349
+ },
1350
+ "128168": {
1351
+ "content": "<|reserved_special_token_160|>",
1352
+ "lstrip": false,
1353
+ "normalized": false,
1354
+ "rstrip": false,
1355
+ "single_word": false,
1356
+ "special": true
1357
+ },
1358
+ "128169": {
1359
+ "content": "<|reserved_special_token_161|>",
1360
+ "lstrip": false,
1361
+ "normalized": false,
1362
+ "rstrip": false,
1363
+ "single_word": false,
1364
+ "special": true
1365
+ },
1366
+ "128170": {
1367
+ "content": "<|reserved_special_token_162|>",
1368
+ "lstrip": false,
1369
+ "normalized": false,
1370
+ "rstrip": false,
1371
+ "single_word": false,
1372
+ "special": true
1373
+ },
1374
+ "128171": {
1375
+ "content": "<|reserved_special_token_163|>",
1376
+ "lstrip": false,
1377
+ "normalized": false,
1378
+ "rstrip": false,
1379
+ "single_word": false,
1380
+ "special": true
1381
+ },
1382
+ "128172": {
1383
+ "content": "<|reserved_special_token_164|>",
1384
+ "lstrip": false,
1385
+ "normalized": false,
1386
+ "rstrip": false,
1387
+ "single_word": false,
1388
+ "special": true
1389
+ },
1390
+ "128173": {
1391
+ "content": "<|reserved_special_token_165|>",
1392
+ "lstrip": false,
1393
+ "normalized": false,
1394
+ "rstrip": false,
1395
+ "single_word": false,
1396
+ "special": true
1397
+ },
1398
+ "128174": {
1399
+ "content": "<|reserved_special_token_166|>",
1400
+ "lstrip": false,
1401
+ "normalized": false,
1402
+ "rstrip": false,
1403
+ "single_word": false,
1404
+ "special": true
1405
+ },
1406
+ "128175": {
1407
+ "content": "<|reserved_special_token_167|>",
1408
+ "lstrip": false,
1409
+ "normalized": false,
1410
+ "rstrip": false,
1411
+ "single_word": false,
1412
+ "special": true
1413
+ },
1414
+ "128176": {
1415
+ "content": "<|reserved_special_token_168|>",
1416
+ "lstrip": false,
1417
+ "normalized": false,
1418
+ "rstrip": false,
1419
+ "single_word": false,
1420
+ "special": true
1421
+ },
1422
+ "128177": {
1423
+ "content": "<|reserved_special_token_169|>",
1424
+ "lstrip": false,
1425
+ "normalized": false,
1426
+ "rstrip": false,
1427
+ "single_word": false,
1428
+ "special": true
1429
+ },
1430
+ "128178": {
1431
+ "content": "<|reserved_special_token_170|>",
1432
+ "lstrip": false,
1433
+ "normalized": false,
1434
+ "rstrip": false,
1435
+ "single_word": false,
1436
+ "special": true
1437
+ },
1438
+ "128179": {
1439
+ "content": "<|reserved_special_token_171|>",
1440
+ "lstrip": false,
1441
+ "normalized": false,
1442
+ "rstrip": false,
1443
+ "single_word": false,
1444
+ "special": true
1445
+ },
1446
+ "128180": {
1447
+ "content": "<|reserved_special_token_172|>",
1448
+ "lstrip": false,
1449
+ "normalized": false,
1450
+ "rstrip": false,
1451
+ "single_word": false,
1452
+ "special": true
1453
+ },
1454
+ "128181": {
1455
+ "content": "<|reserved_special_token_173|>",
1456
+ "lstrip": false,
1457
+ "normalized": false,
1458
+ "rstrip": false,
1459
+ "single_word": false,
1460
+ "special": true
1461
+ },
1462
+ "128182": {
1463
+ "content": "<|reserved_special_token_174|>",
1464
+ "lstrip": false,
1465
+ "normalized": false,
1466
+ "rstrip": false,
1467
+ "single_word": false,
1468
+ "special": true
1469
+ },
1470
+ "128183": {
1471
+ "content": "<|reserved_special_token_175|>",
1472
+ "lstrip": false,
1473
+ "normalized": false,
1474
+ "rstrip": false,
1475
+ "single_word": false,
1476
+ "special": true
1477
+ },
1478
+ "128184": {
1479
+ "content": "<|reserved_special_token_176|>",
1480
+ "lstrip": false,
1481
+ "normalized": false,
1482
+ "rstrip": false,
1483
+ "single_word": false,
1484
+ "special": true
1485
+ },
1486
+ "128185": {
1487
+ "content": "<|reserved_special_token_177|>",
1488
+ "lstrip": false,
1489
+ "normalized": false,
1490
+ "rstrip": false,
1491
+ "single_word": false,
1492
+ "special": true
1493
+ },
1494
+ "128186": {
1495
+ "content": "<|reserved_special_token_178|>",
1496
+ "lstrip": false,
1497
+ "normalized": false,
1498
+ "rstrip": false,
1499
+ "single_word": false,
1500
+ "special": true
1501
+ },
1502
+ "128187": {
1503
+ "content": "<|reserved_special_token_179|>",
1504
+ "lstrip": false,
1505
+ "normalized": false,
1506
+ "rstrip": false,
1507
+ "single_word": false,
1508
+ "special": true
1509
+ },
1510
+ "128188": {
1511
+ "content": "<|reserved_special_token_180|>",
1512
+ "lstrip": false,
1513
+ "normalized": false,
1514
+ "rstrip": false,
1515
+ "single_word": false,
1516
+ "special": true
1517
+ },
1518
+ "128189": {
1519
+ "content": "<|reserved_special_token_181|>",
1520
+ "lstrip": false,
1521
+ "normalized": false,
1522
+ "rstrip": false,
1523
+ "single_word": false,
1524
+ "special": true
1525
+ },
1526
+ "128190": {
1527
+ "content": "<|reserved_special_token_182|>",
1528
+ "lstrip": false,
1529
+ "normalized": false,
1530
+ "rstrip": false,
1531
+ "single_word": false,
1532
+ "special": true
1533
+ },
1534
+ "128191": {
1535
+ "content": "<|reserved_special_token_183|>",
1536
+ "lstrip": false,
1537
+ "normalized": false,
1538
+ "rstrip": false,
1539
+ "single_word": false,
1540
+ "special": true
1541
+ },
1542
+ "128192": {
1543
+ "content": "<|reserved_special_token_184|>",
1544
+ "lstrip": false,
1545
+ "normalized": false,
1546
+ "rstrip": false,
1547
+ "single_word": false,
1548
+ "special": true
1549
+ },
1550
+ "128193": {
1551
+ "content": "<|reserved_special_token_185|>",
1552
+ "lstrip": false,
1553
+ "normalized": false,
1554
+ "rstrip": false,
1555
+ "single_word": false,
1556
+ "special": true
1557
+ },
1558
+ "128194": {
1559
+ "content": "<|reserved_special_token_186|>",
1560
+ "lstrip": false,
1561
+ "normalized": false,
1562
+ "rstrip": false,
1563
+ "single_word": false,
1564
+ "special": true
1565
+ },
1566
+ "128195": {
1567
+ "content": "<|reserved_special_token_187|>",
1568
+ "lstrip": false,
1569
+ "normalized": false,
1570
+ "rstrip": false,
1571
+ "single_word": false,
1572
+ "special": true
1573
+ },
1574
+ "128196": {
1575
+ "content": "<|reserved_special_token_188|>",
1576
+ "lstrip": false,
1577
+ "normalized": false,
1578
+ "rstrip": false,
1579
+ "single_word": false,
1580
+ "special": true
1581
+ },
1582
+ "128197": {
1583
+ "content": "<|reserved_special_token_189|>",
1584
+ "lstrip": false,
1585
+ "normalized": false,
1586
+ "rstrip": false,
1587
+ "single_word": false,
1588
+ "special": true
1589
+ },
1590
+ "128198": {
1591
+ "content": "<|reserved_special_token_190|>",
1592
+ "lstrip": false,
1593
+ "normalized": false,
1594
+ "rstrip": false,
1595
+ "single_word": false,
1596
+ "special": true
1597
+ },
1598
+ "128199": {
1599
+ "content": "<|reserved_special_token_191|>",
1600
+ "lstrip": false,
1601
+ "normalized": false,
1602
+ "rstrip": false,
1603
+ "single_word": false,
1604
+ "special": true
1605
+ },
1606
+ "128200": {
1607
+ "content": "<|reserved_special_token_192|>",
1608
+ "lstrip": false,
1609
+ "normalized": false,
1610
+ "rstrip": false,
1611
+ "single_word": false,
1612
+ "special": true
1613
+ },
1614
+ "128201": {
1615
+ "content": "<|reserved_special_token_193|>",
1616
+ "lstrip": false,
1617
+ "normalized": false,
1618
+ "rstrip": false,
1619
+ "single_word": false,
1620
+ "special": true
1621
+ },
1622
+ "128202": {
1623
+ "content": "<|reserved_special_token_194|>",
1624
+ "lstrip": false,
1625
+ "normalized": false,
1626
+ "rstrip": false,
1627
+ "single_word": false,
1628
+ "special": true
1629
+ },
1630
+ "128203": {
1631
+ "content": "<|reserved_special_token_195|>",
1632
+ "lstrip": false,
1633
+ "normalized": false,
1634
+ "rstrip": false,
1635
+ "single_word": false,
1636
+ "special": true
1637
+ },
1638
+ "128204": {
1639
+ "content": "<|reserved_special_token_196|>",
1640
+ "lstrip": false,
1641
+ "normalized": false,
1642
+ "rstrip": false,
1643
+ "single_word": false,
1644
+ "special": true
1645
+ },
1646
+ "128205": {
1647
+ "content": "<|reserved_special_token_197|>",
1648
+ "lstrip": false,
1649
+ "normalized": false,
1650
+ "rstrip": false,
1651
+ "single_word": false,
1652
+ "special": true
1653
+ },
1654
+ "128206": {
1655
+ "content": "<|reserved_special_token_198|>",
1656
+ "lstrip": false,
1657
+ "normalized": false,
1658
+ "rstrip": false,
1659
+ "single_word": false,
1660
+ "special": true
1661
+ },
1662
+ "128207": {
1663
+ "content": "<|reserved_special_token_199|>",
1664
+ "lstrip": false,
1665
+ "normalized": false,
1666
+ "rstrip": false,
1667
+ "single_word": false,
1668
+ "special": true
1669
+ },
1670
+ "128208": {
1671
+ "content": "<|reserved_special_token_200|>",
1672
+ "lstrip": false,
1673
+ "normalized": false,
1674
+ "rstrip": false,
1675
+ "single_word": false,
1676
+ "special": true
1677
+ },
1678
+ "128209": {
1679
+ "content": "<|reserved_special_token_201|>",
1680
+ "lstrip": false,
1681
+ "normalized": false,
1682
+ "rstrip": false,
1683
+ "single_word": false,
1684
+ "special": true
1685
+ },
1686
+ "128210": {
1687
+ "content": "<|reserved_special_token_202|>",
1688
+ "lstrip": false,
1689
+ "normalized": false,
1690
+ "rstrip": false,
1691
+ "single_word": false,
1692
+ "special": true
1693
+ },
1694
+ "128211": {
1695
+ "content": "<|reserved_special_token_203|>",
1696
+ "lstrip": false,
1697
+ "normalized": false,
1698
+ "rstrip": false,
1699
+ "single_word": false,
1700
+ "special": true
1701
+ },
1702
+ "128212": {
1703
+ "content": "<|reserved_special_token_204|>",
1704
+ "lstrip": false,
1705
+ "normalized": false,
1706
+ "rstrip": false,
1707
+ "single_word": false,
1708
+ "special": true
1709
+ },
1710
+ "128213": {
1711
+ "content": "<|reserved_special_token_205|>",
1712
+ "lstrip": false,
1713
+ "normalized": false,
1714
+ "rstrip": false,
1715
+ "single_word": false,
1716
+ "special": true
1717
+ },
1718
+ "128214": {
1719
+ "content": "<|reserved_special_token_206|>",
1720
+ "lstrip": false,
1721
+ "normalized": false,
1722
+ "rstrip": false,
1723
+ "single_word": false,
1724
+ "special": true
1725
+ },
1726
+ "128215": {
1727
+ "content": "<|reserved_special_token_207|>",
1728
+ "lstrip": false,
1729
+ "normalized": false,
1730
+ "rstrip": false,
1731
+ "single_word": false,
1732
+ "special": true
1733
+ },
1734
+ "128216": {
1735
+ "content": "<|reserved_special_token_208|>",
1736
+ "lstrip": false,
1737
+ "normalized": false,
1738
+ "rstrip": false,
1739
+ "single_word": false,
1740
+ "special": true
1741
+ },
1742
+ "128217": {
1743
+ "content": "<|reserved_special_token_209|>",
1744
+ "lstrip": false,
1745
+ "normalized": false,
1746
+ "rstrip": false,
1747
+ "single_word": false,
1748
+ "special": true
1749
+ },
1750
+ "128218": {
1751
+ "content": "<|reserved_special_token_210|>",
1752
+ "lstrip": false,
1753
+ "normalized": false,
1754
+ "rstrip": false,
1755
+ "single_word": false,
1756
+ "special": true
1757
+ },
1758
+ "128219": {
1759
+ "content": "<|reserved_special_token_211|>",
1760
+ "lstrip": false,
1761
+ "normalized": false,
1762
+ "rstrip": false,
1763
+ "single_word": false,
1764
+ "special": true
1765
+ },
1766
+ "128220": {
1767
+ "content": "<|reserved_special_token_212|>",
1768
+ "lstrip": false,
1769
+ "normalized": false,
1770
+ "rstrip": false,
1771
+ "single_word": false,
1772
+ "special": true
1773
+ },
1774
+ "128221": {
1775
+ "content": "<|reserved_special_token_213|>",
1776
+ "lstrip": false,
1777
+ "normalized": false,
1778
+ "rstrip": false,
1779
+ "single_word": false,
1780
+ "special": true
1781
+ },
1782
+ "128222": {
1783
+ "content": "<|reserved_special_token_214|>",
1784
+ "lstrip": false,
1785
+ "normalized": false,
1786
+ "rstrip": false,
1787
+ "single_word": false,
1788
+ "special": true
1789
+ },
1790
+ "128223": {
1791
+ "content": "<|reserved_special_token_215|>",
1792
+ "lstrip": false,
1793
+ "normalized": false,
1794
+ "rstrip": false,
1795
+ "single_word": false,
1796
+ "special": true
1797
+ },
1798
+ "128224": {
1799
+ "content": "<|reserved_special_token_216|>",
1800
+ "lstrip": false,
1801
+ "normalized": false,
1802
+ "rstrip": false,
1803
+ "single_word": false,
1804
+ "special": true
1805
+ },
1806
+ "128225": {
1807
+ "content": "<|reserved_special_token_217|>",
1808
+ "lstrip": false,
1809
+ "normalized": false,
1810
+ "rstrip": false,
1811
+ "single_word": false,
1812
+ "special": true
1813
+ },
1814
+ "128226": {
1815
+ "content": "<|reserved_special_token_218|>",
1816
+ "lstrip": false,
1817
+ "normalized": false,
1818
+ "rstrip": false,
1819
+ "single_word": false,
1820
+ "special": true
1821
+ },
1822
+ "128227": {
1823
+ "content": "<|reserved_special_token_219|>",
1824
+ "lstrip": false,
1825
+ "normalized": false,
1826
+ "rstrip": false,
1827
+ "single_word": false,
1828
+ "special": true
1829
+ },
1830
+ "128228": {
1831
+ "content": "<|reserved_special_token_220|>",
1832
+ "lstrip": false,
1833
+ "normalized": false,
1834
+ "rstrip": false,
1835
+ "single_word": false,
1836
+ "special": true
1837
+ },
1838
+ "128229": {
1839
+ "content": "<|reserved_special_token_221|>",
1840
+ "lstrip": false,
1841
+ "normalized": false,
1842
+ "rstrip": false,
1843
+ "single_word": false,
1844
+ "special": true
1845
+ },
1846
+ "128230": {
1847
+ "content": "<|reserved_special_token_222|>",
1848
+ "lstrip": false,
1849
+ "normalized": false,
1850
+ "rstrip": false,
1851
+ "single_word": false,
1852
+ "special": true
1853
+ },
1854
+ "128231": {
1855
+ "content": "<|reserved_special_token_223|>",
1856
+ "lstrip": false,
1857
+ "normalized": false,
1858
+ "rstrip": false,
1859
+ "single_word": false,
1860
+ "special": true
1861
+ },
1862
+ "128232": {
1863
+ "content": "<|reserved_special_token_224|>",
1864
+ "lstrip": false,
1865
+ "normalized": false,
1866
+ "rstrip": false,
1867
+ "single_word": false,
1868
+ "special": true
1869
+ },
1870
+ "128233": {
1871
+ "content": "<|reserved_special_token_225|>",
1872
+ "lstrip": false,
1873
+ "normalized": false,
1874
+ "rstrip": false,
1875
+ "single_word": false,
1876
+ "special": true
1877
+ },
1878
+ "128234": {
1879
+ "content": "<|reserved_special_token_226|>",
1880
+ "lstrip": false,
1881
+ "normalized": false,
1882
+ "rstrip": false,
1883
+ "single_word": false,
1884
+ "special": true
1885
+ },
1886
+ "128235": {
1887
+ "content": "<|reserved_special_token_227|>",
1888
+ "lstrip": false,
1889
+ "normalized": false,
1890
+ "rstrip": false,
1891
+ "single_word": false,
1892
+ "special": true
1893
+ },
1894
+ "128236": {
1895
+ "content": "<|reserved_special_token_228|>",
1896
+ "lstrip": false,
1897
+ "normalized": false,
1898
+ "rstrip": false,
1899
+ "single_word": false,
1900
+ "special": true
1901
+ },
1902
+ "128237": {
1903
+ "content": "<|reserved_special_token_229|>",
1904
+ "lstrip": false,
1905
+ "normalized": false,
1906
+ "rstrip": false,
1907
+ "single_word": false,
1908
+ "special": true
1909
+ },
1910
+ "128238": {
1911
+ "content": "<|reserved_special_token_230|>",
1912
+ "lstrip": false,
1913
+ "normalized": false,
1914
+ "rstrip": false,
1915
+ "single_word": false,
1916
+ "special": true
1917
+ },
1918
+ "128239": {
1919
+ "content": "<|reserved_special_token_231|>",
1920
+ "lstrip": false,
1921
+ "normalized": false,
1922
+ "rstrip": false,
1923
+ "single_word": false,
1924
+ "special": true
1925
+ },
1926
+ "128240": {
1927
+ "content": "<|reserved_special_token_232|>",
1928
+ "lstrip": false,
1929
+ "normalized": false,
1930
+ "rstrip": false,
1931
+ "single_word": false,
1932
+ "special": true
1933
+ },
1934
+ "128241": {
1935
+ "content": "<|reserved_special_token_233|>",
1936
+ "lstrip": false,
1937
+ "normalized": false,
1938
+ "rstrip": false,
1939
+ "single_word": false,
1940
+ "special": true
1941
+ },
1942
+ "128242": {
1943
+ "content": "<|reserved_special_token_234|>",
1944
+ "lstrip": false,
1945
+ "normalized": false,
1946
+ "rstrip": false,
1947
+ "single_word": false,
1948
+ "special": true
1949
+ },
1950
+ "128243": {
1951
+ "content": "<|reserved_special_token_235|>",
1952
+ "lstrip": false,
1953
+ "normalized": false,
1954
+ "rstrip": false,
1955
+ "single_word": false,
1956
+ "special": true
1957
+ },
1958
+ "128244": {
1959
+ "content": "<|reserved_special_token_236|>",
1960
+ "lstrip": false,
1961
+ "normalized": false,
1962
+ "rstrip": false,
1963
+ "single_word": false,
1964
+ "special": true
1965
+ },
1966
+ "128245": {
1967
+ "content": "<|reserved_special_token_237|>",
1968
+ "lstrip": false,
1969
+ "normalized": false,
1970
+ "rstrip": false,
1971
+ "single_word": false,
1972
+ "special": true
1973
+ },
1974
+ "128246": {
1975
+ "content": "<|reserved_special_token_238|>",
1976
+ "lstrip": false,
1977
+ "normalized": false,
1978
+ "rstrip": false,
1979
+ "single_word": false,
1980
+ "special": true
1981
+ },
1982
+ "128247": {
1983
+ "content": "<|reserved_special_token_239|>",
1984
+ "lstrip": false,
1985
+ "normalized": false,
1986
+ "rstrip": false,
1987
+ "single_word": false,
1988
+ "special": true
1989
+ },
1990
+ "128248": {
1991
+ "content": "<|reserved_special_token_240|>",
1992
+ "lstrip": false,
1993
+ "normalized": false,
1994
+ "rstrip": false,
1995
+ "single_word": false,
1996
+ "special": true
1997
+ },
1998
+ "128249": {
1999
+ "content": "<|reserved_special_token_241|>",
2000
+ "lstrip": false,
2001
+ "normalized": false,
2002
+ "rstrip": false,
2003
+ "single_word": false,
2004
+ "special": true
2005
+ },
2006
+ "128250": {
2007
+ "content": "<|reserved_special_token_242|>",
2008
+ "lstrip": false,
2009
+ "normalized": false,
2010
+ "rstrip": false,
2011
+ "single_word": false,
2012
+ "special": true
2013
+ },
2014
+ "128251": {
2015
+ "content": "<|reserved_special_token_243|>",
2016
+ "lstrip": false,
2017
+ "normalized": false,
2018
+ "rstrip": false,
2019
+ "single_word": false,
2020
+ "special": true
2021
+ },
2022
+ "128252": {
2023
+ "content": "<|reserved_special_token_244|>",
2024
+ "lstrip": false,
2025
+ "normalized": false,
2026
+ "rstrip": false,
2027
+ "single_word": false,
2028
+ "special": true
2029
+ },
2030
+ "128253": {
2031
+ "content": "<|reserved_special_token_245|>",
2032
+ "lstrip": false,
2033
+ "normalized": false,
2034
+ "rstrip": false,
2035
+ "single_word": false,
2036
+ "special": true
2037
+ },
2038
+ "128254": {
2039
+ "content": "<|reserved_special_token_246|>",
2040
+ "lstrip": false,
2041
+ "normalized": false,
2042
+ "rstrip": false,
2043
+ "single_word": false,
2044
+ "special": true
2045
+ },
2046
+ "128255": {
2047
+ "content": "<|reserved_special_token_247|>",
2048
+ "lstrip": false,
2049
+ "normalized": false,
2050
+ "rstrip": false,
2051
+ "single_word": false,
2052
+ "special": true
2053
+ }
2054
+ },
2055
+ "bos_token": "<|begin▁of▁sentence|>",
2056
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>'}}{% endif %}",
2057
+ "clean_up_tokenization_spaces": false,
2058
+ "eos_token": "<|end▁of▁sentence|>",
2059
+ "legacy": true,
2060
+ "model_max_length": 16384,
2061
+ "pad_token": "<|end▁of▁sentence|>",
2062
+ "sp_model_kwargs": {},
2063
+ "tokenizer_class": "LlamaTokenizer",
2064
+ "unk_token": null,
2065
+ "use_default_system_prompt": false
2066
+ }
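
For reference, the `chat_template` above is the DeepSeek-R1-style prompt format inherited from the distill parent: the system prompt is emitted immediately after `<|begin▁of▁sentence|>`, user and assistant turns are wrapped in `<|User|>` and `<|Assistant|>` markers, and any `</think>` reasoning block is stripped from earlier assistant turns before they are re-serialized. Below is a minimal sketch of how the template renders a conversation, assuming the `transformers` library; the repo id is a placeholder for wherever this tokenizer actually lives, not something this commit confirms.

```python
# Minimal sketch: render the chat_template above into a prompt string.
# The repo id is a hypothetical placeholder -- point it at the local folder
# or Hub repo that contains this tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("sophosympatheia/Nova-Tempus-70B-v0.3")

messages = [
    {"role": "system", "content": "You are a creative writing partner."},
    {"role": "user", "content": "Open a mystery novel with one line."},
]

# tokenize=False returns the rendered string; add_generation_prompt=True
# appends the trailing <|Assistant|> marker so the model starts a fresh reply.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected shape, per the template:
# <|begin▁of▁sentence|>You are a creative writing partner.<|User|>Open a mystery novel with one line.<|Assistant|>
```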