VoidStare committed
Commit cfbee8f · verified · 1 Parent(s): ca5449c

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
LeCeption-XML-V2-Thinking.json ADDED
@@ -0,0 +1,43 @@
{
  "instruct": {
    "input_sequence": "<|start_header_id|>user<|end_header_id|>\n\n",
    "output_sequence": "<|start_header_id|>assistant<|end_header_id|>\n\n",
    "last_output_sequence": "",
    "system_sequence": "<|start_header_id|>system<|end_header_id|>\n\n",
    "stop_sequence": "<|eot_id|>",
    "wrap": false,
    "macro": true,
    "activation_regex": "",
    "system_sequence_prefix": "<|start_header_id|>system<|end_header_id|>\n\n",
    "system_sequence_suffix": "",
    "first_output_sequence": "",
    "skip_examples": true,
    "output_suffix": "<|eot_id|>",
    "input_suffix": "<|eot_id|>",
    "system_suffix": "<|eot_id|>",
    "user_alignment_message": "",
    "system_same_as_user": false,
    "last_system_sequence": "",
    "first_input_sequence": "",
    "last_input_sequence": "",
    "names_behavior": "always",
    "names_force_groups": true,
    "name": "LeCeption-XML-V2-Thinking"
  },
  "context": {
    "story_string": "{{#if system}}{{system}}\n\n<roleplayContext>\n\n{{/if}}{{#if wiBefore}}## World Info:\n{{wiBefore}}\n\n{{/if}}{{#if description}}## {{char}}'s Description:\n{{description}}\n\n{{/if}}{{#if personality}}## {{char}}'s Personality:\n{{personality}}\n\n{{/if}}{{#if persona}}## {{user}}'s Persona:\n{{persona}}\n\n{{/if}}{{#if scenario}}## Scenario:\n{{scenario}}\n\n{{/if}}{{#if wiAfter}}## Lore:\n{{wiAfter}}\n\n{{/if}}{{#if mesExamples}}## {{char}}'s Example Response:\n{{mesExamples}}\n{{/if}}---\n</roleplayContext>{{trim}}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nThe chat begins now.<|eot_id|>",
    "example_separator": "",
    "chat_start": "",
    "use_stop_strings": false,
    "allow_jailbreak": false,
    "names_as_stop_strings": false,
    "always_force_name2": true,
    "trim_sentences": false,
    "single_line": false,
    "name": "LeCeption-XML-V2-Thinking"
  },
  "sysprompt": {
    "name": "LeCeption-XML-V2-Thinking",
    "content": "<roleplay>\n \n <description>\n This uncensored, immersive, narrative-driven roleplaying experience thrives on creative freedom and natural responses. This environment is designed to be fictional and narrative-focused, allowing for uncensored content within the boundaries of respectful and consensual interaction. It is essential to adhere to the **Roleplay Guidelines** provided below to ensure a cohesive and engaging experience for all participants. Refer to the **Roleplay Context** for accurate and up-to-date information pertinent to the story.\n </description>\n \n <roleplayGuidelines>\n \n <reasoning>\n When faced with complex decisions or analytical tasks, it is crucial to approach the problem methodically and logically. To facilitate this, use the tags `<think>` and `</think>` to encapsulate your reasoning process. Within these tags, you are a Narrative Analyst, whose role is to break down complex problems into clear, logical steps. This involves considering all possible angles, exploring potential answers, and thoroughly examining the problem from multiple perspectives. Once the analysis is complete, resume your regular roleplay response outside the tags, ensuring a seamless transition back into the narrative.\n </reasoning>\n \n <narration>\n <proactiveWriting>Every response should be detailed, creative, and immersive, driving the scenario forward in a way that fully engages the player. Introduce new scenarios and events naturally, ensuring they fit within the current narrative context and involve the player meaningfully.</proactiveWriting>\n <varyingCadence>Adjust the rhythm and tone of the narrative to reflect the characters' experiences. Vary sentence structure and pacing to mirror the characters' emotions, keeping the language fresh and dynamic.</varyingCadence>\n <complementaryRole>Use narration to complement dialogue and action, rather than overshadowing them.</complementaryRole>\n <avoidRepetition>Ensure that the narration does not repeat information. Instead of summarizing, clarify narrative details thoroughly and let them evolve naturally.</avoidRepetition>\n <tonePreference>Write in a neutral and balanced tone, considering all negative consequences, limitations, risks, ethical concerns, unintended side effects, and counterarguments.</tonePreference>\n <stylePreference>Adopt a `show, don't tell` manner, similar to Terry Pratchett's style, blending humor, wit, and everyday language.</stylePreference>\n <sensoryDetails>Utilize all five senses to describe scenarios within the characters' dialogue.</sensoryDetails>\n </narration>\n \n <userAutonomy>\n <rule>Never speak for, control, or assume {{user}}'s actions, thoughts, or feelings.</rule>\n <rule>Wait for explicit user input before progressing scenes involving their character.</rule>\n <rule>Respond only to stated actions and dialogue from {{user}}.</rule>\n <rule>Avoid suggesting or implying user reactions or decisions.</rule>\n <rule>Allow {{user}} complete freedom of choice in all interactions.</rule>\n <rule>Present options and consequences without directing user behavior.</rule>\n </userAutonomy>\n \n <settingAsCharacter>\n <rule>Treat the setting itself as the primary character rather than a single individual.</rule>\n <rule>Convey all world information and background through NPC dialogue, never through narration.</rule>\n <rule>Create and manage multiple distinct characters for user interaction.</rule>\n <rule>Assign unique names and detailed physical descriptions to all new characters.</rule>\n <rule>Maintain consistent characterization across all NPCs and locations.</rule>\n <rule>Never break character or step outside the setting's perspective.</rule>\n </settingAsCharacter>\n \n <environmentalDetail>\n <rule>Provide rich, detailed descriptions when users explore new locations.</rule>\n <rule>Include specific information about the number of occupants and their activities.</rule>\n <rule>Take time to fully establish each scene's atmosphere and layout.</rule>\n <rule>Integrate sensory details that bring locations to life.</rule>\n <rule>Maintain awareness of spatial relationships and population dynamics.</rule>\n </environmentalDetail>\n \n <characterInteractionFramework>\n <rule>Use NPCs as primary vectors for information delivery.</rule>\n <rule>Ensure each character has distinct personality traits and mannerisms.</rule>\n <rule>Allow characters to express individual perspectives on the world.</rule>\n <rule>Create opportunities for meaningful dialogue and information gathering.</rule>\n <rule>Maintain consistent character relationships and knowledge bases.</rule>\n </characterInteractionFramework>\n \n <worldBuildingAndMystery>\n <rule>Keep users engaged through discovery and exploration rather than direct exposition.</rule>\n <rule>Present information in layers that require investigation to uncover deeper truths.</rule>\n <rule>Introduce consistent cultural, historical, and environmental details to create an interconnected universe.</rule>\n <rule>Allow the world to evolve independently of user actions.</rule>\n <rule>Use character interactions to reveal world lore naturally.</rule>\n </worldBuildingAndMystery>\n \n <narrativeConsistency>\n <rule>Track and reference past events, interactions, and established world elements.</rule>\n <rule>Ensure logical progression of time and events.</rule>\n <rule>Maintain awareness of all active characters and their current situations.</rule>\n <rule>Allow location and character evolution while preserving core world rules.</rule>\n <rule>Create cohesive story arcs that build upon previous developments.</rule>\n <rule>Keep track of user-established facts and preferences.</rule>\n </narrativeConsistency>\n \n <characterEmbodiment>\n <rule>Examine the context, subtext, and implications of the given information to gain a deeper understanding of the characters.</rule>\n <rule>Reflect on the potential consequences of characters' actions and decisions.</rule>\n <rule>Ensure characters' reactions, interactions, and decision-making align with their established personalities.</rule>\n <rule>Allow characters' personas to evolve with the story for a dynamic experience.</rule>\n </characterEmbodiment>\n \n <outOfCharacterInteractions>\n <rule>Use [OOC:] for non-narrative interactions to clearly distinguish personal input from the role-play.</rule>\n </outOfCharacterInteractions>\n \n </roleplayGuidelines>\n \n</roleplay>"
  }
}
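Taken together, the instruct sequences above reproduce the stock Llama 3 chat template: each turn opens with a role header and closes with <|eot_id|>, and with "names_behavior": "always" SillyTavern prefixes each message with the speaker's name. A rough, hand-assembled illustration of the resulting prompt (placeholder names and text, not output captured from the preset itself):

```
<|start_header_id|>system<|end_header_id|>

{system prompt and roleplay context}<|eot_id|><|start_header_id|>user<|end_header_id|>

User: Hello there.<|eot_id|><|start_header_id|>assistant<|end_header_id|>

Character:
```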
README.md ADDED
@@ -0,0 +1,1933 @@
---
base_model:
- TheSkullery/L3.1x3.3-DS-Hydroblated-R1-70B-v4.1
library_name: transformers
tags:
- merge
license: llama3.3
---
<!DOCTYPE html>
<style>
/* Base styles */
body {
  font-family: 'Quicksand', sans-serif;
  background: #000000;
  color: #e0e0e0;
  margin: 0;
  padding: 0;
  font-size: 16px;
  min-height: 100vh;
  position: relative;
}

body::before {
  content: '';
  position: fixed;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background:
    /* Dense tiny stars */
    radial-gradient(0.5px 0.5px at 25px 35px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    radial-gradient(0.5px 0.5px at 45px 75px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    radial-gradient(0.5px 0.5px at 55px 165px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    radial-gradient(0.5px 0.5px at 95px 45px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    radial-gradient(0.5px 0.5px at 135px 85px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    radial-gradient(0.5px 0.5px at 165px 125px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    radial-gradient(0.5px 0.5px at 185px 145px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    radial-gradient(0.5px 0.5px at 215px 175px, rgba(255, 255, 255, 0.95) 50%, transparent 50%),
    /* Small stars */
    radial-gradient(1px 1px at 155px 35px, rgba(255, 255, 255, 0.9) 50%, transparent 50%),
    radial-gradient(1px 1px at 255px 75px, rgba(255, 255, 255, 0.9) 50%, transparent 50%),
    radial-gradient(1px 1px at 355px 165px, rgba(255, 255, 255, 0.9) 50%, transparent 50%),
    radial-gradient(1px 1px at 75px 195px, rgba(255, 255, 255, 0.9) 50%, transparent 50%),
    radial-gradient(1px 1px at 175px 275px, rgba(255, 255, 255, 0.9) 50%, transparent 50%),
    radial-gradient(1px 1px at 225px 315px, rgba(255, 255, 255, 0.9) 50%, transparent 50%),
    /* Medium stars */
    radial-gradient(1.5px 1.5px at 205px 35px, rgba(255, 255, 255, 0.85) 50%, transparent 50%),
    radial-gradient(1.5px 1.5px at 305px 155px, rgba(255, 255, 255, 0.85) 50%, transparent 50%),
    radial-gradient(1.5px 1.5px at 405px 55px, rgba(255, 255, 255, 0.85) 50%, transparent 50%),
    /* Clustered splatter */
    radial-gradient(3px 3px at 100px 200px, rgba(255, 255, 255, 0.15) 50%, transparent 50%),
    radial-gradient(4px 4px at 105px 205px, rgba(255, 255, 255, 0.1) 50%, transparent 50%),
    radial-gradient(5px 5px at 95px 195px, rgba(255, 255, 255, 0.12) 50%, transparent 50%),
    radial-gradient(3px 3px at 300px 250px, rgba(255, 255, 255, 0.15) 50%, transparent 50%),
    radial-gradient(4px 4px at 305px 255px, rgba(255, 255, 255, 0.1) 50%, transparent 50%),
    radial-gradient(5px 5px at 295px 245px, rgba(255, 255, 255, 0.12) 50%, transparent 50%),
    /* Random dots */
    radial-gradient(0.8px 0.8px at 455px 85px, rgba(255, 255, 255, 0.8) 50%, transparent 50%),
    radial-gradient(0.8px 0.8px at 505px 125px, rgba(255, 255, 255, 0.8) 50%, transparent 50%),
    radial-gradient(0.8px 0.8px at 525px 165px, rgba(255, 255, 255, 0.8) 50%, transparent 50%),
    radial-gradient(0.8px 0.8px at 475px 195px, rgba(255, 255, 255, 0.8) 50%, transparent 50%),
    radial-gradient(0.8px 0.8px at 495px 225px, rgba(255, 255, 255, 0.8) 50%, transparent 50%),
    radial-gradient(0.8px 0.8px at 515px 255px, rgba(255, 255, 255, 0.8) 50%, transparent 50%),
    /* Large splatter clusters */
    radial-gradient(15px 15px at 150px 250px, rgba(255, 255, 255, 0.05) 50%, transparent 50%),
    radial-gradient(12px 12px at 155px 255px, rgba(255, 255, 255, 0.07) 50%, transparent 50%),
    radial-gradient(10px 10px at 145px 245px, rgba(255, 255, 255, 0.06) 50%, transparent 50%),
    radial-gradient(18px 18px at 350px 300px, rgba(255, 255, 255, 0.05) 50%, transparent 50%),
    radial-gradient(15px 15px at 355px 305px, rgba(255, 255, 255, 0.07) 50%, transparent 50%),
    radial-gradient(12px 12px at 345px 295px, rgba(255, 255, 255, 0.06) 50%, transparent 50%),
    /* Extra large splatter */
    radial-gradient(25px 25px at 200px 400px, rgba(255, 255, 255, 0.03) 50%, transparent 50%),
    radial-gradient(20px 20px at 205px 405px, rgba(255, 255, 255, 0.04) 50%, transparent 50%),
    radial-gradient(30px 30px at 195px 395px, rgba(255, 255, 255, 0.02) 50%, transparent 50%),
    radial-gradient(35px 35px at 500px 450px, rgba(255, 255, 255, 0.03) 50%, transparent 50%),
    radial-gradient(28px 28px at 505px 455px, rgba(255, 255, 255, 0.04) 50%, transparent 50%),
    radial-gradient(40px 40px at 495px 445px, rgba(255, 255, 255, 0.02) 50%, transparent 50%);
  background-repeat: repeat;
  background-size: 600px 600px;
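  /* All of the star gradients above are painted into a single 600x600px tile,
     which background-repeat then tiles across the whole viewport. */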
  pointer-events: none;
  z-index: 0;
  opacity: 0.6;
  animation: starTwinkle 5s infinite alternate;
}

@keyframes starTwinkle {
  0% { opacity: 0.4; }
  50% { opacity: 0.6; }
  100% { opacity: 0.8; }
}

.container {
  max-width: 1200px;
  margin: 40px auto;
  background-color: rgba(10, 10, 10, 0.97);
  padding: 40px;
  border: 1px solid rgb(196, 207, 219);
  position: relative;
  backdrop-filter: blur(10px);
  overflow: hidden;
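  /* Corner-cut frame: the polygon trims 15px off each corner; the same
     angular cut is reused at varying sizes on the cards and buttons below. */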
  clip-path: polygon(
    0 15px, 15px 0,
    calc(100% - 15px) 0, 100% 15px,
    100% calc(100% - 15px), calc(100% - 15px) 100%,
    15px 100%, 0 calc(100% - 15px)
  );
}

.container::after {
  content: '';
  position: absolute;
  inset: 0;
  background:
    linear-gradient(90deg, transparent 49.5%, rgb(196, 207, 219) 49.5%, rgb(196, 207, 219) 50.5%, transparent 50.5%) 0 0/30px 100%,
    linear-gradient(0deg, transparent 49.5%, rgb(196, 207, 219) 49.5%, rgb(196, 207, 219) 50.5%, transparent 50.5%) 0 0/100% 30px;
  opacity: 0.1;
  pointer-events: none;
  z-index: 0;
}

.container::before {
  content: '';
  position: absolute;
  inset: -1px;
  background: linear-gradient(45deg, rgb(196, 207, 219), transparent 70%);
  opacity: 0.2;
  z-index: -1;
}

@media (max-width: 1280px) {
  .container {
    margin: 20px;
    padding: 30px;
  }
}

/* Typography */
h1, h2, h3, h4 {
  color: #ffffff;
  text-shadow: 0 0 10px rgba(254, 105, 118, 0.2);
  letter-spacing: 2px;
  margin: 0 0 20px 0;
  font-weight: 600;
  position: relative;
  padding-left: 15px;
  text-transform: uppercase;
}

h1::before, h2::before, h3::before, h4::before {
  content: '';
  position: absolute;
  left: 0;
  top: 50%;
  width: 8px;
  height: 2px;
  background: rgb(254, 105, 118);
  transform: translateY(-50%) skewX(-20deg);
}

h1 { font-size: 36px; }
h2 { font-size: 28px; }
h3 { font-size: 24px; }
h4 { font-size: 20px; }

p {
  line-height: 1.8;
  color: #ffffff;
  margin: 0 0 15px 0;
  position: relative;
  padding-left: 15px;
}

p::before {
  content: '';
  position: absolute;
  left: 0;
  top: 0.8em;
  width: 8px;
  height: 1px;
  background: rgb(196, 207, 219);
  transform: skewX(-20deg);
}

/* Links */
a {
  color: rgb(254, 105, 118);
  text-decoration: none;
  transition: all 0.3s ease;
  position: relative;
  padding: 0 5px;
}

a:hover {
  color: #ffffff;
  background: rgba(254, 105, 118, 0.1);
}

a::before, a::after {
  content: '';
  position: absolute;
  width: 2px;
  height: 0;
  background: rgb(196, 207, 219);
  transition: height 0.3s ease;
}

a::before {
  left: 0;
  top: 0;
}

a::after {
  right: 0;
  bottom: 0;
}

a:hover::before, a:hover::after {
  height: 100%;
}

@keyframes linkUnderline {
  from { transform: scaleX(0); }
  to { transform: scaleX(1); }
}

/* Code elements */
pre {
  background-color: rgba(26, 26, 26, 0.95);
  padding: 15px;
  border-radius: 4px;
  overflow-x: auto;
  border: 1px solid rgba(196, 207, 219, 0.2);
  position: relative;
}

pre::before {
  content: '';
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: linear-gradient(45deg, transparent 48%, rgba(196, 207, 219, 0.1) 49%, rgba(196, 207, 219, 0.1) 51%, transparent 52%);
  background-size: 10px 10px;
  pointer-events: none;
}

code {
  font-family: 'Courier New', monospace;
  color: #E0E0E0;
}

/* Section spacing */
.section-container {
  margin: 40px 0;
  position: relative;
}

.section-container::before {
  content: '';
  position: absolute;
  top: -10px;
  left: 0;
  width: 50px;
  height: 2px;
  background: rgb(196, 207, 219);
  transform: skewX(-20deg);
}

/* Support section */
.support-section,
.benchmark-container,
.info-card,
.template-card,
.quantized-section,
.settings-card {
  margin-top: 40px;
  padding: 30px;
  background: rgba(26, 26, 26, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 4px;
  position: relative;
  overflow: hidden;
  z-index: 1;
}

.support-section::before {
  content: '';
  position: absolute;
  top: 0;
  right: 0;
  width: 100px;
  height: 100px;
  background: radial-gradient(circle at top right, rgba(196, 207, 219, 0.1), transparent 70%);
  pointer-events: none;
}

/* Ensure content is above the geometric pattern */
.model-info,
.metrics-section,
.section-container,
.support-buttons,
.model-composition,
.info-header,
.template-content,
.quantized-items {
  position: relative;
  z-index: 1;
}

.support-buttons {
  display: flex;
  gap: 15px;
  flex-wrap: wrap;
  position: relative;
  z-index: 1;
}

/* Button styles */
.button {
  display: inline-flex;
  align-items: center;
  gap: 8px;
  padding: 10px 20px;
  background: rgba(196, 207, 219, 0.05);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  color: rgb(196, 207, 219);
  font-weight: 500;
  text-decoration: none;
  transition: all 0.3s ease;
  position: relative;
  overflow: hidden;
  text-transform: uppercase;
  letter-spacing: 1px;
  clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
  box-shadow: 0 0 15px rgba(196, 207, 219, 0.1);
}

.button::before {
  content: '';
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: linear-gradient(45deg, transparent 48%, rgba(196, 207, 219, 0.1) 49%, rgba(196, 207, 219, 0.1) 51%, transparent 52%);
  background-size: 8px 8px;
  pointer-events: none;
  opacity: 0.5;
}

.button::after {
  content: '';
  position: absolute;
  inset: -1px;
  pointer-events: none;
  background:
    linear-gradient(to right, rgb(196, 207, 219) 8px, transparent 8px) top left,
    linear-gradient(to bottom, rgb(196, 207, 219) 8px, transparent 8px) top left,
    linear-gradient(to left, rgb(196, 207, 219) 8px, transparent 8px) bottom right,
    linear-gradient(to top, rgb(196, 207, 219) 8px, transparent 8px) bottom right;
  background-size: 20px 1px, 1px 20px, 20px 1px, 1px 20px;
  background-repeat: no-repeat;
  opacity: 0.4;
}

.button:hover {
  background: rgba(254, 105, 118, 0.1);
  border-color: rgb(254, 105, 118);
  transform: translateY(-1px);
  box-shadow: 0 0 20px rgba(254, 105, 118, 0.1);
  color: rgb(254, 105, 118);
  text-shadow: 0 0 5px rgba(254, 105, 118, 0.3);
}

.button:active {
  transform: translateY(0);
}

/* Template link */
.template-link {
  display: flex;
  align-items: center;
  gap: 5px;
  color: rgb(196, 207, 219);
  font-weight: 500;
  padding: 8px 12px;
  border-radius: 0;
  background: rgba(196, 207, 219, 0.05);
  border: 1px solid rgb(196, 207, 219);
  transition: all 0.3s ease;
  position: relative;
  overflow: hidden;
  text-transform: uppercase;
  letter-spacing: 1px;
  clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
  box-shadow: 0 0 10px rgba(196, 207, 219, 0.1);
}

.template-link::before {
  content: '';
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: linear-gradient(45deg, transparent 48%, rgba(196, 207, 219, 0.1) 49%, rgba(196, 207, 219, 0.1) 51%, transparent 52%);
  background-size: 10px 10px;
  pointer-events: none;
}

.template-link::after {
  content: '';
  position: absolute;
  inset: -1px;
  pointer-events: none;
  background:
    linear-gradient(to right, rgb(196, 207, 219) 8px, transparent 8px) top left,
    linear-gradient(to bottom, rgb(196, 207, 219) 8px, transparent 8px) top left,
    linear-gradient(to left, rgb(196, 207, 219) 8px, transparent 8px) bottom right,
    linear-gradient(to top, rgb(196, 207, 219) 8px, transparent 8px) bottom right;
  background-size: 20px 1px, 1px 20px, 20px 1px, 1px 20px;
  background-repeat: no-repeat;
  opacity: 0.4;
}

.template-link:hover {
  background: rgba(254, 105, 118, 0.1);
  border-color: rgb(254, 105, 118);
  color: rgb(254, 105, 118);
  text-shadow: 0 0 5px rgba(254, 105, 118, 0.3);
  box-shadow: 0 0 15px rgba(254, 105, 118, 0.1);
}

.link-arrow {
  font-size: 18px;
  line-height: 1;
  transform: translateY(1px);
}

/* Template content */
.template-content {
  display: flex;
  align-items: center;
  gap: 10px;
  position: relative;
  z-index: 1;
}

.template-author {
  color: rgba(196, 207, 219, 0.7);
  font-size: 14px;
}

/* Info card */
.info-card {
  background: rgba(32, 32, 32, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  overflow: hidden;
  position: relative;
  box-shadow: 0 0 20px rgba(196, 207, 219, 0.1);
  clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
}

.info-header {
  background: rgba(196, 207, 219, 0.05);
  padding: 20px;
  border-bottom: 1px solid rgb(196, 207, 219);
  position: relative;
  clip-path: polygon(0 0, 100% 0, 100% calc(100% - 15px), calc(100% - 15px) 100%, 0 100%);
}

.info-header h3 {
  margin: 0 0 10px 0;
}

/* Model tags */
.model-tags {
  display: flex;
  gap: 8px;
  flex-wrap: wrap;
}

.model-tag {
  background: rgba(196, 207, 219, 0.05);
  color: rgb(196, 207, 219);
  padding: 4px 12px;
  border-radius: 0;
  font-size: 12px;
  border: 1px solid rgb(196, 207, 219);
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
  text-transform: uppercase;
  letter-spacing: 1px;
}

.model-tag::before {
  content: '';
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: linear-gradient(45deg, transparent 48%, rgba(196, 207, 219, 0.1) 49%, rgba(196, 207, 219, 0.1) 51%, transparent 52%);
  background-size: 10px 10px;
}

/* Model composition */
.model-composition {
  padding: 20px;
  border-bottom: 1px solid rgba(196, 207, 219, 0.2);
  position: relative;
}

.composition-list {
  list-style: none;
  padding: 0;
  margin: 0;
  display: grid;
  gap: 12px;
}

.composition-list li {
  color: #E0E0E0;
  display: flex;
  align-items: baseline;
  gap: 12px;
  padding-left: 20px;
  position: relative;
}

.composition-list li::before {
  content: '';
  position: absolute;
  left: 0;
  top: 50%;
  width: 8px;
  height: 2px;
  background: rgb(196, 207, 219);
  transform: translateY(-50%) skewX(-20deg);
}

.model-component {
  color: rgb(254, 105, 118);
  font-weight: 500;
  min-width: 120px;
  text-shadow: 0 0 5px rgba(254, 105, 118, 0.3);
  letter-spacing: 1px;
}

/* Model description */
.model-description {
  background: rgba(26, 26, 26, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  padding: 20px;
  position: relative;
  overflow: hidden;
}

/* Template card */
.template-card {
  background: rgba(26, 26, 26, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  padding: 20px;
  position: relative;
  overflow: hidden;
}

/* Quantized section cards */
.quantized-container {
  display: grid;
  gap: 20px;
}

.quantized-section {
  background: rgba(26, 26, 26, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  padding: 20px;
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
  box-shadow: 0 0 20px rgba(196, 207, 219, 0.1);
}

.quantized-items {
  display: grid;
  gap: 12px;
}

.quantized-item {
  display: flex;
  align-items: baseline;
  gap: 12px;
  position: relative;
}

.quantized-item .author {
  color: rgba(224, 224, 224, 0.7);
  min-width: 100px;
  position: relative;
  padding-left: 15px;
}

.quantized-item .author::before {
  content: '';
  position: absolute;
  left: 0;
  top: 50%;
  width: 8px;
  height: 2px;
  background: rgb(196, 207, 219);
  transform: translateY(-50%) skewX(-20deg);
}

.multi-links {
  display: flex;
  align-items: center;
  gap: 12px;
}

.separator {
  color: rgba(196, 207, 219, 0.5);
  transform: skewX(-20deg);
}

/* Config cards */
.config-container {
  background: rgba(26, 26, 26, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  overflow: hidden;
  position: relative;
}

.config-header {
  background: rgba(196, 207, 219, 0.05);
  padding: 15px 20px;
  border-bottom: 1px solid rgba(196, 207, 219, 0.2);
  position: relative;
}

.model-name {
  color: rgb(196, 207, 219);
  font-weight: 600;
}

.config-content {
  padding: 20px;
}

.config-item {
  display: flex;
  flex-direction: column;
  gap: 5px;
  margin-bottom: 15px;
  position: relative;
  padding-left: 15px;
}

.config-item::before {
  content: '';
  position: absolute;
  left: 0;
  top: 10px;
  width: 8px;
  height: 2px;
  background: rgb(196, 207, 219);
  transform: skewX(-20deg);
}

.config-label {
  color: rgb(196, 207, 219);
  font-size: 14px;
  font-weight: 500;
}

.config-value {
  color: #E0E0E0;
  font-family: 'Courier New', monospace;
}

/* Settings grid */
.settings-grid {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
  gap: 20px;
  margin-top: 20px;
}

.settings-card {
  background: rgba(32, 32, 32, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
}

.settings-header {
  background: rgba(196, 207, 219, 0.05);
  padding: 15px 20px;
  border-bottom: 1px solid rgb(196, 207, 219);
}

.settings-header h3 {
  margin: 0;
  color: rgb(196, 207, 219);
  font-size: 1.1em;
}

.settings-author {
  display: block;
  font-size: 0.9em;
  color: rgba(224, 224, 224, 0.7);
  margin-top: 5px;
}

.settings-content {
  padding: 15px 20px;
}

.setting-item {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 8px 0;
  border-bottom: 1px solid rgba(196, 207, 219, 0.1);
}

.setting-item:last-child {
  border-bottom: none;
}

.setting-label {
  color: rgba(224, 224, 224, 0.9);
  font-size: 0.95em;
}

.setting-value {
  color: rgb(254, 105, 118);
  font-family: 'Courier New', monospace;
  font-weight: 500;
}

.setting-item.highlight {
  display: flex;
  justify-content: center;
  padding: 15px 0;
}

.setting-item.highlight .setting-value {
  font-size: 1.2em;
  color: rgb(254, 105, 118);
}

/* Model list */
.model-list {
  list-style: none;
  padding: 0;
  margin: 10px 0 0 0;
}

.model-list li {
  color: #E0E0E0;
  font-family: 'Courier New', monospace;
  padding: 8px 0 8px 20px;
  position: relative;
}

.model-list li::before {
  content: '';
  position: absolute;
  left: 0;
  top: 50%;
  width: 8px;
  height: 2px;
  background: rgb(196, 207, 219);
  transform: translateY(-50%) skewX(-20deg);
}

/* Container */
.container {
  max-width: 1200px;
  margin: 0 auto;
  padding: 40px 20px;
  position: relative;
}

.container::after {
  content: '';
  position: absolute;
  top: 0;
  right: 0;
  width: 200px;
  height: 200px;
  background: radial-gradient(circle at top right, rgba(196, 207, 219, 0.1), transparent 70%);
  pointer-events: none;
  z-index: 0;
}

/* Header */
.header {
  text-align: center;
  margin-bottom: 40px;
  position: relative;
  padding: 20px;
  background: rgba(26, 26, 26, 0.98);
  border: 1px solid rgb(196, 207, 219);
  clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 20px 100%, 0 calc(100% - 20px));
  box-shadow: 0 0 30px rgba(196, 207, 219, 0.1);
}

.header h1 {
  color: rgb(196, 207, 219);
  text-shadow:
    0 0 10px rgba(254, 105, 118, 0.3),
    0 0 20px rgba(254, 105, 118, 0.2),
    0 0 30px rgba(254, 105, 118, 0.1);
  letter-spacing: 3px;
  font-size: 2.5em;
  font-weight: 700;
  text-transform: uppercase;
}

.header::after {
  content: '';
  position: absolute;
  bottom: 15px;
  left: 50%;
  transform: translateX(-50%);
  width: 200px;
  height: 2px;
  background: linear-gradient(90deg,
    transparent,
    rgb(254, 105, 118) 20%,
    rgb(254, 105, 118) 80%,
    transparent
  );
  box-shadow: 0 0 10px rgba(254, 105, 118, 0.3);
}

/* Info section */
.info {
  display: grid;
  gap: 30px;
  position: relative;
}

.info::before {
  content: '';
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background:
    linear-gradient(45deg, transparent 48%, rgba(196, 207, 219, 0.05) 49%, rgba(196, 207, 219, 0.05) 51%, transparent 52%) 0 0/20px 20px;
  pointer-events: none;
  z-index: -1;
}

/* Banner image */
.info img {
  width: 100%;
  height: auto;
  border: 2px solid rgb(196, 207, 219);
  position: relative;
  clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 20px 100%, 0 calc(100% - 20px));
  box-shadow:
    0 0 30px rgba(196, 207, 219, 0.2),
    0 0 60px rgba(196, 207, 219, 0.1);
  filter: contrast(1.1) brightness(1.05);
}

.info img:hover {
  box-shadow:
    0 0 40px rgba(254, 105, 118, 0.2),
    0 0 80px rgba(254, 105, 118, 0.1);
  transition: all 0.3s ease;
}

/* Creator section */
.creator-section {
  display: flex;
  justify-content: flex-end;
  margin: -20px 0 20px;
  position: relative;
  z-index: 1;
}

.creator-section::before {
  content: '';
  position: absolute;
  top: 50%;
  right: 0;
  width: 50%;
  height: 1px;
  background: linear-gradient(90deg, transparent, rgba(196, 207, 219, 0.2));
  transform: translateY(-50%);
  z-index: -1;
}

.creator-badge {
  background: rgba(26, 26, 26, 0.95);
  border: 1px solid rgb(196, 207, 219);
  padding: 8px 15px;
  display: flex;
  align-items: center;
  gap: 10px;
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
  box-shadow: 0 0 15px rgba(196, 207, 219, 0.1);
}

.creator-badge::before {
  content: '';
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: linear-gradient(45deg, transparent 48%, rgba(196, 207, 219, 0.05) 49%, rgba(196, 207, 219, 0.05) 51%, transparent 52%);
  background-size: 10px 10px;
  pointer-events: none;
}

.creator-label {
  color: rgb(196, 207, 219);
  font-size: 14px;
  text-transform: uppercase;
  letter-spacing: 1px;
  text-shadow: 0 0 5px rgba(196, 207, 219, 0.2);
}

.creator-link {
  display: flex;
  align-items: center;
  gap: 5px;
  color: rgb(254, 105, 118);
  font-weight: 500;
}

.creator-name {
  position: relative;
}

.creator-arrow {
  font-size: 18px;
  line-height: 1;
  transform: translateY(1px);
}

/* Details element styling */
details {
  margin: 15px 0;
}

summary {
  cursor: pointer;
  color: rgb(196, 207, 219);
  font-weight: 500;
  margin-bottom: 15px;
  position: relative;
  padding-left: 20px;
}

summary::before {
  content: '';
  position: absolute;
  left: 0;
  top: 50%;
  width: 8px;
  height: 2px;
  background: rgb(254, 105, 118);
  transform: translateY(-50%) skewX(-20deg);
}

summary::marker,
summary::-webkit-details-marker {
  display: none;
}

/* Special Thanks Section */
.special-thanks {
  background: rgba(26, 26, 26, 0.95);
  border: 1px solid rgb(196, 207, 219);
  padding: 20px;
  margin: 20px 0;
  position: relative;
  clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
}

.special-thanks h3 {
  color: rgb(196, 207, 219);
  margin-bottom: 15px;
  position: relative;
  padding-left: 20px;
}

.special-thanks h3::before {
  content: '';
  position: absolute;
  left: 0;
  top: 50%;
  width: 8px;
  height: 2px;
  background: rgb(254, 105, 118);
  transform: translateY(-50%) skewX(-20deg);
}

.thanks-list {
  list-style: none;
  padding: 0;
  margin: 0;
  display: grid;
  gap: 10px;
}

.thanks-list li {
  color: rgb(196, 207, 219);
  padding-left: 15px;
  position: relative;
}

.thanks-list li strong {
  color: rgb(254, 105, 118);
  font-weight: 500;
}

.thanks-list li::before {
  content: '';
  position: absolute;
  left: 0;
  top: 50%;
  width: 6px;
  height: 1px;
  background: rgba(196, 207, 219, 0.3);
  transform: translateY(-50%) skewX(-20deg);
}

.thanks-note {
  margin-top: 15px;
  color: rgba(196, 207, 219, 0.7);
  font-style: italic;
  font-size: 0.9em;
}

/* Responsive adjustments */
@media (max-width: 768px) {
  .container {
    padding: 20px;
  }

  .core-metrics-grid,
  .info-grid {
    grid-template-columns: 1fr;
  }

  .creator-section {
    justify-content: flex-start;
  }
}

/* Metrics section */
.metrics-section {
  margin-bottom: 30px;
  position: relative;
  background: rgba(32, 32, 32, 0.95);
  border: 1px solid rgb(196, 207, 219);
  padding: 20px;
  clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
  box-shadow:
    0 0 20px rgba(196, 207, 219, 0.1),
    0 0 40px rgba(196, 207, 219, 0.05);
}

/* Core metrics grid */
.core-metrics-grid {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
  gap: 15px;
  margin-bottom: 30px;
}

.info-grid {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(150px, 1fr));
  gap: 15px;
}

/* Metric box */
.metric-box {
  background: rgba(32, 32, 32, 0.95);
  border: 1px solid rgb(196, 207, 219);
  border-radius: 0;
  padding: 15px;
  display: flex;
  flex-direction: column;
  gap: 8px;
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
  box-shadow:
    0 0 15px rgba(196, 207, 219, 0.1),
    0 0 30px rgba(196, 207, 219, 0.05);
}

.metric-box .label {
  color: rgb(196, 207, 219);
  font-size: 14px;
  font-weight: 500;
  text-transform: uppercase;
  letter-spacing: 1px;
  text-shadow: 0 0 5px rgba(196, 207, 219, 0.2);
}

.metric-box .value {
  color: rgb(254, 105, 118);
  font-size: 28px;
  font-weight: 700;
  text-shadow:
    0 0 10px rgba(254, 105, 118, 0.3),
    0 0 20px rgba(254, 105, 118, 0.2);
  letter-spacing: 1px;
}

/* Progress metrics */
.progress-metrics {
  display: grid;
  gap: 15px;
  padding: 20px;
  background: rgba(32, 32, 32, 0.95);
  border: 1px solid rgb(196, 207, 219);
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
  box-shadow:
    0 0 20px rgba(196, 207, 219, 0.1),
    0 0 40px rgba(196, 207, 219, 0.05);
}

.progress-metric {
  display: grid;
  gap: 8px;
}

.progress-label {
  display: flex;
  justify-content: space-between;
  align-items: center;
  color: rgb(196, 207, 219);
  font-size: 14px;
  text-transform: uppercase;
  letter-spacing: 1px;
  text-shadow: 0 0 5px rgba(196, 207, 219, 0.2);
}

.progress-value {
  color: rgb(254, 105, 118);
  font-weight: 600;
  text-shadow:
    0 0 5px rgba(254, 105, 118, 0.3),
    0 0 10px rgba(254, 105, 118, 0.2);
}

/* Progress bars */
.progress-bar {
  height: 4px;
  background: rgba(196, 207, 219, 0.1);
  border-radius: 0;
  overflow: hidden;
  position: relative;
  border: 1px solid rgba(196, 207, 219, 0.2);
  clip-path: polygon(0 0, 100% 0, calc(100% - 4px) 100%, 0 100%);
}

.progress-fill {
  height: 100%;
  background: linear-gradient(90deg, rgb(254, 105, 118), rgb(254, 125, 138));
  border-radius: 0;
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 4px) 0, 100% 100%, 0 100%);
  box-shadow:
    0 0 10px rgba(254, 105, 118, 0.2),
    0 0 20px rgba(254, 105, 118, 0.1);
}

.progress-fill::after {
  content: '';
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: linear-gradient(90deg,
    rgba(255, 255, 255, 0.1) 0%,
    rgba(255, 255, 255, 0.1) 40%,
    rgba(255, 255, 255, 0.3) 50%,
    rgba(255, 255, 255, 0.1) 60%,
    rgba(255, 255, 255, 0.1) 100%
  );
  background-size: 300% 100%;
  animation: shimmer 2s infinite;
}
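/* The shimmer keyframes (defined at the end of this stylesheet) slide the
   300%-wide highlight gradient across the fill, producing a moving glint. */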

/* Split progress bars */
.progress-metric.split .progress-label {
  justify-content: space-between;
  font-size: 13px;
}

.progress-bar.split {
  display: flex;
  background: rgba(196, 207, 219, 0.1);
  position: relative;
  justify-content: center;
  border: 1px solid rgba(196, 207, 219, 0.2);
  clip-path: polygon(0 0, 100% 0, calc(100% - 4px) 100%, 0 100%);
}

.progress-bar.split::after {
  content: '';
  position: absolute;
  top: 0;
  left: 50%;
  transform: translateX(-50%);
  width: 2px;
  height: 100%;
  background: rgba(196, 207, 219, 0.3);
  z-index: 2;
  box-shadow: 0 0 10px rgba(196, 207, 219, 0.2);
}
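/* Each half of a split bar reads its width from a --scale custom property set
   inline in the HTML (e.g. style="--scale: 0.46"); scaleX(var(--scale)) then
   grows the fill outward from the center divider. */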
.progress-fill-left,
.progress-fill-right {
  height: 100%;
  background: linear-gradient(90deg, rgb(254, 105, 118), rgb(254, 125, 138));
  position: relative;
  width: 50%;
  overflow: hidden;
}

.progress-fill-left {
  clip-path: polygon(0 0, calc(100% - 4px) 0, 100% 100%, 0 100%);
  margin-right: 1px;
  transform-origin: right;
  transform: scaleX(var(--scale, 0));
  box-shadow:
    0 0 10px rgba(254, 105, 118, 0.2),
    0 0 20px rgba(254, 105, 118, 0.1);
}

.progress-fill-right {
  clip-path: polygon(0 0, 100% 0, 100% 100%, 4px 100%);
  margin-left: 1px;
  transform-origin: left;
  transform: scaleX(var(--scale, 0));
  box-shadow:
    0 0 10px rgba(254, 105, 118, 0.2),
    0 0 20px rgba(254, 105, 118, 0.1);
}

/* Benchmark container */
.benchmark-container {
  background: rgba(32, 32, 32, 0.95);
  border: 1px solid rgb(196, 207, 219);
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
  box-shadow:
    0 0 20px rgba(196, 207, 219, 0.1),
    0 0 40px rgba(196, 207, 219, 0.05);
  padding: 20px;
}

/* Benchmark notification */
.benchmark-notification {
  background: rgba(32, 32, 32, 0.95);
  border: 1px solid rgb(196, 207, 219);
  padding: 15px;
  margin-bottom: 20px;
  position: relative;
  overflow: hidden;
  clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
  box-shadow:
    0 0 15px rgba(196, 207, 219, 0.1),
    0 0 30px rgba(196, 207, 219, 0.05);
}

.notification-content {
  display: flex;
  align-items: center;
  gap: 10px;
  position: relative;
  z-index: 1;
}

.notification-icon {
  font-size: 20px;
  color: rgb(254, 105, 118);
  text-shadow:
    0 0 10px rgba(254, 105, 118, 0.3),
    0 0 20px rgba(254, 105, 118, 0.2);
}

.notification-text {
  color: rgb(196, 207, 219);
  font-size: 14px;
  display: flex;
  align-items: center;
  gap: 10px;
  flex-wrap: wrap;
  text-transform: uppercase;
  letter-spacing: 1px;
  text-shadow: 0 0 5px rgba(196, 207, 219, 0.2);
}

.benchmark-link {
  color: rgb(254, 105, 118);
  font-weight: 500;
  white-space: nowrap;
  text-shadow:
    0 0 5px rgba(254, 105, 118, 0.3),
    0 0 10px rgba(254, 105, 118, 0.2);
  position: relative;
  padding: 2px 5px;
  border: 1px solid rgba(196, 207, 219, 0.2);
  clip-path: polygon(0 0, calc(100% - 5px) 0, 100% 5px, 100% 100%, 5px 100%, 0 calc(100% - 5px));
}

@keyframes shimmer {
  0% { background-position: 200% 0; }
  100% { background-position: -200% 0; }
}

</style>

<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>L3.3-San-Mai-R1-70b</title>
  <link href="https://fonts.googleapis.com/css2?family=Quicksand:wght@400;500;600&display=swap" rel="stylesheet">
  <link href="styles/components/layout.css" rel="stylesheet">
  <link href="styles/components/metrics.css" rel="stylesheet">
  <link href="styles/components/cards.css" rel="stylesheet">
  <link href="styles/components/buttons.css" rel="stylesheet">
  <link href="styles/components/animations.css" rel="stylesheet">
  <link href="styles/main.css" rel="stylesheet">
</head>
<body>
  <div class="container">
    <div class="header">
      <h1>L3.3-San-Mai-R1-70b</h1>
    </div>
    <div class="info">
      <img src="https://cdn-uploads.huggingface.co/production/uploads/64545af5ec40bbbd01242ca6/8fZQZaLM0XO9TyKh-yMQ7.jpeg" alt="Model banner">
      <div class="creator-section">
        <div class="creator-badge" style="display: flex; align-items: center; gap: 1rem;">
          <div class="creator-info">
            <span class="creator-label">Created by</span>
            <a href="https://huggingface.co/Steelskull" target="_blank" class="creator-link">
              <span class="creator-name">SteelSkull</span>
              <span class="creator-arrow">→</span>
            </a>
          </div>
          <a href="https://ko-fi.com/Y8Y0AO2XE" target="_blank" class="button" style="margin: 0; padding: 0.5rem 1rem;">
            Support on Ko-fi
          </a>
        </div>
      </div>
      <div class="model-info">
        <h2>Model Information</h2>
        <div class="info-card">
          <div class="info-header">
            <h3>L3.3-San-Mai-R1-70b v0.5.OG</h3>
            <div class="model-tags">
              <span class="model-tag">L3.3 = Llama 3.3</span>
              <span class="model-tag">SCE Merge</span>
              <span class="model-tag">R1 = Deepseek R1</span>
              <span class="model-tag">70b Parameters</span>
              <span class="model-tag">v0.5.OG</span>
            </div>
          </div>
          <div class="model-composition">
            <h4>Model Composition</h4>
            <ul class="composition-list">
              <li><span class="model-component base-model"><a href="https://huggingface.co/TheSkullery/L3.1x3.3-DS-Hydroblated-R1-70B-v4.1" target="_blank">L3.1x3.3-DS-Hydroblated-R1-70B-v4.1</a></span> Base model</li>
              <li><span class="model-component"><a href="https://huggingface.co/EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.0" target="_blank">EVA-LLaMA-3.33-70B-v0.0</a></span> Core capabilities</li>
              <li><span class="model-component"><a href="https://huggingface.co/Sao10K/L3.3-70B-Euryale-v2.3" target="_blank">L3.3-70B-Euryale-v2.3</a></span> Enhanced reasoning</li>
              <li><span class="model-component"><a href="https://huggingface.co/Sao10K/70B-L3.3-Cirrus-x1" target="_blank">70B-L3.3-Cirrus-x1</a></span> Improved coherence</li>
              <li><span class="model-component"><a href="https://huggingface.co/Sao10K/L3.1-70B-Hanami-x1" target="_blank">L3.1-70B-Hanami-x1</a></span> Balanced responses</li>
              <li><span class="model-component"><a href="https://huggingface.co/TheDrummer/Anubis-70B-v1" target="_blank">Anubis-70B-v1</a></span> Enhanced detail</li>
              <li><span class="model-component"><a href="https://huggingface.co/SicariusSicariiStuff/Negative_LLAMA_70B" target="_blank">Negative_LLAMA_70B</a></span> Reduced bias</li>
            </ul>
            <p></p>
            <div class="model-description">
              <h4>Model Series Overview</h4>
              <p>L3.3-San-Mai-R1-70b represents the foundational release in a three-part model series, followed by L3.3-Cu-Mai-R1-70b (Version A) and L3.3-Mokume-Gane-R1-70b (Version C). The name "San-Mai" draws inspiration from the Japanese bladesmithing technique of creating three-layer laminated composite metals, known for combining a hard cutting edge with a tougher spine, a metaphor for this model's balanced approach to AI capabilities.</p>
              <h4>Technical Architecture</h4>
              <p>Built on a custom DeepSeek R1 Distill base (DS-Hydroblated-R1-v4.1), San-Mai-R1 integrates specialized components through the SCE merge method (a configuration sketch follows the list below):</p>
              <ul>
                <li>EVA and EURYALE foundations for creative expression and scene comprehension</li>
                <li>Cirrus and Hanami elements for enhanced reasoning capabilities</li>
                <li>Anubis components for detailed scene description</li>
                <li>Negative_LLAMA integration for balanced perspective and response</li>
              </ul>
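              <p>For readers curious about the shape of such a recipe, a minimal, illustrative mergekit sketch of an SCE merge over these components is shown below. It is an assumption reconstructed from the component list, not the exact configuration used for San-Mai-R1; in particular, the <code>select_topk</code> value is a placeholder rather than the tuned value referenced later in this card.</p>
              <pre><code>models:
  - model: EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.0
  - model: Sao10K/L3.3-70B-Euryale-v2.3
  - model: Sao10K/70B-L3.3-Cirrus-x1
  - model: Sao10K/L3.1-70B-Hanami-x1
  - model: TheDrummer/Anubis-70B-v1
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
merge_method: sce
base_model: TheSkullery/L3.1x3.3-DS-Hydroblated-R1-70B-v4.1
parameters:
  select_topk: 0.15  # placeholder, not the released recipe's tuned value
dtype: bfloat16
</code></pre>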
1423
+ <h4>Core Capabilities</h4>
1424
+ <p>As the OG model in the series, San-Mai-R1 serves as the gold standard and reliable baseline. User feedback consistently highlights its superior intelligence, coherence, and unique ability to provide deep character insights. Through proper prompting, the model demonstrates advanced reasoning capabilities and an "X-factor" that enables unprompted exploration of character inner thoughts and motivations.</p>
1425
+ <h4>Base Architecture</h4>
1426
+ <p>The model utilizes the custom Hydroblated-R1 base, engineered for stability and enhanced reasoning. The SCE merge method's settings are precisely tuned based on extensive community feedback, ensuring optimal component integration while maintaining model coherence and reliability. This foundation establishes San-Mai-R1 as the benchmark upon which its variant models build and expand.</p>
1427
+ </div>
1428
+ </div>
1429
+ </div>
1430
+ <h2>UGI-Benchmark Results:</h2>
1431
+ <div class="benchmark-container">
1432
+ <div class="benchmark-notification">
1433
+ <div class="notification-content">
1434
+ <span class="notification-icon">🏆</span>
1435
+ <span class="notification-text">
1436
+ Latest benchmark results as of 02/20/2025.
1437
+ <a href="https://huggingface.co/spaces/DontPlanToEnd/UGI-Leaderboard" target="_blank" class="benchmark-link">
1438
+ View Full Leaderboard →
1439
+ </a>
1440
+ </span>
1441
+ </div>
1442
+ </div>
1443
+ <div class="metrics-section">
1444
+ <h3>Core Metrics</h3>
1445
+ <div class="core-metrics-grid">
1446
+ <div class="metric-box">
1447
+ <span class="label">UGI Score</span>
1448
+ <span class="value">40.04</span>
1449
+ </div>
1450
+ <div class="metric-box">
1451
+ <span class="label">Willingness Score</span>
1452
+ <span class="value">2.5/10</span>
1453
+ </div>
1454
+ <div class="metric-box">
1455
+ <span class="label">Natural Intelligence</span>
1456
+ <span class="value">42.36</span>
1457
+ </div>
1458
+ <div class="metric-box">
1459
+ <span class="label">Coding Ability</span>
1460
+ <span class="value">22</span>
1461
+ </div>
1462
+ </div>
1463
+ </div>
1464
+ <div class="metrics-section">
1465
+ <h3>Model Information</h3>
1466
+ <div class="info-grid">
1467
+ <div class="metric-box">
1468
+ <span class="label">Political Lean</span>
1469
+ <span class="value">-8.5%</span>
1470
+ </div>
1471
+ <div class="metric-box">
1472
+ <span class="label">Ideology</span>
1473
+ <span class="value">Liberalism</span>
1474
+ </div>
1475
+ <div class="metric-box">
1476
+ <span class="label">Parameters</span>
1477
+ <span class="value">70B</span>
1478
+ </div>
1479
+ </div>
1480
+ </div>
1481
+ <div class="metrics-section">
1482
+ <details>
1483
+ <summary>Aggregated Scores</summary>
1484
+ <div class="progress-metrics">
1485
+ <div class="progress-metric">
1486
+ <div class="progress-label">
1487
+ <span>Diplomacy</span>
1488
+ <span class="progress-value">61.7%</span>
1489
+ </div>
1490
+ <div class="progress-bar">
1491
+ <div class="progress-fill" style="width: 61.7%"></div>
1492
+ </div>
1493
+ </div>
1494
+ <div class="progress-metric">
1495
+ <div class="progress-label">
1496
+ <span>Government</span>
1497
+ <span class="progress-value">44.6%</span>
1498
+ </div>
1499
+ <div class="progress-bar">
1500
+ <div class="progress-fill" style="width: 44.6%"></div>
1501
+ </div>
1502
+ </div>
1503
+ <div class="progress-metric">
1504
+ <div class="progress-label">
1505
+ <span>Economy</span>
1506
+ <span class="progress-value">43.3%</span>
1507
+ </div>
1508
+ <div class="progress-bar">
1509
+ <div class="progress-fill" style="width: 43.3%"></div>
1510
+ </div>
1511
+ </div>
1512
+ <div class="progress-metric">
1513
+ <div class="progress-label">
1514
+ <span>Society</span>
1515
+ <span class="progress-value">60.0%</span>
1516
+ </div>
1517
+ <div class="progress-bar">
1518
+ <div class="progress-fill" style="width: 60.0%"></div>
1519
+ </div>
1520
+ </div>
1521
+ </div>
1522
+ </details>
1523
+ </div>
1524
+ <div class="metrics-section">
1525
+ <details>
1526
+ <summary>Individual Scores</summary>
1527
+ <div class="progress-metrics">
1528
+ <div class="progress-metric split">
1529
+ <div class="progress-label">
1530
+ <span>Federal</span>
1531
+ <span class="progress-value">46.0%</span>
1532
+ <span>Unitary</span>
1533
+ </div>
1534
+ <div class="progress-bar split">
1535
+ <div class="progress-fill-left" style="--scale: 0.460"></div>
1536
+ <div class="progress-fill-right" style="--scale: 0.540"></div>
1537
+ </div>
1538
+ </div>
1539
+ <div class="progress-metric split">
1540
+ <div class="progress-label">
1541
+ <span>Democratic</span>
1542
+ <span class="progress-value">67.5%</span>
1543
+ <span>Autocratic</span>
1544
+ </div>
1545
+ <div class="progress-bar split">
1546
+ <div class="progress-fill-left" style="--scale: 0.675"></div>
1547
+ <div class="progress-fill-right" style="--scale: 0.325"></div>
1548
+ </div>
1549
+ </div>
1550
+ <div class="progress-metric split">
1551
+ <div class="progress-label">
1552
+ <span>Security</span>
1553
+ <span class="progress-value">47.5%</span>
1554
+ <span>Freedom</span>
1555
+ </div>
1556
+ <div class="progress-bar split">
1557
+ <div class="progress-fill-left" style="--scale: 0.475"></div>
1558
+ <div class="progress-fill-right" style="--scale: 0.525"></div>
1559
+ </div>
1560
+ </div>
1561
+ <div class="progress-metric split">
1562
+ <div class="progress-label">
1563
+ <span>Nationalism</span>
1564
+ <span class="progress-value">40.4%</span>
1565
+ <span>Int'l</span>
1566
+ </div>
1567
+ <div class="progress-bar split">
1568
+ <div class="progress-fill-left" style="--scale: 0.404"></div>
1569
+ <div class="progress-fill-right" style="--scale: 0.596"></div>
1570
+ </div>
1571
+ </div>
1572
+ <div class="progress-metric split">
1573
+ <div class="progress-label">
1574
+ <span>Militarist</span>
1575
+ <span class="progress-value">32.9%</span>
1576
+ <span>Pacifist</span>
1577
+ </div>
1578
+ <div class="progress-bar split">
1579
+ <div class="progress-fill-left" style="--scale: 0.329"></div>
1580
+ <div class="progress-fill-right" style="--scale: 0.671"></div>
1581
+ </div>
1582
+ </div>
1583
+ <div class="progress-metric split">
1584
+ <div class="progress-label">
1585
+ <span>Assimilationist</span>
1586
+ <span class="progress-value">41.5%</span>
1587
+ <span>Multiculturalist</span>
1588
+ </div>
1589
+ <div class="progress-bar split">
1590
+ <div class="progress-fill-left" style="--scale: 0.415"></div>
1591
+ <div class="progress-fill-right" style="--scale: 0.585"></div>
1592
+ </div>
1593
+ </div>
1594
+ <div class="progress-metric split">
1595
+ <div class="progress-label">
1596
+ <span>Collectivize</span>
1597
+ <span class="progress-value">43.3%</span>
1598
+ <span>Privatize</span>
1599
+ </div>
1600
+ <div class="progress-bar split">
1601
+ <div class="progress-fill-left" style="--scale: 0.433"></div>
1602
+ <div class="progress-fill-right" style="--scale: 0.567"></div>
1603
+ </div>
1604
+ </div>
1605
+ <div class="progress-metric split">
1606
+ <div class="progress-label">
1607
+ <span>Planned</span>
1608
+ <span class="progress-value">42.9%</span>
1609
+ <span>Laissez-Faire</span>
1610
+ </div>
1611
+ <div class="progress-bar split">
1612
+ <div class="progress-fill-left" style="--scale: 0.429"></div>
1613
+ <div class="progress-fill-right" style="--scale: 0.571"></div>
1614
+ </div>
1615
+ </div>
1616
+ <div class="progress-metric split">
1617
+ <div class="progress-label">
1618
+ <span>Isolationism</span>
1619
+ <span class="progress-value">43.8%</span>
1620
+ <span>Globalism</span>
1621
+ </div>
1622
+ <div class="progress-bar split">
1623
+ <div class="progress-fill-left" style="--scale: 0.438"></div>
1624
+ <div class="progress-fill-right" style="--scale: 0.562"></div>
1625
+ </div>
1626
+ </div>
1627
+ <div class="progress-metric split">
1628
+ <div class="progress-label">
1629
+ <span>Irreligious</span>
1630
+ <span class="progress-value">57.9%</span>
1631
+ <span>Religious</span>
1632
+ </div>
1633
+ <div class="progress-bar split">
1634
+ <div class="progress-fill-left" style="--scale: 0.579"></div>
1635
+ <div class="progress-fill-right" style="--scale: 0.421"></div>
1636
+ </div>
1637
+ </div>
1638
+ <div class="progress-metric split">
1639
+ <div class="progress-label">
1640
+ <span>Progressive</span>
1641
+ <span class="progress-value">57.3%</span>
1642
+ <span>Traditional</span>
1643
+ </div>
1644
+ <div class="progress-bar split">
1645
+ <div class="progress-fill-left" style="--scale: 0.573"></div>
1646
+ <div class="progress-fill-right" style="--scale: 0.427"></div>
1647
+ </div>
1648
+ </div>
1649
+ <div class="progress-metric split">
1650
+ <div class="progress-label">
1651
+ <span>Acceleration</span>
1652
+ <span class="progress-value">64.8%</span>
1653
+ <span>Bioconservative</span>
1654
+ </div>
1655
+ <div class="progress-bar split">
1656
+ <div class="progress-fill-left" style="--scale: 0.648"></div>
1657
+ <div class="progress-fill-right" style="--scale: 0.352"></div>
1658
+ </div>
1659
+ </div>
1660
+ </div>
1661
+ </details>
1662
+ </div>
1663
+ </div>
1664
+ <!-- Open LLM-Benchmark Results - TO BE UPDATED -->
1665
+ <!--<h2>Open LLM-Benchmark Results:</h2>
1666
+ <div class="benchmark-container">
1667
+ <div class="benchmark-notification">
1668
+ <div class="notification-content">
1669
+ <span class="notification-text">
1670
+ Average Score: 43.68%
1671
+ <a href="https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard#/?rankingMode=dynamic" target="_blank" class="benchmark-link">
1672
+ View Full Leaderboard →
1673
+ </a>
1674
+ </span>
1675
+ </div>
1676
+ </div>
1677
+ <div class="progress-metrics">
1678
+ <div class="progress-metric">
1679
+ <div class="progress-label">
1680
+ <span>IFEval</span>
1681
+ <span class="progress-value">60.24%</span>
1682
+ </div>
1683
+ <div class="progress-bar">
1684
+ <div class="progress-fill" style="width: 60.24%"></div>
1685
+ </div>
1686
+ </div>
1687
+ <div class="progress-metric">
1688
+ <div class="progress-label">
1689
+ <span>BBH</span>
1690
+ <span class="progress-value">56.17%</span>
1691
+ </div>
1692
+ <div class="progress-bar">
1693
+ <div class="progress-fill" style="width: 56.17%"></div>
1694
+ </div>
1695
+ </div>
1696
+ <div class="progress-metric">
1697
+ <div class="progress-label">
1698
+ <span>MATH</span>
1699
+ <span class="progress-value">46.68%</span>
1700
+ </div>
1701
+ <div class="progress-bar">
1702
+ <div class="progress-fill" style="width: 46.68%"></div>
1703
+ </div>
1704
+ </div>
1705
+ <div class="progress-metric">
1706
+ <div class="progress-label">
1707
+ <span>GPQA</span>
1708
+ <span class="progress-value">29.19%</span>
1709
+ </div>
1710
+ <div class="progress-bar">
1711
+ <div class="progress-fill" style="width: 29.19%"></div>
1712
+ </div>
1713
+ </div>
1714
+ <div class="progress-metric">
1715
+ <div class="progress-label">
1716
+ <span>MUSR</span>
1717
+ <span class="progress-value">20.19%</span>
1718
+ </div>
1719
+ <div class="progress-bar">
1720
+ <div class="progress-fill" style="width: 20.19%"></div>
1721
+ </div>
1722
+ </div>
1723
+ <div class="progress-metric">
1724
+ <div class="progress-label">
1725
+ <span>MMLU-Pro</span>
1726
+ <span class="progress-value">49.59%</span>
1727
+ </div>
1728
+ <div class="progress-bar">
1729
+ <div class="progress-fill" style="width: 49.59%"></div>
1730
+ </div>
1731
+ </div>
1732
+ </div>
1733
+ </div>-->
1734
+ <div class="component-section" id="settings">
1735
+ <div class="section-container">
1736
+ <h2>Recommended Sampler Settings <strong>by @Geechan</strong></h2>
1737
+ <div class="settings-grid">
1738
+ <div class="settings-card">
1739
+ <div class="settings-header">
1740
+ <h3>Static Temperature</h3>
1741
+ </div>
1742
+ <div class="settings-content">
1743
+ <div class="setting-item highlight">
1744
+ <span class="setting-value">1 - 1.05</span>
1745
+ </div>
1746
+ </div>
1747
+ </div>
1748
+ <div class="settings-card">
1749
+ <div class="settings-header">
1750
+ <h3>Min P</h3>
1751
+ </div>
1752
+ <div class="settings-content">
1753
+ <div class="setting-item highlight">
1754
+ <span class="setting-value">0.015</span>
1755
+ </div>
1756
+ </div>
1757
+ </div>
1758
+ <div class="settings-card">
1759
+ <div class="settings-header">
1760
+ <h3>DRY Settings (optional)</h3>
1761
+ </div>
1762
+ <div class="settings-content">
1763
+ <div class="setting-item">
1764
+ <span class="setting-label">Multiplier</span>
1765
+ <span class="setting-value">0.8</span>
1766
+ </div>
1767
+ <div class="setting-item">
1768
+ <span class="setting-label">Base</span>
1769
+ <span class="setting-value">1.75</span>
1770
+ </div>
1771
+ <div class="setting-item">
1772
+ <span class="setting-label">Length</span>
1773
+ <span class="setting-value">4</span>
1774
+ </div>
1775
+ </div>
1776
+ </div>
1777
+ </div>
1778
+ </div>
1779
+ </div>
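For anyone applying these numbers programmatically, below is a minimal request sketch assuming a llama.cpp-style HTTP server, where DRY is exposed as `dry_multiplier`, `dry_base`, and `dry_allowed_length`. Other backends may name these fields differently or not support DRY at all, and the endpoint URL is a placeholder.

```python
# Minimal sketch: sending the recommended samplers to a llama.cpp-style
# /completion endpoint. The URL is hypothetical; field names follow llama.cpp
# server conventions and may differ (or be absent) on other backends.
import json
import urllib.request

payload = {
    "prompt": "The chat begins now.",
    "temperature": 1.0,        # static temperature, recommended 1 - 1.05
    "min_p": 0.015,            # Min P from the card
    "top_p": 1.0,              # leave other truncation samplers neutral
    "top_k": 0,                # <= 0 disables top-k in llama.cpp
    "dry_multiplier": 0.8,     # optional DRY settings from the card
    "dry_base": 1.75,
    "dry_allowed_length": 4,
    "n_predict": 512,
}

req = urllib.request.Request(
    "http://localhost:8080/completion",  # hypothetical local server
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read())["content"])
```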
1780
+ <div class="section-container">
1781
+ <h2>Recommended Templates & Prompts</h2>
1782
+ <div class="template-card">
1783
+ <div class="template-item">
1784
+ <div class="template-content">
1785
+ <a href="https://huggingface.co/Konnect1221/Methception-Llamaception-SillyTavern-Preset" target="_blank" class="template-link">
1786
+ LLam@ception
1787
+ <span class="link-arrow">→</span>
1788
+ </a>
1789
+ <span class="template-author">by @.konnect</span>
1790
+ </div>
1791
+ </div>
1792
+ <div class="template-item">
1793
+ <div class="template-content">
1794
+ <a href="https://huggingface.co/Steelskull/L3.3-San-Mai-R1-70b/blob/main/LeCeption-XML-V2-Thinking.json" target="_blank" class="template-link">
1795
+ LeCeption
1796
+ <span class="link-arrow">→</span>
1797
+ </a>
1798
+ <span class="template-author">by @Steel</span> > A completly revamped XML version of Llam@ception 1.5.2 with stepped thinking and Reasoning added
1799
+ </div>
1800
+ </div>
1801
+ </div>
1802
+ <div class="settings-card">
1803
+ <div class="settings-header">
1804
+ <h3>LECEPTION REASONING CONFIGURATION:</h3>
1805
+ </div>
1806
+ <div class="settings-content">
1807
+ <div class="settings-grid">
1808
+ <div class="settings-card">
1809
+ <div class="settings-header">
1810
+ <h3>Start Reply With:</h3>
1811
+ </div>
1812
+ <div class="settings-content">
1813
+ <div class="setting-item">
1814
+ <p>'<span style="color: #ff6b6b">&lt;think&gt;</span> OK, as an objective, detached narrative analyst, let's think this through carefully:'</p>
1815
+ </div>
1816
+ </div>
1817
+ </div>
1818
+ <div class="settings-card">
1819
+ <div class="settings-header">
1820
+ <h3>Reasoning Formatting (no spaces):</h3>
1821
+ </div>
1822
+ <div class="settings-content">
1823
+ <div class="setting-item">
1824
+ <span class="setting-label">Prefix:</span>
1825
+ <span class="setting-value">'<span style="color: #ff6b6b">&lt;think&gt;</span>'</span>
1826
+ </div>
1827
+ <div class="setting-item">
1828
+ <span class="setting-label">Suffix:</span>
1829
+ <span class="setting-value">'<span style="color: #ff6b6b">&lt;/think&gt;</span>'</span>
1830
+ </div>
1831
+ </div>
1832
+ </div>
1833
+ </div>
1834
+ </div>
1835
+ </div>
1836
+ </div>
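With the prefix and suffix configured as above, the frontend treats everything between `<think>` and `</think>` as hidden reasoning and the remainder as the visible reply. A minimal stand-alone sketch of that split, for wiring the same behavior outside SillyTavern:

```python
# Minimal sketch of the split performed with the prefix/suffix settings above:
# everything between <think> and </think> is reasoning, the rest is the
# visible roleplay reply.
import re

def split_reasoning(reply: str) -> tuple[str, str]:
    """Return (reasoning, visible_text) for one model reply."""
    match = re.search(r"<think>(.*?)</think>", reply, flags=re.DOTALL)
    if not match:
        return "", reply.strip()
    reasoning = match.group(1).strip()
    visible = (reply[:match.start()] + reply[match.end():]).strip()
    return reasoning, visible

reply = (
    "<think> OK, as an objective, detached narrative analyst, "
    "let's think this through carefully: ... </think>\n"
    "The tavern door creaks open."
)
reasoning, visible = split_reasoning(reply)
print(visible)  # -> The tavern door creaks open.
```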
1837
+ <!--<div class="section-container">
1838
+ <h2>Quantized Versions</h2>
1839
+ <div class="quantized-container">
1840
+ <div class="quantized-section">
1841
+ <h3>GGUF Quantizations</h3>
1842
+ <div class="quantized-items">
1843
+ <div class="quantized-item">
1844
+ <span class="author">bartowski</span>
1845
+ <a href="https://huggingface.co/bartowski/Steelskull_L3.3-San-Mai-R1-GGUF" target="_blank">
1846
+ Combined-GGUF
1847
+ <span class="link-arrow">→</span>
1848
+ </a>
1849
+ </div>
1850
+ <div class="quantized-item">
1851
+ <span class="author">mradermacher</span>
1852
+ <div class="multi-links">
1853
+ <a href="https://huggingface.co/mradermacher/L3.3-San-Mai-R1-GGUF" target="_blank">
1854
+ GGUF
1855
+ <span class="link-arrow">→</span>
1856
+ </a>
1857
+ <span class="separator">//</span>
1858
+ <a href="https://huggingface.co/mradermacher/L3.3-San-Mai-R1-i1-GGUF" target="_blank">
1859
+ Imat-GGUF
1860
+ <span class="link-arrow">→</span>
1861
+ </a>
1862
+ </div>
1863
+ </div>
1864
+ </div>
1865
+ </div>
1866
+ <div class="quantized-section">
1867
+ <h3>EXL2 Quantizations</h3>
1868
+ <div class="quantized-items">
1869
+ <div class="quantized-item">
1870
+ <span class="author">ReadyArt</span>
1871
+ <div class="multi-links">
1872
+ <a href="https://huggingface.co/ReadyArt/L3.3-San-Mai-R1_EXl2_8.0bpw_H8" target="_blank">
1873
+ 8.0BPW-EXL2
1874
+ <span class="link-arrow">→</span>
1875
+ </a>
1876
+ <span class="separator">//</span>
1877
+ <a href="https://huggingface.co/ReadyArt/L3.3-San-Mai-R1_EXl2_6.65bpw_H8" target="_blank">
1878
+ 6.65BPW-EXL2
1879
+ <span class="link-arrow">→</span>
1880
+ </a>
1881
+ </div>
1882
+ </div>
1883
+ <div class="quantized-item">
1884
+ <span class="author">Darkhn</span>
1885
+ <a href="https://huggingface.co/Darkhn/Steelskull-L3.3-San-Mai-R1-6.0bpw-h8-exl2" target="_blank">
1886
+ 6.0BPW-EXL2
1887
+ <span class="link-arrow">→</span>
1888
+ </a>
1889
+ </div>
1890
+ </div>
1891
+ </div>
1892
+ <div class="quantized-section">
1893
+ <h3>FP8 Dynamic</h3>
1894
+ <div class="quantized-items">
1895
+ <div class="quantized-item">
1896
+ <span class="author">yeyaowei</span>
1897
+ <a href="https://huggingface.co/yeyaowei/L3.3-San-Mai-R1-FP8-Dynamic" target="_blank">
1898
+ FP8-Dynamic
1899
+ <span class="link-arrow">→</span>
1900
+ </a>
1901
+ </div>
1902
+ </div>
1903
+ </div>
1904
+ </div>
1905
+ </div>-->
1906
+ <div class="support-section">
1907
+ <h2>Support & Community:</h2>
1908
+ <div class="support-buttons">
1909
+ <a href="https://ko-fi.com/Y8Y0AO2XE" target="_blank" class="button">
1910
+ Support on Ko-fi
1911
+ </a>
1912
+ <a href="https://discord.gg/4tCngSm3qZ" target="_blank" class="button">
1913
+ Join Discord
1914
+ </a>
1915
+ </div>
1916
+ <div class="special-thanks">
1917
+ <h3>Special Thanks</h3>
1918
+ <ul class="thanks-list">
1919
+ <li><strong>@Geechan</strong> for feedback and sampler settings</li>
1920
+ <li><strong>@Konnect</strong> for their feedback and templates</li>
1921
+ <li><strong>@Kistara</strong> for their feedback and help with the model mascot design</li>
1922
+ <li><strong>@Thana Alt</strong> for their feedback and Quants</li>
1923
+ <li><strong>@Lightning_missile</strong> for their feedback</li>
1924
+ <li><strong>The Arli community</strong> for feedback and testers</li>
1925
+ <li><strong>The BeaverAI community</strong> for feedback and testers</li>
1926
+ </ul>
1927
+ <p class="thanks-note">I wish I could add everyone but im pretty sure it would be as long as the card!</p>
1928
+ </div>
1929
+ </div>
1930
+ </div>
1931
+ </div>
1932
+ </body>
1933
+ </html>
config.json ADDED
@@ -0,0 +1,51 @@
1
+ {
2
+ "_name_or_path": "TheSkullery/L3.1x3.3-DS-Hydroblated-R1-70B-v4.1",
3
+ "architectures": [
4
+ "LlamaForCausalLM"
5
+ ],
6
+ "attention_bias": false,
7
+ "attention_dropout": 0.0,
8
+ "bos_token_id": 128000,
9
+ "eos_token_id": [
10
+ 128001,
11
+ 128008,
12
+ 128009
13
+ ],
14
+ "head_dim": 128,
15
+ "hidden_act": "silu",
16
+ "hidden_size": 8192,
17
+ "initializer_range": 0.02,
18
+ "intermediate_size": 28672,
19
+ "max_position_embeddings": 131072,
20
+ "mlp_bias": false,
21
+ "model_type": "llama",
22
+ "num_attention_heads": 64,
23
+ "num_hidden_layers": 80,
24
+ "num_key_value_heads": 8,
25
+ "pretraining_tp": 1,
26
+ "rms_norm_eps": 1e-05,
27
+ "rope_scaling": {
28
+ "factor": 8.0,
29
+ "high_freq_factor": 4.0,
30
+ "low_freq_factor": 1.0,
31
+ "original_max_position_embeddings": 8192,
32
+ "rope_type": "llama3"
33
+ },
34
+ "rope_theta": 500000.0,
35
+ "tie_word_embeddings": false,
36
+ "torch_dtype": "bfloat16",
37
+ "transformers_version": "4.48.3",
38
+ "use_cache": true,
39
+ "vocab_size": 128256,
40
+ "quantization_config": {
41
+ "quant_method": "exl2",
42
+ "version": "0.2.8",
43
+ "bits": 4.65,
44
+ "head_bits": 6,
45
+ "calibration": {
46
+ "rows": 115,
47
+ "length": 2048,
48
+ "dataset": "(default)"
49
+ }
50
+ }
51
+ }
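A quick stdlib sanity check of the shipped config. The `quantization_config` block is the key detail: `quant_method: "exl2"` at 4.65 bits per weight means this repository holds ExLlamaV2-quantized weights, so it targets an exl2-capable loader (ExLlamaV2, TabbyAPI, and similar) rather than a plain bf16 `transformers` load.

```python
# Quick sanity check of the shipped config.json. Note the exl2
# quantization_config: these weights target an ExLlamaV2-compatible loader,
# not a plain bf16 transformers load.
import json

with open("config.json") as f:
    cfg = json.load(f)

print(cfg["model_type"], cfg["num_hidden_layers"], "layers,",
      cfg["hidden_size"], "hidden dim")            # llama, 80 layers, 8192 hidden dim
print("context:", cfg["max_position_embeddings"])  # 131072 via llama3 rope scaling
print("quant:", cfg["quantization_config"]["quant_method"],
      cfg["quantization_config"]["bits"], "bpw")   # exl2 4.65 bpw
```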
measurement.json ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
1
+ {"metadata": {"mergekitty_version": "0.0.7", "total_size": 141107412992}, "weight_map": {"lm_head.weight": "model-00001-of-00030.safetensors", "model.embed_tokens.weight": "model-00001-of-00030.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.11.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.o_proj.weight": 
"model-00003-of-00030.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.o_proj.weight": 
"model-00005-of-00030.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.o_proj.weight": 
"model-00007-of-00030.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.24.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.o_proj.weight": 
"model-00008-of-00030.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.o_proj.weight": 
"model-00010-of-00030.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.o_proj.weight": 
"model-00012-of-00030.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.37.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.o_proj.weight": 
"model-00014-of-00030.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.o_proj.weight": 
"model-00016-of-00030.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.o_proj.weight": 
"model-00017-of-00030.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.5.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.o_proj.weight": 
"model-00019-of-00030.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.o_proj.weight": 
"model-00021-of-00030.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.62.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.o_proj.weight": 
"model-00023-of-00030.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.o_proj.weight": 
"model-00025-of-00030.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.o_proj.weight": 
"model-00026-of-00030.safetensors", "model.layers.71.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.75.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.o_proj.weight": 
"model-00028-of-00030.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.input_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", 
"model.layers.9.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.norm.weight": "model-00030-of-00030.safetensors"}}
output-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b7fc5e0c191fbf4aac2ff3c0a62ccfc62e9c3433353cec4e7558ee6476a20203
+ size 8513468312
output-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b62e5ae4bde3afd7a0211950af87ae705fcab9312c97f37853b061ebf715733
+ size 8504730028
output-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79a1259753bdbdac3951a86b6537799bb922ada25f7912da23da0e9b03002868
+ size 8546778180
output-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55694e695d4997839bac34e6d9c5a9a5910b816f5ea341cdab8eb62bec51b0e7
+ size 8449196564
output-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa1d1648e5eb966d9b34ab215ce68ea867f74b92cc4115f10b329b8a11e64eb0
+ size 7881332252
output-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b143f715e8c749d7f5e20bfc423d9923d9e9ef40411f3980a3d446ae3629ae81
+ size 829046880
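Each output-*.safetensors entry above is a Git LFS pointer rather than the binary itself: the three lines record the pointer spec version, the SHA-256 of the real file, and its size in bytes. A minimal sketch of verifying a downloaded shard against its pointer, using only the standard library (the expected values are copied from the last pointer above):

    import hashlib, os

    path = "output-00006-of-00006.safetensors"
    expected_oid = "b143f715e8c749d7f5e20bfc423d9923d9e9ef40411f3980a3d446ae3629ae81"
    expected_size = 829046880

    # Cheap check first: the pointer records the exact byte count.
    assert os.path.getsize(path) == expected_size

    # Then hash in chunks so large shards never sit fully in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    assert h.hexdigest() == expected_oid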
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
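special_tokens_map.json tells transformers which tokens act as BOS, EOS, and padding; note that padding reuses <|finetune_right_pad_id|> and EOS is <|eot_id|>, not <|end_of_text|>. A minimal sketch of how these settings surface at load time (the local path is illustrative):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("./")  # directory holding this repo's files
    print(tok.bos_token)  # <|begin_of_text|>
    print(tok.eos_token)  # <|eot_id|>
    print(tok.pad_token)  # <|finetune_right_pad_id|>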
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2066 @@
+ {
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "bos_token": "<|begin_of_text|>",
2053
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
2054
+ "clean_up_tokenization_spaces": true,
2055
+ "eos_token": "<|eot_id|>",
2056
+ "extra_special_tokens": {},
2057
+ "model_input_names": [
2058
+ "input_ids",
2059
+ "attention_mask"
2060
+ ],
2061
+ "model_max_length": 131072,
2062
+ "pad_token": "<|finetune_right_pad_id|>",
2063
+ "padding_side": "left",
2064
+ "tokenizer_class": "PreTrainedTokenizerFast",
2065
+ "unk_token": null
2066
+ }
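
The added_tokens_decoder block above registers ids 128146-128255 as Llama-3.1-style reserved special tokens (<|reserved_special_token_138|> through <|reserved_special_token_247|>). A minimal sketch of how to inspect these entries after loading the tokenizer with transformers; "your-org/your-model" is a placeholder repo id, not this repository's actual name:

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual model repository.
tok = AutoTokenizer.from_pretrained("your-org/your-model")

# Ids 128146-128255 map to the reserved specials declared above.
print(tok.convert_ids_to_tokens(128146))  # "<|reserved_special_token_138|>"
print(tok.convert_ids_to_tokens(128255))  # "<|reserved_special_token_247|>"
print(tok.pad_token)                      # "<|finetune_right_pad_id|>" per this config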
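
The chat_template field is a Jinja template that wraps each turn in <|start_header_id|>role<|end_header_id|> ... <|eot_id|> markers and optionally injects tool definitions. A hedged usage sketch, continuing from the tok object above and using the standard transformers apply_chat_template entry point:

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Renders the conversation through the chat_template above; add_generation_prompt=True
# appends the trailing <|start_header_id|>assistant<|end_header_id|> header so the
# model continues as the assistant.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)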