{
    "instruct": {
        "input_sequence": "\n[INST] ",
        "output_sequence": "",
        "last_output_sequence": "",
        "system_sequence": "\n[INST] Narrative Instructions: ",
        "stop_sequence": "",
        "wrap": false,
        "macro": true,
        "activation_regex": "",
        "system_sequence_prefix": "",
        "system_sequence_suffix": "",
        "first_output_sequence": "",
        "skip_examples": true,
        "output_suffix": "</s>",
        "input_suffix": "[/INST]",
        "system_suffix": "[/INST]",
        "user_alignment_message": "",
        "system_same_as_user": false,
        "last_system_sequence": "",
        "first_input_sequence": "",
        "last_input_sequence": "",
        "names_behavior": "always",
        "names_force_groups": true,
        "name": "MS-Instruct"
    },
    "context": {
        "story_string": "\n<s>[INST] {{#if system}}{{system}}\n\n{{/if}}{{#if wiBefore}}## World Info:\n{{wiBefore}}\n{{/if}}{{#if description}}## {{char}}'s Description:\n{{description}}\n{{/if}}{{#if personality}}## {{char}}'s Personality:\n{{personality}}\n{{/if}}{{#if persona}}## {{user}}'s Persona:\n{{persona}}\n{{/if}}{{#if scenario}}## Scenario:\n{{scenario}}\n{{/if}}{{#if wiAfter}}## World Info:\n{{wiAfter}}\n{{/if}}{{#if mesExamples}}## {{char}}'s Example Response:\n{{mesExamples}}\n{{/if}}\n[/INST]",
        "example_separator": "",
        "chat_start": "## Exchange:",
        "use_stop_strings": false,
        "allow_jailbreak": false,
        "names_as_stop_strings": false,
        "always_force_name2": false,
        "trim_sentences": true,
        "single_line": false,
        "name": "MS-Context"
    },
    "preset": {
        "temp": 0.9,
        "temperature_last": true,
        "top_p": 0.88,
        "top_k": 100,
        "top_a": 0,
        "tfs": 1,
        "epsilon_cutoff": 0,
        "eta_cutoff": 0,
        "typical_p": 1,
        "min_p": 0.003,
        "rep_pen": 1.04,
        "rep_pen_range": 0,
        "rep_pen_decay": 0,
        "rep_pen_slope": 0.7,
        "no_repeat_ngram_size": 0,
        "penalty_alpha": 0,
        "num_beams": 1,
        "length_penalty": 1,
        "min_length": 0,
        "encoder_rep_pen": 1,
        "freq_pen": 0,
        "presence_pen": 0.03,
        "skew": 0,
        "do_sample": true,
        "early_stopping": true,
        "dynatemp": false,
        "min_temp": 0,
        "max_temp": 2,
        "dynatemp_exponent": 1,
        "smoothing_factor": 0,
        "smoothing_curve": 1,
        "dry_allowed_length": 2,
        "dry_multiplier": 0.8,
        "dry_base": 1.75,
        "dry_sequence_breakers": "[\"\\n\", \":\", \"\\\"\", \"*\"]",
        "dry_penalty_last_n": 28672,
        "add_bos_token": true,
        "ban_eos_token": false,
        "skip_special_tokens": true,
        "mirostat_mode": 0,
        "mirostat_tau": 5,
        "mirostat_eta": 0.1,
        "guidance_scale": 1,
        "negative_prompt": "",
        "grammar_string": "",
        "json_schema": {},
        "banned_tokens": "",
        "sampler_priority": [
            "repetition_penalty",
            "presence_penalty",
            "frequency_penalty",
            "dry",
            "temperature",
            "dynamic_temperature",
            "quadratic_sampling",
            "top_k",
            "top_p",
            "typical_p",
            "epsilon_cutoff",
            "eta_cutoff",
            "tfs",
            "top_a",
            "min_p",
            "mirostat",
            "xtc",
            "encoder_repetition_penalty",
            "no_repeat_ngram"
        ],
        "samplers": [
            "top_k",
            "tfs_z",
            "typical_p",
            "top_p",
            "min_p",
            "xtc",
            "temperature"
        ],
        "ignore_eos_token": false,
        "spaces_between_special_tokens": true,
        "speculative_ngram": false,
        "sampler_order": [
            6,
            0,
            1,
            3,
            4,
            2,
            5
        ],
        "logit_bias": [],
        "xtc_threshold": 0.1,
        "xtc_probability": 0.23,
        "rep_pen_size": 0,
        "genamt": 350,
        "max_length": 28672,
        "name": "vKW"
    }
}