Against61 committed on
Commit
f91805a
·
verified ·
1 Parent(s): 6dc1804

End of training

Browse files
README.md CHANGED
@@ -1,11 +1,10 @@
1
  ---
2
- license: apache-2.0
3
  library_name: peft
4
  tags:
5
  - trl
6
  - sft
7
  - generated_from_trainer
8
- base_model: TheBloke/Mistral-7B-Instruct-v0.2-GPTQ
9
  model-index:
10
  - name: ayz_bot
11
  results: []
@@ -16,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
16
 
17
  # ayz_bot
18
 
19
- This model is a fine-tuned version of [TheBloke/Mistral-7B-Instruct-v0.2-GPTQ](https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GPTQ) on the None dataset.
20
 
21
  ## Model description
22
 
 
1
  ---
 
2
  library_name: peft
3
  tags:
4
  - trl
5
  - sft
6
  - generated_from_trainer
7
+ base_model: TheBloke/saiga_mistral_7b-GPTQ
8
  model-index:
9
  - name: ayz_bot
10
  results: []
 
15
 
16
  # ayz_bot
17
 
18
+ This model is a fine-tuned version of [TheBloke/saiga_mistral_7b-GPTQ](https://huggingface.co/TheBloke/saiga_mistral_7b-GPTQ) on the None dataset.
19
 
20
  ## Model description
21
 
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
- "base_model_name_or_path": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ",
5
  "bias": "none",
6
  "fan_in_fan_out": false,
7
  "inference_mode": true,
 
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": null,
4
+ "base_model_name_or_path": "TheBloke/saiga_mistral_7b-GPTQ",
5
  "bias": "none",
6
  "fan_in_fan_out": false,
7
  "inference_mode": true,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ab03b6483955be05745b61feb9133c11486459d9a05352371b7fde2b42abe3bb
3
  size 27280152
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:96b3fedb12858e5b92e8887a853d27408b83be36ac209e1bfb17d9b683d83166
3
  size 27280152
added_tokens.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "<|im_end|>": 32000,
3
+ "<|im_start|>": 32001
4
+ }
runs/Mar28_18-34-15_Sahib/events.out.tfevents.1711625655.Sahib.12248.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6950dd362b9e376aebfe5e471d570a57962a36b9ddca71a4831056db4008b9e9
3
+ size 11876
special_tokens_map.json CHANGED
@@ -1,15 +1,20 @@
1
  {
 
 
 
 
 
2
  "bos_token": {
3
  "content": "<s>",
4
  "lstrip": false,
5
- "normalized": false,
6
  "rstrip": false,
7
  "single_word": false
8
  },
9
  "eos_token": {
10
  "content": "</s>",
11
  "lstrip": false,
12
- "normalized": false,
13
  "rstrip": false,
14
  "single_word": false
15
  },
@@ -17,7 +22,7 @@
17
  "unk_token": {
18
  "content": "<unk>",
19
  "lstrip": false,
20
- "normalized": false,
21
  "rstrip": false,
22
  "single_word": false
23
  }
 
1
  {
2
+ "additional_special_tokens": [
3
+ "<unk>",
4
+ "<s>",
5
+ "</s>"
6
+ ],
7
  "bos_token": {
8
  "content": "<s>",
9
  "lstrip": false,
10
+ "normalized": true,
11
  "rstrip": false,
12
  "single_word": false
13
  },
14
  "eos_token": {
15
  "content": "</s>",
16
  "lstrip": false,
17
+ "normalized": true,
18
  "rstrip": false,
19
  "single_word": false
20
  },
 
22
  "unk_token": {
23
  "content": "<unk>",
24
  "lstrip": false,
25
+ "normalized": true,
26
  "rstrip": false,
27
  "single_word": false
28
  }
tokenizer.json CHANGED
@@ -14,7 +14,7 @@
14
  "single_word": false,
15
  "lstrip": false,
16
  "rstrip": false,
17
- "normalized": false,
18
  "special": true
19
  },
20
  {
@@ -23,7 +23,7 @@
23
  "single_word": false,
24
  "lstrip": false,
25
  "rstrip": false,
26
- "normalized": false,
27
  "special": true
28
  },
29
  {
@@ -32,8 +32,26 @@
32
  "single_word": false,
33
  "lstrip": false,
34
  "rstrip": false,
35
- "normalized": false,
36
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
  }
38
  ],
39
  "normalizer": {
@@ -56,12 +74,6 @@
56
  "post_processor": {
57
  "type": "TemplateProcessing",
58
  "single": [
59
- {
60
- "SpecialToken": {
61
- "id": "<s>",
62
- "type_id": 0
63
- }
64
- },
65
  {
66
  "Sequence": {
67
  "id": "A",
@@ -70,24 +82,12 @@
70
  }
71
  ],
72
  "pair": [
73
- {
74
- "SpecialToken": {
75
- "id": "<s>",
76
- "type_id": 0
77
- }
78
- },
79
  {
80
  "Sequence": {
81
  "id": "A",
82
  "type_id": 0
83
  }
84
  },
85
- {
86
- "SpecialToken": {
87
- "id": "<s>",
88
- "type_id": 1
89
- }
90
- },
91
  {
92
  "Sequence": {
93
  "id": "B",
@@ -95,17 +95,7 @@
95
  }
96
  }
97
  ],
98
- "special_tokens": {
99
- "<s>": {
100
- "id": "<s>",
101
- "ids": [
102
- 1
103
- ],
104
- "tokens": [
105
- "<s>"
106
- ]
107
- }
108
- }
109
  },
110
  "decoder": {
111
  "type": "Sequence",
 
14
  "single_word": false,
15
  "lstrip": false,
16
  "rstrip": false,
17
+ "normalized": true,
18
  "special": true
19
  },
20
  {
 
23
  "single_word": false,
24
  "lstrip": false,
25
  "rstrip": false,
26
+ "normalized": true,
27
  "special": true
28
  },
29
  {
 
32
  "single_word": false,
33
  "lstrip": false,
34
  "rstrip": false,
35
+ "normalized": true,
36
  "special": true
37
+ },
38
+ {
39
+ "id": 32000,
40
+ "content": "<|im_end|>",
41
+ "single_word": false,
42
+ "lstrip": true,
43
+ "rstrip": true,
44
+ "normalized": true,
45
+ "special": false
46
+ },
47
+ {
48
+ "id": 32001,
49
+ "content": "<|im_start|>",
50
+ "single_word": false,
51
+ "lstrip": true,
52
+ "rstrip": true,
53
+ "normalized": true,
54
+ "special": false
55
  }
56
  ],
57
  "normalizer": {
 
74
  "post_processor": {
75
  "type": "TemplateProcessing",
76
  "single": [
 
 
 
 
 
 
77
  {
78
  "Sequence": {
79
  "id": "A",
 
82
  }
83
  ],
84
  "pair": [
 
 
 
 
 
 
85
  {
86
  "Sequence": {
87
  "id": "A",
88
  "type_id": 0
89
  }
90
  },
 
 
 
 
 
 
91
  {
92
  "Sequence": {
93
  "id": "B",
 
95
  }
96
  }
97
  ],
98
+ "special_tokens": {}
 
 
 
 
 
 
 
 
 
 
99
  },
100
  "decoder": {
101
  "type": "Sequence",
tokenizer_config.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
- "add_bos_token": true,
3
  "add_eos_token": false,
4
  "added_tokens_decoder": {
5
  "0": {
6
  "content": "<unk>",
7
  "lstrip": false,
8
- "normalized": false,
9
  "rstrip": false,
10
  "single_word": false,
11
  "special": true
@@ -13,7 +13,7 @@
13
  "1": {
14
  "content": "<s>",
15
  "lstrip": false,
16
- "normalized": false,
17
  "rstrip": false,
18
  "single_word": false,
19
  "special": true
@@ -21,23 +21,43 @@
21
  "2": {
22
  "content": "</s>",
23
  "lstrip": false,
24
- "normalized": false,
25
  "rstrip": false,
26
  "single_word": false,
27
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  }
29
  },
30
- "additional_special_tokens": [],
 
 
 
 
31
  "bos_token": "<s>",
32
- "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
33
  "clean_up_tokenization_spaces": false,
34
  "eos_token": "</s>",
35
  "legacy": true,
36
- "model_max_length": 1000000000000000019884624838656,
37
  "pad_token": "</s>",
 
38
  "sp_model_kwargs": {},
39
  "spaces_between_special_tokens": false,
40
  "tokenizer_class": "LlamaTokenizer",
41
  "unk_token": "<unk>",
42
- "use_default_system_prompt": false
43
  }
 
1
  {
2
+ "add_bos_token": false,
3
  "add_eos_token": false,
4
  "added_tokens_decoder": {
5
  "0": {
6
  "content": "<unk>",
7
  "lstrip": false,
8
+ "normalized": true,
9
  "rstrip": false,
10
  "single_word": false,
11
  "special": true
 
13
  "1": {
14
  "content": "<s>",
15
  "lstrip": false,
16
+ "normalized": true,
17
  "rstrip": false,
18
  "single_word": false,
19
  "special": true
 
21
  "2": {
22
  "content": "</s>",
23
  "lstrip": false,
24
+ "normalized": true,
25
  "rstrip": false,
26
  "single_word": false,
27
  "special": true
28
+ },
29
+ "32000": {
30
+ "content": "<|im_end|>",
31
+ "lstrip": true,
32
+ "normalized": true,
33
+ "rstrip": true,
34
+ "single_word": false,
35
+ "special": false
36
+ },
37
+ "32001": {
38
+ "content": "<|im_start|>",
39
+ "lstrip": true,
40
+ "normalized": true,
41
+ "rstrip": true,
42
+ "single_word": false,
43
+ "special": false
44
  }
45
  },
46
+ "additional_special_tokens": [
47
+ "<unk>",
48
+ "<s>",
49
+ "</s>"
50
+ ],
51
  "bos_token": "<s>",
 
52
  "clean_up_tokenization_spaces": false,
53
  "eos_token": "</s>",
54
  "legacy": true,
55
+ "model_max_length": 32768,
56
  "pad_token": "</s>",
57
+ "padding_side": "left",
58
  "sp_model_kwargs": {},
59
  "spaces_between_special_tokens": false,
60
  "tokenizer_class": "LlamaTokenizer",
61
  "unk_token": "<unk>",
62
+ "use_default_system_prompt": true
63
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b5a41b7059f78233f8ead463e41ea663b554eec64483ebebae72afd9a33e9d1c
3
  size 4920
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e5885f0dab7b13121d6f1d357a85c0b42f6d3ad93845f728876296e96d1601c
3
  size 4920