dainis-boumber committed
Commit e6aaed9
1 Parent(s): 025ce7a

Training in progress, step 50

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
+  "base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -19,14 +19,14 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "lm_head",
+    "down_proj",
     "gate_proj",
+    "k_proj",
     "v_proj",
-    "o_proj",
-    "up_proj",
     "q_proj",
-    "down_proj",
-    "k_proj",
-    "lm_head"
+    "o_proj",
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8183447d60db4dba5568e2198a966654535cca3d00cb0918917c1b9589ed8826
-size 346667168
+oid sha256:a3dbfd64f4eb5de4ff2dcb308aaca0243ba8a5d4396e7b9727c9d24700171120
+size 609389712
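These are Git LFS pointer files, not the weights themselves; the oid is the SHA-256 of the actual payload, so a downloaded checkpoint can be verified against it. A small sketch, with the local filename assumed:

import hashlib

# Hypothetical local copy of the adapter weights from this commit.
with open("adapter_model.safetensors", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

# Must match the oid in the new LFS pointer above.
assert digest == "a3dbfd64f4eb5de4ff2dcb308aaca0243ba8a5d4396e7b9727c9d24700171120"

The payload also grows from 346,667,168 to 609,389,712 bytes, which suggests something beyond the reordered module list (e.g., rank or saved modules) changed between checkpoints.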
tokenizer_config.json CHANGED
@@ -29,15 +29,17 @@
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
-  "model_max_length": 128,
+  "model_max_length": 256,
   "pad_token": "</s>",
   "padding_side": "left",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt": false
+  "use_default_system_prompt": false,
+  "use_reentrant": false
 }
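The new chat_template renders conversations in the Mistral-Instruct [INST] ... [/INST] format and enforces strictly alternating user/assistant roles, while model_max_length doubles from 128 to 256. A minimal usage sketch, assuming the tokenizer is loaded from this adapter's v0.2 base model:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")

messages = [
    {"role": "user", "content": "Hello!"},
    {"role": "assistant", "content": "Hi there."},
    {"role": "user", "content": "Summarize LoRA in one line."},
]

# Renders:
# <s>[INST] Hello! [/INST]Hi there.</s>[INST] Summarize LoRA in one line. [/INST]
# Non-alternating roles (or a "system" role) raise the template's exception.
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)

The use_reentrant key is a gradient-checkpointing flag that appears to have been serialized into the tokenizer config along the way; it has no effect on tokenization.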
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:39e4028097a7fb39d9b7760538d903cc2370e073a8cb659bad732a877fa55139
+oid sha256:f69c6253546c579f18ddfa0a4925fca272336ad65790c31d36c88028becb590f
 size 4664