debisoft committed
Commit f32cdc0 · verified · parent: 3a6fbb3

debisoft/mistral-7b-thinking-function_calling-logic-capturing-V0

adapter_config.json CHANGED
@@ -25,14 +25,14 @@
   "revision": null,
   "target_modules": [
     "k_proj",
-    "o_proj",
+    "embed_tokens",
     "v_proj",
+    "o_proj",
     "q_proj",
-    "gate_proj",
-    "down_proj",
-    "lm_head",
     "up_proj",
-    "embed_tokens"
+    "lm_head",
+    "down_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:07588c828f8b358dcdae915944471c483c1be5e214af9dd4eb4f01937f67505c
+oid sha256:b325bf97fa2d826f915bad6b63b98063eaccc722cd2814f71fa77f8e07f1c388
 size 707227664
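
Both LFS pointers report the same 707227664-byte payload; only the sha256 changed, which is consistent with retrained adapter weights of identical tensor shapes and layout. A small verification sketch (not part of the repo), assuming the file has already been downloaded next to the script:

# Sketch: check a downloaded file against the sha256 in its LFS pointer.
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "b325bf97fa2d826f915bad6b63b98063eaccc722cd2814f71fa77f8e07f1c388"
assert sha256_of("adapter_model.safetensors") == expected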
runs/Jun03_19-07-14_ada-B550-AORUS-ELITE/events.out.tfevents.1748948843.ada-B550-AORUS-ELITE ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9aa62bf1543b02cd22991b7ce206c0cd327ee6ae4314f18e34a4d724819fad2
+size 54469
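
The added file is a TensorBoard event log from the training run on the ada-B550-AORUS-ELITE host. One way to inspect it, sketched with TensorBoard's event-file reader; which tags are present depends on what the run actually logged:

# Sketch: list what this run logged, via TensorBoard's event reader.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jun03_19-07-14_ada-B550-AORUS-ELITE")
acc.Reload()
print(acc.Tags())  # e.g. scalar tags such as train/loss, if logged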
special_tokens_map.json CHANGED
@@ -18,7 +18,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "eos_token": "<eos>",
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "pad_token": {
     "content": "<pad>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -6267,7 +6267,7 @@
   "bos_token": "<s>",
   "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{{ '<start_of_turn>' + message['role'] + '\n' + message['content'] | trim + '<end_of_turn><eos>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<eos>",
+  "eos_token": "</s>",
   "extra_special_tokens": {},
   "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9a33f7f031d60eb405b2714a7fa15333360b85a1eeb11b92c1ad6103c937965a
+oid sha256:8895386edbc5c4fdefe1567d7bf209b8371bcdce0d1955d1836da9029569447b
 size 5688
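
training_args.bin holds the run's pickled transformers TrainingArguments (same 5688-byte size, new hash). A hedged sketch of inspecting it; since the file is a pickle, only load copies you trust:

# Sketch: inspect the pickled TrainingArguments. weights_only=False is
# needed on recent torch versions because this is a pickle, not tensors.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs)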