debisoft committed on
Commit 6188486 · verified · 1 Parent(s): e3804b1

debisoft/mistral-7b-thinking-function_calling-logic-capturing-V0

adapter_config.json CHANGED
@@ -24,15 +24,15 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "k_proj",
+    "o_proj",
+    "v_proj",
     "q_proj",
     "gate_proj",
-    "lm_head",
-    "o_proj",
-    "embed_tokens",
     "down_proj",
-    "v_proj",
-    "k_proj",
-    "up_proj"
+    "lm_head",
+    "up_proj",
+    "embed_tokens"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2547fc59c1e2d341a3d803ab256bc98c8f8c3ed38e9a12ff6cfcd5ad02b8d77c
+oid sha256:07588c828f8b358dcdae915944471c483c1be5e214af9dd4eb4f01937f67505c
 size 707227664
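adapter_model.safetensors is tracked by Git LFS, so the commit only rewrites the pointer file: the sha256 oid changes to point at the new weights while the byte size happens to stay identical. A hedged sketch, using only the standard library, of verifying a downloaded copy against the new oid (the local file path is an assumption):

    import hashlib

    EXPECTED_OID = "07588c828f8b358dcdae915944471c483c1be5e214af9dd4eb4f01937f67505c"

    h = hashlib.sha256()
    with open("adapter_model.safetensors", "rb") as f:  # assumed local path
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    assert h.hexdigest() == EXPECTED_OID, "checksum mismatch"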
runs/Jun03_17-12-52_ada-B550-AORUS-ELITE/events.out.tfevents.1748941981.ada-B550-AORUS-ELITE ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97c3f3c2b8bb85b499d276e07610a34d3f987633a5348e1b41598246ff3420b8
+size 47420
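The added file is a TensorBoard event log from the training run, also stored as an LFS pointer. A sketch, assuming the tensorboard package and a local clone of the repo, of listing the scalar tags the log contains:

    from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

    ea = EventAccumulator(
        "runs/Jun03_17-12-52_ada-B550-AORUS-ELITE/"
        "events.out.tfevents.1748941981.ada-B550-AORUS-ELITE"
    )
    ea.Reload()  # parse the event file
    print(ea.Tags().get("scalars", []))  # e.g. train/loss, train/learning_rate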
special_tokens_map.json CHANGED
@@ -18,7 +18,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "eos_token": "<eos>",
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "pad_token": {
     "content": "<pad>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -6267,7 +6267,7 @@
   "bos_token": "<s>",
   "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{{ '<start_of_turn>' + message['role'] + '\n' + message['content'] | trim + '<end_of_turn><eos>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<eos>",
+  "eos_token": "</s>",
   "extra_special_tokens": {},
   "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e050d88ea9cff38509f38f0a87f8864bd3e02ed7c0c21537e2938309dd9bbf8f
+oid sha256:9a33f7f031d60eb405b2714a7fa15333360b85a1eeb11b92c1ad6103c937965a
 size 5688
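training_args.bin is likewise an LFS pointer; the underlying binary is a pickled TrainingArguments object written by the transformers Trainer, so its oid changes whenever any training hyperparameter (or the library version) does. A hedged sketch of inspecting a local copy:

    import torch
    from transformers import TrainingArguments  # transformers must be importable for unpickling

    # weights_only=False because this is a pickled Python object, not a tensor file.
    args = torch.load("training_args.bin", weights_only=False)
    print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)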