samiur-r committed
Commit de39ba5
1 Parent(s): ca81643

Upload folder using huggingface_hub

README.md CHANGED
@@ -1,5 +1,5 @@
 ---
-base_model: unsloth/llama-3-8b-instruct-bnb-4bit
+base_model: unsloth/llama-3-8b-bnb-4bit
 library_name: peft
 ---
 
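The README front matter now points the adapter at the non-instruct base checkpoint. Below is a minimal sketch, not part of this commit, of how the adapter would be loaded against the new base, assuming the standard transformers/peft APIs; the adapter repo path is a placeholder:

```python
# Sketch only: loading this LoRA adapter on the new base model.
# "path/to/this-adapter-repo" is a placeholder, not the real repo id.
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained(
    "unsloth/llama-3-8b-bnb-4bit",  # base_model after this commit
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained("unsloth/llama-3-8b-bnb-4bit")
model = PeftModel.from_pretrained(base, "path/to/this-adapter-repo")
```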
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "unsloth/llama-3-8b-instruct-bnb-4bit",
+  "base_model_name_or_path": "unsloth/llama-3-8b-bnb-4bit",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": "unsloth",
   "target_modules": [
-    "up_proj",
-    "k_proj",
-    "down_proj",
+    "q_proj",
     "v_proj",
     "o_proj",
-    "q_proj",
-    "gate_proj"
+    "k_proj",
+    "gate_proj",
+    "down_proj",
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8a587dd2c60be5a0ab8e33d199b581f143c112c608bb65b09bd07df0d88445ef
+oid sha256:a42b86a7d2759bbc563a27b9a0fdedf3f4fba753fa0cf4286568b42d6fcb28ef
 size 167832240
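adapter_model.safetensors is stored via git-lfs, so the diff only swaps the pointer's content hash; the size is unchanged at 167,832,240 bytes, consistent with retrained weights of the same shape. A small sketch for verifying a downloaded copy against the new sha256:

```python
# Sketch: check a downloaded adapter_model.safetensors against the
# sha256 recorded in the LFS pointer above.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "a42b86a7d2759bbc563a27b9a0fdedf3f4fba753fa0cf4286568b42d6fcb28ef"
assert sha256_of("adapter_model.safetensors") == expected
```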
special_tokens_map.json CHANGED
@@ -7,7 +7,7 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|eot_id|>",
+    "content": "<|end_of_text|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -2050,9 +2050,8 @@
     }
   },
   "bos_token": "<|begin_of_text|>",
-  "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|eot_id|>",
+  "eos_token": "<|end_of_text|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"