maedehm02 committed
Commit 3a7c9f2 · verified · 1 Parent(s): 00b94e5

Upload model

Files changed (2)
  1. adapter_config.json +8 -5
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "codellama/CodeLlama-7b-Instruct-hf",
+  "base_model_name_or_path": "google/codegemma-7b-it",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -15,14 +15,17 @@
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 64,
+  "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "o_proj",
     "k_proj",
-    "v_proj"
+    "up_proj",
+    "o_proj",
+    "gate_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7c87cdc0c2dbc92e3b7551c8c0f8749b3004f5f2f6e97cbed7f50e781eefee77
-size 268470272
+oid sha256:ab07bf583af4377c08e23887611ad78ebf9c693a3d38552b8ee7d48017c81687
+size 3345821320
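The adapter weights are stored via Git LFS, so only the pointer file (hash and size) changes in the diff. A minimal sketch for loading the updated adapter on top of its new CodeGemma base with PEFT, assuming a placeholder repo id for this adapter:

# Sketch: load the adapter on top of google/codegemma-7b-it.
# "maedehm02/<adapter-repo>" is a placeholder; substitute the actual repo id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "google/codegemma-7b-it", torch_dtype=torch.bfloat16
)
tokenizer = AutoTokenizer.from_pretrained("google/codegemma-7b-it")
model = PeftModel.from_pretrained(base, "maedehm02/<adapter-repo>")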