sshetty committed
Commit
f977164
1 Parent(s): f6a0a2b

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -11,7 +11,7 @@
   "layers_to_transform": null,
   "loftq_config": {},
   "lora_alpha": 32,
-  "lora_dropout": 0.03,
+  "lora_dropout": 0.04,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": [
@@ -19,15 +19,18 @@
     "embed_tokens"
   ],
   "peft_type": "LORA",
-  "r": 128,
+  "r": 64,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
-    "qkv_proj",
-    "o_proj",
+    "up_proj",
     "ffn",
-    "gate_up_proj"
+    "v_proj",
+    "k_proj",
+    "o_proj",
+    "q_proj",
+    "down_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dd8e321d57a854164962906438f340a1e8a6b88c1a2be4d4e78ac54093b4532a
-size 1593347088
+oid sha256:33d0d0edb6b4262c48debb3697f3df64b3798a79851e3c25022a2851f13d8b34
+size 930628768
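The LFS pointer update shows the adapter weights shrinking from 1,593,347,088 bytes (~1.59 GB) to 930,628,768 bytes (~0.93 GB), consistent with the lower rank. A hedged sketch of loading the updated adapter with PEFT; both identifiers below are placeholders, since the diff names neither the base checkpoint nor the Hub repo:

from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("org/base-model")    # placeholder
model = PeftModel.from_pretrained(base, "sshetty/adapter-repo")  # placeholder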
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:44f35ee3d7cbd61eb1e008919e1c3944dcb609ae12f3499f4159446d5ddd698f
+oid sha256:57d38cf52e7a7481fc8fe963b55940e08a750105f3536c472c41f20b0f09a93c
 size 5368
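training_args.bin keeps its 5368-byte size but gets a new oid, meaning some field of the serialized TrainingArguments changed. To see which, note the file is written by Trainer via torch.save(), so it can be inspected like this (recent PyTorch requires weights_only=False for non-tensor pickles):

import torch

# training_args.bin is a plain pickle of TrainingArguments,
# hence weights_only=False.
args = torch.load("training_args.bin", weights_only=False)
print(args)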