jtatman committed on
Commit
74f3d29
1 Parent(s): 9c93d22

Training in progress, step 200

adapter_config.json CHANGED
@@ -10,17 +10,23 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 64,
+  "lora_alpha": 32,
   "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
-  "modules_to_save": null,
+  "modules_to_save": [
+    "embed_tokens",
+    "lm_head"
+  ],
   "peft_type": "LORA",
   "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "query_key_value"
+    "query_key_value",
+    "dense_4h_to_h",
+    "dense_h_to_4h",
+    "dense"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d86b6ad3ea44f94f57c888c35dd39d6be9acf8746b1ee81a891f280d1314a6ac
-size 1183112
+oid sha256:9cfbf85a88bd132dbca599b2801ab4e81ee9c388aa3c028c3b7815f2765f6a87
+size 4731832
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3723e0d4a88248bbec20abffa6f8cb683c5a4be32d58d3d573094a8e77b0f61d
+oid sha256:8807c1d78d4dc6797f28372f858f1bc8000fbf68ee4085845fa850083e832b42
 size 6072