Wimplex committed on
Commit 85ae73c · 1 parent: 454e17f

Training in progress, step 2000

Files changed (3):
  1. adapter_config.json +5 -5
  2. adapter_model.bin +2 -2
  3. training_args.bin +1 -1
adapter_config.json CHANGED
@@ -1,21 +1,21 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "EleutherAI/pythia-70m",
+  "base_model_name_or_path": "microsoft/phi-1_5",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 32,
+  "lora_alpha": 16,
   "lora_dropout": 0.05,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 64,
+  "r": 16,
   "revision": null,
   "target_modules": [
-    "query_key_value",
-    "dense"
+    "Wqkv",
+    "out_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
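The updated adapter_config.json corresponds to a PEFT LoRA setup roughly like the minimal sketch below. Only the values visible in the diff come from this commit; the base-model loading and the trust_remote_code flag are assumptions for illustration, not part of the repository.

from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

# Base model switched from EleutherAI/pythia-70m to microsoft/phi-1_5 in this commit.
# trust_remote_code is an assumption; phi-1_5 originally shipped custom modeling code.
base = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5", trust_remote_code=True)

lora_config = LoraConfig(
    r=16,                                 # rank, down from 64 in the previous config
    lora_alpha=16,                        # scaling factor, down from 32
    lora_dropout=0.05,
    bias="none",
    target_modules=["Wqkv", "out_proj"],  # phi-1_5 attention projections
    task_type="CAUSAL_LM",
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()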
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1efee0c1c3eef8f6a45560e517e1ca09e43ea7c74c1880a6d2bd35356f34c65c
-size 4727225
+oid sha256:9be13e33d54a0cbb570af1eaf02c154a136f7c4f620a98a9021dc2aafe075e35
+size 18907665
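A minimal sketch of loading the adapter weights stored in adapter_model.bin back onto the base model; the checkpoint path is a hypothetical placeholder, since the repository id is not shown in this commit.

from transformers import AutoModelForCausalLM
from peft import PeftModel

# "path/to/this-checkpoint" stands in for the actual adapter repo id or local folder.
base = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5", trust_remote_code=True)
model = PeftModel.from_pretrained(base, "path/to/this-checkpoint")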
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:181354fcea9953a4a4f14b9101d6e42b5eb3140677d5373a41346b51f6989ff2
+oid sha256:11dcb1892b19ac42695f0897ebd2b7fcdcfd970bc5528d54b38fb713da59a219
 size 4027