cmwalker00 committed
Commit b479161 · Parent: e32e913

Training in progress, epoch 0

Files changed (3)
  1. adapter_config.json +1 -1
  2. adapter_model.bin +1 -1
  3. training_args.bin +1 -1
adapter_config.json CHANGED
@@ -8,7 +8,7 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "lora_alpha": 16,
-  "lora_dropout": 0.075,
+  "lora_dropout": 0.07,
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 16,
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:48d6a5dad0906fc36b94cfc388f06597a5e2f60f2ac83dd53815b24e4e38cb86
+oid sha256:87e993893a78685878d359d66756ba7e6a768bb1a8049abda1f2314e3343d324
 size 18908110
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:78954ab6cda0b5d84de63e09cd6d9ae88ae95b6082f8041ab7644133f6ec4af9
+oid sha256:16dda20bf989c80127767e22459d516cbf15eaaaf61338bffd6c23e4693e1c59
 size 4536
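The two .bin diffs above only change Git LFS pointer files: the actual weights live in LFS storage, and each pointer records the blob's SHA-256 (oid) and byte size. A small sketch of how one could check that a downloaded blob matches its pointer, assuming the pointer text and the binary file are both available locally (the file paths are illustrative, not part of this repository):

```python
# Sketch: verify a downloaded LFS object against the oid/size recorded in its pointer file.
# Paths are illustrative assumptions, not files named in the commit above.
import hashlib

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Read a git-lfs pointer file into a dict of its key/value lines."""
    fields = {}
    with open(pointer_path, "r") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify_blob(pointer_path: str, blob_path: str) -> bool:
    """Return True if the blob's size and SHA-256 match the pointer."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    h = hashlib.sha256()
    size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return size == expected_size and h.hexdigest() == expected_oid

# Example (paths assumed): verify_blob("adapter_model.bin.pointer", "adapter_model.bin")
```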