UphamProjects committed on
Commit 58e1880 · verified · 1 Parent(s): 5abacc2

Training in progress, step 10

adapter_config.json CHANGED
@@ -20,10 +20,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "x_proj",
-    "embeddings",
     "out_proj",
-    "in_proj"
+    "in_proj",
+    "x_proj",
+    "embeddings"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6969a482e01275ddd19f55e6644f8801cde7ae24bfab5e398dfad2c1bea50f22
+oid sha256:9a221a3a119031e65586cdb93918249ba0e79472acdd9e00d57ad6fd4f0d47bc
 size 27307856
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:08960d36bc4dafad4f33c9664d9af67d84c5dc42279d2d944fdc6eaf64846cb1
+oid sha256:556f4fb1955bf350eb1f8abaf36a7da0abc5e5605f36b37548f0f6fa2f04c041
 size 4920
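The two pointer changes above follow the Git LFS pointer format: a spec version line, an oid sha256 line, and a size line. A downloaded object can be checked against its pointer by comparing the byte count and the SHA-256 digest, as in the sketch below. The local file path is hypothetical; the expected oid and size are the new values from the training_args.bin pointer in this commit.

```python
# Sketch: verify a local copy of an LFS-tracked file against its pointer.
import hashlib
from pathlib import Path

EXPECTED_OID = "556f4fb1955bf350eb1f8abaf36a7da0abc5e5605f36b37548f0f6fa2f04c041"
EXPECTED_SIZE = 4920

path = Path("training_args.bin")  # hypothetical local download path
data = path.read_bytes()

assert len(data) == EXPECTED_SIZE, "size does not match the LFS pointer"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 does not match the LFS pointer"
print("LFS pointer verified")
```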