phungkhaccuong committed on
Commit 2848ca5 · verified · 1 Parent(s): 5d7f8bf

Training in progress, step 13

adapter_config.json CHANGED
@@ -20,10 +20,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "dense_4h_to_h",
     "dense",
     "dense_h_to_4h",
-    "query_key_value",
-    "dense_4h_to_h"
+    "query_key_value"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9546b89e837ef6d13d8fb91618c1723b8f5d2e3ee5b32fdd737230b6f1b7dd50
+oid sha256:e605cbd8dca2f289223b1144394cdea6eae0eb1991cd394a526331ed83717976
 size 25192496
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1f41e47fc02200d27e4cf05e5defb04ecf6266755d942dd800526cbdd2b2c060
+oid sha256:cf48085f8c9bd17c0613d7c6531743e17c4d3cf9ce00666044be4b86bdff4f89
 size 6776