Hyeonseo committed on
Commit
286fe86
1 Parent(s): aed8ace

Training in progress, epoch 1

Browse files
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
20
  "rank_pattern": {},
21
  "revision": null,
22
  "target_modules": [
23
- "up_proj",
24
- "down_proj",
25
  "k_proj",
 
26
  "o_proj",
27
  "gate_proj",
28
- "v_proj",
29
- "q_proj"
 
30
  ],
31
  "task_type": "CAUSAL_LM",
32
  "use_dora": false,
 
20
  "rank_pattern": {},
21
  "revision": null,
22
  "target_modules": [
 
 
23
  "k_proj",
24
+ "v_proj",
25
  "o_proj",
26
  "gate_proj",
27
+ "q_proj",
28
+ "up_proj",
29
+ "down_proj"
30
  ],
31
  "task_type": "CAUSAL_LM",
32
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4cbd3339a61a558732ac86bea40109f12e2d59a50d3fc2c082c927b3dcb302aa
3
  size 864368280
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4ba1a97d7a98857db36ac4fd747a223d617f3b48ddf15d98b6807540920c59fa
3
  size 864368280
runs/Sep07_13-12-59_82d160be2f24/events.out.tfevents.1725714781.82d160be2f24.5931.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b533ff648635ca95ba4e599a3fae05b286ab8c44bbd9ee46fb0a2b92b496184d
3
+ size 9877
runs/Sep07_13-25-06_82d160be2f24/events.out.tfevents.1725715508.82d160be2f24.10496.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c45094c8bdb3d9231675cc79ae7fbe69f6aab9b3928eb6fb6bb41c114e18bb54
3
+ size 11824
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f28bd39abb26b2ab2a5de32f49af3c2a942b2caf79937c01ac8df6cd34861d16
3
  size 5560
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:79dfb5f26dd7c0e63e33f113a0c830cb3583e6bb1da169304845e13137663643
3
  size 5560