shouray committed
Commit aa1dcfd · verified · 1 Parent(s): 8ddd2ca

shouray/Condition-Model

Files changed (21)
  1. README.md +82 -0
  2. adapter_config.json +31 -0
  3. adapter_model.safetensors +3 -0
  4. runs/Jun14_15-03-14_6574e26d2cdf/events.out.tfevents.1718377616.6574e26d2cdf.1380.0 +3 -0
  5. runs/Jun17_23-57-11_3a79ca44463a/events.out.tfevents.1718668633.3a79ca44463a.318.1 +3 -0
  6. runs/Jun17_23-58-31_3a79ca44463a/events.out.tfevents.1718668712.3a79ca44463a.318.2 +3 -0
  7. runs/Jun18_00-00-11_3a79ca44463a/events.out.tfevents.1718668812.3a79ca44463a.318.3 +3 -0
  8. runs/Jun18_00-00-28_3a79ca44463a/events.out.tfevents.1718668829.3a79ca44463a.318.4 +3 -0
  9. runs/Jun18_00-02-31_3a79ca44463a/events.out.tfevents.1718668952.3a79ca44463a.318.5 +3 -0
  10. runs/Jun18_00-03-20_3a79ca44463a/events.out.tfevents.1718669000.3a79ca44463a.318.6 +3 -0
  11. runs/Jun18_00-06-59_3a79ca44463a/events.out.tfevents.1718669220.3a79ca44463a.318.7 +3 -0
  12. runs/Jun18_20-00-42_cc2260668183/events.out.tfevents.1718740843.cc2260668183.461.1 +3 -0
  13. runs/Jun18_20-04-17_cc2260668183/events.out.tfevents.1718741058.cc2260668183.461.2 +3 -0
  14. runs/Jun18_20-05-29_cc2260668183/events.out.tfevents.1718741129.cc2260668183.461.3 +3 -0
  15. runs/Jun18_20-06-24_cc2260668183/events.out.tfevents.1718741185.cc2260668183.461.4 +3 -0
  16. runs/Jun18_20-10-43_cc2260668183/events.out.tfevents.1718741444.cc2260668183.461.5 +3 -0
  17. runs/Jun18_20-14-05_cc2260668183/events.out.tfevents.1718741645.cc2260668183.461.6 +3 -0
  18. runs/Jun18_20-14-14_cc2260668183/events.out.tfevents.1718741655.cc2260668183.461.7 +3 -0
  19. runs/Jun18_20-14-20_cc2260668183/events.out.tfevents.1718741660.cc2260668183.461.8 +3 -0
  20. runs/Jun18_20-15-38_cc2260668183/events.out.tfevents.1718741738.cc2260668183.461.9 +3 -0
  21. training_args.bin +3 -0
README.md ADDED
@@ -0,0 +1,82 @@
+ ---
+ license: llama2
+ library_name: peft
+ tags:
+ - generated_from_trainer
+ base_model: TheBloke/Llama-2-13B-chat-GPTQ
+ model-index:
+ - name: qlora
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # qlora
+
+ This model is a fine-tuned version of [TheBloke/Llama-2-13B-chat-GPTQ](https://huggingface.co/TheBloke/Llama-2-13B-chat-GPTQ) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.2281
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
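
The card leaves the description and intended uses blank. As a minimal sketch (not part of the committed card), the adapter added in this commit could be loaded on top of its GPTQ base model roughly as follows; the repository id `shouray/Condition-Model` is taken from this commit's metadata, the prompt and generation settings are illustrative assumptions, and loading the GPTQ checkpoint additionally requires a GPTQ backend such as optimum/auto-gptq.

```python
# Hedged sketch, not from the model card: attach this LoRA adapter to the
# GPTQ base model named in adapter_config.json. Prompt and generation
# settings below are illustrative assumptions.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "TheBloke/Llama-2-13B-chat-GPTQ"  # base model from the card / adapter config
adapter_id = "shouray/Condition-Model"      # this repository

tokenizer = AutoTokenizer.from_pretrained(base_id)
# Loading a GPTQ checkpoint requires a GPTQ backend (e.g. optimum + auto-gptq).
base_model = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")
model = PeftModel.from_pretrained(base_model, adapter_id)

inputs = tokenizer("Example prompt", return_tensors="pt").to(base_model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```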
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training (a rough `TrainingArguments` sketch follows this list):
+ - learning_rate: 0.0002
+ - train_batch_size: 4
+ - eval_batch_size: 4
+ - seed: 42
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 16
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 2
+ - training_steps: 30
+ - mixed_precision_training: Native AMP
+
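As a rough illustration only, the hyperparameters listed above map approximately to the `transformers.TrainingArguments` sketch below; the output directory is an assumption, and the exact Trainer setup used for this run is not recorded in the card.

```python
# Hedged sketch: TrainingArguments roughly matching the hyperparameters
# listed above; output_dir is an assumption, not a recorded value.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="qlora",             # assumed; matches the model name only
    learning_rate=2e-4,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=4,  # 4 x 4 = total train batch size of 16
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=2,
    max_steps=30,
    fp16=True,                      # "Native AMP" mixed precision
    optim="adamw_torch",            # Adam with betas=(0.9, 0.999), eps=1e-08
)
```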
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss |
+ |:-------------:|:-----:|:----:|:---------------:|
+ | 1.7256 | 1.0 | 1 | 2.3492 |
+ | 0.8422 | 2.0 | 3 | 2.0594 |
+ | 0.7278 | 3.0 | 5 | 1.6949 |
+ | 1.2299 | 4.0 | 6 | 1.5504 |
+ | 1.1219 | 5.0 | 7 | 1.4207 |
+ | 0.4884 | 6.0 | 9 | 1.2156 |
+ | 0.4229 | 7.0 | 11 | 0.9934 |
+ | 0.7082 | 8.0 | 12 | 0.8837 |
+ | 0.6263 | 9.0 | 13 | 0.7754 |
+ | 0.2502 | 10.0 | 15 | 0.5958 |
+ | 0.1915 | 11.0 | 17 | 0.4347 |
+ | 0.2841 | 12.0 | 18 | 0.3766 |
+ | 0.2395 | 13.0 | 19 | 0.3339 |
+ | 0.0959 | 14.0 | 21 | 0.2914 |
+ | 0.0808 | 15.0 | 23 | 0.2631 |
+ | 0.1391 | 16.0 | 24 | 0.2533 |
+ | 0.13 | 17.0 | 25 | 0.2434 |
+ | 0.0589 | 18.0 | 27 | 0.2357 |
+ | 0.0558 | 19.0 | 29 | 0.2298 |
+ | 0.1076 | 20.0 | 30 | 0.2281 |
+
+
+ ### Framework versions
+
+ - PEFT 0.11.1
+ - Transformers 4.41.2
+ - Pytorch 2.1.0+cu121
+ - Datasets 2.20.0
+ - Tokenizers 0.19.1
adapter_config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "TheBloke/Llama-2-13B-chat-GPTQ",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_dropout": 0.05,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 8,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "k_proj",
+     "q_proj",
+     "o_proj",
+     "v_proj"
+   ],
+   "task_type": "CAUSAL_LM",
+   "use_dora": false,
+   "use_rslora": false
+ }
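
For reference, the JSON above corresponds roughly to the `peft.LoraConfig` sketch below (rank 8, alpha 32, dropout 0.05, LoRA on the attention projections). This is reconstructed from the config file, not taken from the original training script, which is not part of this commit.

```python
# Sketch reconstructed from adapter_config.json above; only fields that
# differ from PEFT defaults are set. Not the original training script.
from peft import LoraConfig

lora_config = LoraConfig(
    r=8,
    lora_alpha=32,
    lora_dropout=0.05,
    bias="none",
    target_modules=["k_proj", "q_proj", "o_proj", "v_proj"],
    task_type="CAUSAL_LM",
)
```

Such a config would typically be attached to the (quantized) base model with `peft.get_peft_model` before training.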
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:32cb0c34b918a098f8049525bd83fd8078a5f2306024537195072170e79b57fd
+ size 52471504
runs/Jun14_15-03-14_6574e26d2cdf/events.out.tfevents.1718377616.6574e26d2cdf.1380.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04b06ee59affdc2a1d31340d635755269d281d9b4543f7894d102e20ae5ca1fa
+ size 5771
runs/Jun17_23-57-11_3a79ca44463a/events.out.tfevents.1718668633.3a79ca44463a.318.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99065e99fba4f9fc1ffdaeb9bb6782798827225a49a47c2e5b9a78943f8f7bcc
+ size 5564
runs/Jun17_23-58-31_3a79ca44463a/events.out.tfevents.1718668712.3a79ca44463a.318.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00b5100508fd5afa0a0f4ee9570fb4904409021b1f5094e117381fe7912c130c
+ size 5564
runs/Jun18_00-00-11_3a79ca44463a/events.out.tfevents.1718668812.3a79ca44463a.318.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af856664c95bce799efd6b40f5cd25056dd1b2a7bce1d879a2d50157d8cb0dc4
+ size 5564
runs/Jun18_00-00-28_3a79ca44463a/events.out.tfevents.1718668829.3a79ca44463a.318.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd0753e6799370d304bf56fd428a5a15f878c86eba217311085b29d6b2ca051f
+ size 5564
runs/Jun18_00-02-31_3a79ca44463a/events.out.tfevents.1718668952.3a79ca44463a.318.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c6e4b4adc1b3cf8b571fe8fda0335317eb9206ade3a065f08fe7c52be535837
+ size 5564
runs/Jun18_00-03-20_3a79ca44463a/events.out.tfevents.1718669000.3a79ca44463a.318.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a5e680700c3db4c5315aca62baf81ddc42659a002dad15872eb6deee52afcefa
+ size 5771
runs/Jun18_00-06-59_3a79ca44463a/events.out.tfevents.1718669220.3a79ca44463a.318.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90112226e94d8cda0fde6a1f0c9e91d1015485cadf96786e5dd17995550bb5b3
+ size 9223
runs/Jun18_20-00-42_cc2260668183/events.out.tfevents.1718740843.cc2260668183.461.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b7ae18c73a9a449285ed3e683a3e9f472cef8435bd2bffd7e6f93c5e75f3755
+ size 5564
runs/Jun18_20-04-17_cc2260668183/events.out.tfevents.1718741058.cc2260668183.461.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a044863d535b97cab75fa2ff6682204dfe393b8587a72ee4205fb049f1a75182
+ size 5564
runs/Jun18_20-05-29_cc2260668183/events.out.tfevents.1718741129.cc2260668183.461.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0100e1bfc5be4654a83955324f8964a6d062a32568aba88f8d4f02a4d7281123
+ size 5564
runs/Jun18_20-06-24_cc2260668183/events.out.tfevents.1718741185.cc2260668183.461.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c66b6376c9e73f4d350fcd2e9776cdd16f631da712f2e4a68b2919d22304e183
+ size 5564
runs/Jun18_20-10-43_cc2260668183/events.out.tfevents.1718741444.cc2260668183.461.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56f54a7b40de9edcbd0a6d221d352313343ef3ec8d45ef723cc3b57228c5b601
+ size 5564
runs/Jun18_20-14-05_cc2260668183/events.out.tfevents.1718741645.cc2260668183.461.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8530960a042572599d911591f721267c89aa1ba350144ff3ea3ca3694f7d3fe
+ size 5564
runs/Jun18_20-14-14_cc2260668183/events.out.tfevents.1718741655.cc2260668183.461.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29c3853327c10c263ac858aa1773533761ab0f8b36bdc0f9132634c1aa3b2236
+ size 5564
runs/Jun18_20-14-20_cc2260668183/events.out.tfevents.1718741660.cc2260668183.461.8 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fefd3e377b10198bbcb3c01ecb1beb6466623af1c1341d19074ba57d31a3070
+ size 5564
runs/Jun18_20-15-38_cc2260668183/events.out.tfevents.1718741738.cc2260668183.461.9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f978b6aa0c784faf4f2453f173543aa924edf140575ec48add3cce1cec0b6681
+ size 15372
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c87801c6747a4cd159adbfe4f2017b4086ce356a7f6327f2e2eb3f5e8fd66427
+ size 5112