hagelk committed on
Commit
1f4a3b7
·
verified ·
1 Parent(s): 46a4db9

Upload folder using huggingface_hub

Browse files
.locks/models--psd401--llama3-2-1b-Solutions/1580a1e1d09d5cb04074affc7de2586cf98791d5.lock ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/README.md ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/adapter_config.json ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/adapter_model.safetensors ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/config.json ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/generation_config.json ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/special_tokens_map.json ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/tokenizer.json ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/tokenizer_config.json ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/.no_exist/46a4db908150692764409d7facea15df79128598/training_args.bin ADDED
File without changes
models--psd401--llama3-2-1b-Solutions/blobs/1580a1e1d09d5cb04074affc7de2586cf98791d5 ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "meta-llama/Llama-3.2-1B-Instruct",
3
+ "project_name": "llama3-2-1b-Solutions",
4
+ "data_path": "psd401/PSD401SolutionsData/data/dataset.jsonl",
5
+ "train_split": "train",
6
+ "valid_split": null,
7
+ "add_eos_token": true,
8
+ "block_size": -1,
9
+ "model_max_length": 2048,
10
+ "padding": "right",
11
+ "trainer": "sft",
12
+ "use_flash_attention_2": false,
13
+ "log": "tensorboard",
14
+ "disable_gradient_checkpointing": false,
15
+ "logging_steps": -1,
16
+ "eval_strategy": "epoch",
17
+ "save_total_limit": 1,
18
+ "auto_find_batch_size": false,
19
+ "mixed_precision": "bf16",
20
+ "lr": 0.00001,
21
+ "epochs": 3,
22
+ "batch_size": 1,
23
+ "warmup_ratio": 0.1,
24
+ "gradient_accumulation": 8,
25
+ "optimizer": "paged_adamw_8bit",
26
+ "scheduler": "cosine",
27
+ "weight_decay": 0.0,
28
+ "max_grad_norm": 1.0,
29
+ "seed": 42,
30
+ "chat_template": "tokenizer",
31
+ "quantization": "int8",
32
+ "target_modules": "all-linear",
33
+ "merge_adapter": true,
34
+ "peft": true,
35
+ "lora_r": 16,
36
+ "lora_alpha": 32,
37
+ "lora_dropout": 0.05,
38
+ "model_ref": null,
39
+ "dpo_beta": 0.1,
40
+ "max_prompt_length": 128,
41
+ "max_completion_length": null,
42
+ "prompt_text_column": null,
43
+ "text_column": "messages",
44
+ "rejected_text_column": null,
45
+ "push_to_hub": true,
46
+ "username": null,
47
+ "token": null,
48
+ "unsloth": false,
49
+ "distributed_backend": null
50
+ }
models--psd401--llama3-2-1b-Solutions/refs/main CHANGED
@@ -1 +1 @@
1
- 0b2a76d7ecedea05b1009b450a47dc843b4b1b53
 
1
+ 46a4db908150692764409d7facea15df79128598
models--psd401--llama3-2-1b-Solutions/snapshots/46a4db908150692764409d7facea15df79128598/training_params.json ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "meta-llama/Llama-3.2-1B-Instruct",
3
+ "project_name": "llama3-2-1b-Solutions",
4
+ "data_path": "psd401/PSD401SolutionsData/data/dataset.jsonl",
5
+ "train_split": "train",
6
+ "valid_split": null,
7
+ "add_eos_token": true,
8
+ "block_size": -1,
9
+ "model_max_length": 2048,
10
+ "padding": "right",
11
+ "trainer": "sft",
12
+ "use_flash_attention_2": false,
13
+ "log": "tensorboard",
14
+ "disable_gradient_checkpointing": false,
15
+ "logging_steps": -1,
16
+ "eval_strategy": "epoch",
17
+ "save_total_limit": 1,
18
+ "auto_find_batch_size": false,
19
+ "mixed_precision": "bf16",
20
+ "lr": 0.00001,
21
+ "epochs": 3,
22
+ "batch_size": 1,
23
+ "warmup_ratio": 0.1,
24
+ "gradient_accumulation": 8,
25
+ "optimizer": "paged_adamw_8bit",
26
+ "scheduler": "cosine",
27
+ "weight_decay": 0.0,
28
+ "max_grad_norm": 1.0,
29
+ "seed": 42,
30
+ "chat_template": "tokenizer",
31
+ "quantization": "int8",
32
+ "target_modules": "all-linear",
33
+ "merge_adapter": true,
34
+ "peft": true,
35
+ "lora_r": 16,
36
+ "lora_alpha": 32,
37
+ "lora_dropout": 0.05,
38
+ "model_ref": null,
39
+ "dpo_beta": 0.1,
40
+ "max_prompt_length": 128,
41
+ "max_completion_length": null,
42
+ "prompt_text_column": null,
43
+ "text_column": "messages",
44
+ "rejected_text_column": null,
45
+ "push_to_hub": true,
46
+ "username": null,
47
+ "token": null,
48
+ "unsloth": false,
49
+ "distributed_backend": null
50
+ }