Delta-Vector committed on
Commit 7bfa786 · verified · 1 Parent(s): 6e671ea

Upload 32b.yml

Files changed (1)
  1. 32b.yml +104 -0
32b.yml ADDED
@@ -0,0 +1,104 @@
+ base_model: NewEden/Hamanasu-32B-V1
+ model_type: AutoModelForCausalLM
+ tokenizer_type: AutoTokenizer
+
+ hub_model_id: NewEden/Hamanasu-Lora-Instruct
+ hub_strategy: "all_checkpoints"
+ push_dataset_to_hub:
+ hf_use_auth_token: true
+
+ plugins:
+   - axolotl.integrations.liger.LigerPlugin
+ liger_rope: true
+ liger_rms_norm: true
+ liger_swiglu: true
+ liger_fused_linear_cross_entropy: true
+
+ load_in_8bit: false
+ load_in_4bit: false
+ strict: false
+
+ datasets:
+   - path: NewEden/Hydrus-R1-Thinking-Sharegpt
+     type: dan-chat-advanced
+   - path: PocketDoc/Dans-MemoryCore-CoreCurriculum-Small
+     type: dan-chat-advanced
+   - path: Nitral-AI/ARES-ShareGPT
+     type: dan-chat-advanced
+   - path: NewEden/Hydrus-HelpSteer2
+     type: dan-chat-advanced
+   - path: PocketDoc/Dans-Codemaxx-CodeFeedback-Conversations
+     type: dan-chat-advanced
+   - path: PocketDoc/Dans-Toolmaxx-Agent
+     type: dan-chat-advanced
+   - path: PocketDoc/Dans-Assistantmaxx-Tulu3-IF
+     type: dan-chat-advanced
+   - path: NewEden/Hydrus-SonnetOrca
+     type: dan-chat-advanced
+   - path: NewEden/Hydrus-Chat_error-Pure-Dove-sharegpt
+     type: dan-chat-advanced
+   - path: NewEden/No_Robots-R1-Filtered
+     type: dan-chat-advanced
+   - path: NewEden/GSM8K-R1-filtered
+     type: dan-chat-advanced
+
+ dataset_prepared_path: prepared_data
+ val_set_size: 0.0
+ output_dir: ./qwq-inst
+
+ sequence_len: 16384
+ sample_packing: true
+ pad_to_sequence_len: true
+
+ adapter: lora
+ lora_model_dir:
+ lora_r: 128
+ lora_alpha: 16
+ lora_dropout: 0.05
+ lora_target_modules:
+   - gate_proj
+   - down_proj
+   - up_proj
+   - q_proj
+   - v_proj
+   - k_proj
+   - o_proj
+
+ wandb_project: qwq
+ wandb_entity:
+ wandb_watch:
+ wandb_name: instruct-attempt-01
+ wandb_log_model:
+
+ gradient_accumulation_steps: 2
+ micro_batch_size: 1
+ num_epochs: 2
+ optimizer: paged_adamw_8bit
+ lr_scheduler: cosine
+ learning_rate: 2.85e-5
+
+ train_on_inputs: false
+ group_by_length: false
+ bf16: auto
+ fp16:
+ tf32: false
+
+ gradient_checkpointing: true
+ early_stopping_patience:
+ resume_from_checkpoint:
+ local_rank:
+ logging_steps: 1
+ xformers_attention:
+ flash_attention: true
+
+ warmup_steps: 40
+ evals_per_epoch:
+ eval_table_size:
+ eval_max_new_tokens:
+ saves_per_epoch: 2
+ debug:
+ deepspeed: deepspeed_configs/zero3_bf16.json
+ weight_decay: 0.02
+ fsdp:
+ fsdp_config:
+ special_tokens:
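
Usage note: a minimal sketch of how a config like this is typically run with Axolotl. The install extras, the fetch step, and running from a directory that contains deepspeed_configs/zero3_bf16.json are assumptions about the environment, not part of this commit:

# Assumption: Axolotl installed with the extras this config relies on
# (flash attention, DeepSpeed), plus Liger kernels for the LigerPlugin above
pip install --no-build-isolation "axolotl[flash-attn,deepspeed]" liger-kernel

# Assumption: a recent Axolotl CLI; copies the reference DeepSpeed configs
# (including zero3_bf16.json) into ./deepspeed_configs
axolotl fetch deepspeed_configs

# Launch LoRA fine-tuning; ZeRO-3 shards the 32B base model across GPUs
accelerate launch -m axolotl.cli.train 32b.yml

With deepspeed: deepspeed_configs/zero3_bf16.json set, the frozen base model is sharded under ZeRO-3 while only the LoRA adapters (r=128 over the attention and MLP projections) receive gradients, and hub_strategy: "all_checkpoints" pushes every saved checkpoint to NewEden/Hamanasu-Lora-Instruct.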