Delta-Vector committed on
Commit
40a2b74
·
verified ·
1 Parent(s): 813badc

Create gemma3.yml

Browse files
Files changed (1) hide show
  1. gemma3.yml +128 -0
gemma3.yml ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ base_model: unsloth/gemma-3-12b-it
2
+ model_type: AutoModelForCausalLM
3
+ tokenizer_type: AutoTokenizer
4
+
5
+ load_in_8bit: false
6
+ load_in_4bit: false
7
+ strict: false
8
+
9
+ datasets:
10
+ - path: NewEden/LIMARP-Complexity
11
+ type: chat_template
12
+ roles_to_train: ["gpt"]
13
+ field_messages: conversations
14
+ message_field_role: from
15
+ message_field_content: value
16
+ train_on_eos: turn
17
+ - path: NewEden/Creative_Writing-Complexity
18
+ type: chat_template
19
+ roles_to_train: ["gpt"]
20
+ field_messages: conversations
21
+ message_field_role: from
22
+ message_field_content: value
23
+ train_on_eos: turn
24
+ - path: NewEden/wata-Oshi-No-prefix
25
+ type: chat_template
26
+ roles_to_train: ["gpt"]
27
+ field_messages: conversations
28
+ message_field_role: from
29
+ message_field_content: value
30
+ train_on_eos: turn
31
+ - path: NewEden/No-Prefix-LN
32
+ type: chat_template
33
+ roles_to_train: ["gpt"]
34
+ field_messages: conversations
35
+ message_field_role: from
36
+ message_field_content: value
37
+ train_on_eos: turn
38
+ - path: NewEden/Storium-No-Prefix
39
+ type: chat_template
40
+ roles_to_train: ["gpt"]
41
+ field_messages: conversations
42
+ message_field_role: from
43
+ message_field_content: value
44
+ train_on_eos: turn
45
+ - path: NewEden/PIPPA-Mega-Filtered
46
+ type: chat_template
47
+ roles_to_train: ["gpt"]
48
+ field_messages: conversations
49
+ message_field_role: from
50
+ message_field_content: value
51
+ train_on_eos: turn
52
+ - path: NewEden/OpenCAI-ShareGPT
53
+ type: chat_template
54
+ roles_to_train: ["gpt"]
55
+ field_messages: conversations
56
+ message_field_role: from
57
+ message_field_content: value
58
+ train_on_eos: turn
59
+ val_set_size: 0.01
60
+ output_dir: ./outputs
61
+
62
+ sequence_len: 16384
63
+ sample_packing: true
64
+ eval_sample_packing: false
65
+ pad_to_sequence_len: true
66
+
67
+
68
+ plugins:
69
+ - axolotl.integrations.liger.LigerPlugin
70
+ - axolotl.integrations.cut_cross_entropy.CutCrossEntropyPlugin
71
+ liger_rope: true
72
+ liger_rms_norm: true
73
+ liger_glu_activation: true
74
+ liger_fused_linear_cross_entropy: false
75
+ cut_cross_entropy: true
76
+ optimizer: apollo_adamw
77
+ optim_args: proj=random,rank=1,scale=128.0,scale_type=tensor,update_proj_gap=200
78
+ optim_target_modules:
79
+ - .*.attn.*
80
+ - .*.mlp.*
81
+ gradient_checkpointing: unsloth
82
+ flash_attention: true
83
+
84
+ adapter: lora
85
+ lora_model_dir:
86
+ lora_r: 64
87
+ lora_alpha: 32
88
+ lora_dropout: 0.1
89
+ lora_target_modules:
90
+ - gate_proj
91
+ - down_proj
92
+ - up_proj
93
+ - q_proj
94
+ - v_proj
95
+ - k_proj
96
+ - o_proj
97
+
98
+ gradient_accumulation_steps: 2
99
+ micro_batch_size: 1
100
+ num_epochs: 4
101
+ lr_scheduler: rex
102
+ learning_rate: 1e-5
103
+ weight_decay: 0.02
104
+
105
+ train_on_inputs: false
106
+ group_by_length: false
107
+ bf16: auto
108
+ fp16:
109
+ tf32: true
110
+
111
+ early_stopping_patience:
112
+ resume_from_checkpoint:
113
+ #auto_resume_from_checkpoints: true
114
+ local_rank:
115
+ logging_steps: 1
116
+ xformers_attention:
117
+ flash_attention: true
118
+
119
+ warmup_steps: 35
120
+ evals_per_epoch: 4
121
+ eval_table_size:
122
+ eval_max_new_tokens: 128
123
+ saves_per_epoch: 1
124
+
125
+ debug:
126
+ deepspeed:
127
+ fsdp:
128
+ fsdp_config: