Delta-Vector committed (verified)
Commit 9587fa7 · Parent: f3e6cda

Upload axolotl_config_lu170wzt.yml

Files changed (1): axolotl_config_lu170wzt.yml (+143, −0)
axolotl_config_lu170wzt.yml (new file, 143 lines):
## model
base_model: /home/dgxuser/workspace/Mango/models/Vulpecula
model_type: AutoModelForCausalLM
tokenizer_type: AutoTokenizer

## qlora
load_in_8bit: false
load_in_4bit: false
strict: false

## Lora
adapter: lora
lora_model_dir:
lora_r: 64
lora_alpha: 32
lora_dropout: 0.0
peft_use_rslora: true
lora_mlp_kernel: true
lora_qkv_kernel: true
lora_o_kernel: true
lora_target_linear: true
#lora_target_modules:
#  - gate_proj
#  - down_proj
#  - up_proj
#  - q_proj
#  - v_proj
#  - k_proj
#  - o_proj
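
# with peft_use_rslora enabled, the adapter output is scaled by
# lora_alpha / sqrt(lora_r) = 32 / sqrt(64) = 4, instead of the standard
# LoRA factor lora_alpha / lora_r = 0.5
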
## data
datasets:
  - path: PocketDoc/Dans-Personamaxx-VN
    type: dan-chat-advanced-llama3
  - path: NewEden/LIMARP-Complexity
    type: dan-chat-advanced-llama3
  - path: NewEden/PIPPA-Mega-Filtered
    type: dan-chat-advanced-llama3
  - path: NewEden/OpenCAI-ShareGPT
    type: dan-chat-advanced-llama3
  - path: NewEden/Creative_Writing-Complexity
    type: dan-chat-advanced-llama3
  - path: NewEden/Light-Novels-Roleplay-Logs-Books-Oh-My-duplicate-turns-removed
    type: dan-chat-advanced-llama3
  - path: PocketDoc/Dans-Failuremaxx-Adventure-3
    type: dan-chat-advanced-llama3
  - path: NewEden/Books-V2-ShareGPT
    type: dan-chat-advanced-llama3
  - path: NewEden/Deepseek-V3-RP-Filtered
    type: dan-chat-advanced-llama3
  - path: NewEden/Final-Alpindale-LNs-ShareGPT
    type: dan-chat-advanced-llama3
  - path: NewEden/DeepseekRP-Filtered
    type: dan-chat-advanced-llama3
  - path: NewEden/RP-logs-V2-Experimental
    type: dan-chat-advanced-llama3
  - path: anthracite-org/kalo_opus_misc_240827
    type: dan-chat-advanced-llama3
  - path: anthracite-org/kalo_misc_part2
    type: dan-chat-advanced-llama3
  - path: NewEden/Storium-Prefixed-Clean
    type: dan-chat-advanced-llama3
shuffle_merged_datasets: true
dataset_prepared_path: base-dataset_prepared
val_set_size: 0.0
output_dir: ./SFT-Vulpecula

## Liger + CCE
plugins:
  - axolotl.integrations.liger.LigerPlugin
  - axolotl.integrations.cut_cross_entropy.CutCrossEntropyPlugin
liger_rope: true
liger_rms_norm: true
liger_layer_norm: true
liger_glu_activation: true
liger_fused_linear_cross_entropy: false
cut_cross_entropy: true
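# cut_cross_entropy computes the loss without materializing the full logit
# matrix, so liger_fused_linear_cross_entropy is left disabled rather than
# stacking two loss kernels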

## CTX settings
sequence_len: 8192
sample_packing: true
eval_sample_packing: false
pad_to_sequence_len: true

## WandB
wandb_project: Francois
wandb_entity:
wandb_watch:
wandb_name: Big_boy
wandb_log_model:

## evals
#evals_per_epoch: 4
#eval_table_size:
#eval_max_new_tokens: 128

## hparams
gradient_accumulation_steps: 3
micro_batch_size: 3
num_epochs: 2
optimizer: adamw_bnb_8bit
lr_scheduler: cosine
learning_rate: 2e-5
warmup_steps: 50
weight_decay: 0.0025
## max grad norm
max_grad_norm: 1.0
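# effective batch size = micro_batch_size (3) × gradient_accumulation_steps (3)
# = 9 sequences per device per optimizer step, times the data-parallel world size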

train_on_inputs: false
group_by_length: false
bf16: auto
fp16:
tf32: false

gradient_checkpointing: true
early_stopping_patience:
resume_from_checkpoint:
local_rank:
logging_steps: 1
xformers_attention:
flash_attention: true
s2_attention:
saves_per_epoch: 2
debug:
deepspeed: ./deepspeed_configs/zero3_bf16.json

#fsdp:
#  - full_shard
#  - auto_wrap
#fsdp_config:
#  fsdp_limit_all_gathers: true
#  fsdp_sync_module_states: true
#  fsdp_offload_params: true
#  fsdp_activation_checkpointing: true
#  fsdp_use_orig_params: false
#  fsdp_cpu_ram_efficient_loading: true
#  fsdp_auto_wrap_policy: TRANSFORMER_BASED_WRAP
#  fsdp_transformer_layer_cls_to_wrap: LlamaDecoderLayer
#  fsdp_state_dict_type: FULL_STATE_DICT
#  fsdp_sharding_strategy: FULL_SHARD
special_tokens:
  pad_token: <|finetune_right_pad_id|>
  eos_token: <|eot_id|>
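# both tokens come from the Llama 3 tokenizer, matching the
# dan-chat-advanced-llama3 chat template used for the datasets above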
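
A typical invocation for a config like this (exact CLI entry points vary by axolotl version; the filename below assumes this upload's name). The optional preprocess step fills dataset_prepared_path ahead of training:

    python -m axolotl.cli.preprocess axolotl_config_lu170wzt.yml
    accelerate launch -m axolotl.cli.train axolotl_config_lu170wzt.yml

With the deepspeed key set, training shards the model under ZeRO-3 as configured in ./deepspeed_configs/zero3_bf16.json.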