Delta-Vector committed on
Commit
0455da7
·
verified ·
1 Parent(s): 258b2c9

Upload chat.yml

Browse files
Files changed (1) hide show
  1. chat.yml +108 -0
chat.yml ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
---
## Model: base checkpoint and HF auto-classes used to load it
base_model: NewEden/Hamanasu-KTO-V2
model_type: AutoModelForCausalLM
tokenizer_type: AutoTokenizer

## Quantization — full-precision run (no 8-bit/4-bit loading)
load_in_8bit: false
load_in_4bit: false
strict: false

## Data — all datasets share the dan-chat-advanced chat template
datasets:
  - path: NewEden/RP-logs-V2-Experimental-prefixed
    type: dan-chat-advanced
  - path: NewEden/Discord-Filtered
    type: dan-chat-advanced
  - path: NewEden/Basket-Weaving-Filtered
    type: dan-chat-advanced
  - path: NewEden/Misc-Data-Sharegpt-Prefixed
    type: dan-chat-advanced
  - path: NewEden/BlueSky-10K-Complexity
    type: dan-chat-advanced
  - path: PocketDoc/Dans-Kinomaxx-VanillaBackrooms
    type: dan-chat-advanced
shuffle_merged_datasets: true
dataset_prepared_path: dataset_prepared
val_set_size: 0.02
output_dir: 4b-out-rslora

## Liger kernel plugin — fused/optimized ops for throughput and memory
plugins:
  - axolotl.integrations.liger.LigerPlugin
liger_rope: true
liger_rms_norm: true
liger_layer_norm: true
liger_glu_activation: true
liger_fused_linear_cross_entropy: true

## Context settings
sequence_len: 32768
sample_packing: true
eval_sample_packing: true
pad_to_sequence_len: true

## LoRA — disabled for this run; kept for reference
#adapter: lora
#lora_model_dir:
#lora_r: 128
#lora_alpha: 16
#lora_dropout: 0.05
#lora_target_modules:
#  - gate_proj
#  - down_proj
#  - up_proj
#  - q_proj
#  - v_proj
#  - k_proj
#  - o_proj
#lora_fan_in_fan_out:
#peft_use_rslora: true
#lora_modules_to_save:
#  - embed_tokens
#  - lm_head

## Weights & Biases logging
wandb_project: tavbussy
wandb_entity:
wandb_watch:
wandb_name: chat-v2
wandb_log_model:

## Evaluation
evals_per_epoch: 4
eval_table_size:
eval_max_new_tokens: 128

## Training hyperparameters
gradient_accumulation_steps: 2
micro_batch_size: 6
num_epochs: 4
optimizer: paged_ademamix_8bit
lr_scheduler: cosine
learning_rate: 2e-5

train_on_inputs: false
group_by_length: false
bf16: auto
fp16:
tf32: false

gradient_checkpointing: true
early_stopping_patience:
resume_from_checkpoint:
local_rank:
logging_steps: 1
xformers_attention:
flash_attention: true
s2_attention:

warmup_steps: 40
saves_per_epoch: 2
debug:
deepspeed: ./deepspeed_configs/zero3_bf16.json
weight_decay: 0.02
fsdp:
fsdp_config:
special_tokens:
  # Quoted: value starts with YAML-special characters (<, |)
  pad_token: "<|finetune_right_pad_id|>"