Delta-Vector committed on
Commit
132b247
·
verified ·
1 Parent(s): 80dd233

Upload rp.yml

Browse files
Files changed (1) hide show
  1. rp.yml +107 -0
rp.yml ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ base_model: NewEden/32B-inst
2
+ model_type: AutoModelForCausalLM
3
+ tokenizer_type: AutoTokenizer
4
+
5
+ hub_model_id: NewEden/32b-rp
6
+ hub_strategy: "all_checkpoints"
7
+ push_dataset_to_hub:
8
+ hf_use_auth_token: true
9
+
10
+ plugins:
11
+ - axolotl.integrations.liger.LigerPlugin
12
+ - axolotl.integrations.cut_cross_entropy.CutCrossEntropyPlugin
13
+ liger_rope: true
14
+ liger_rms_norm: true
15
+ liger_layer_norm: true
16
+ liger_glu_activation: true
17
+ liger_fused_linear_cross_entropy: false
18
+ cut_cross_entropy: true
19
+
20
+ load_in_8bit: false
21
+ load_in_4bit: false
22
+ strict: false
23
+
24
+ datasets:
25
+ - path: NewEden/RP-logs-V2-Experimental-prefixed
26
+ type: dan-chat-advanced
27
+ - path: NewEden/Creative_Writing-Complexity
28
+ type: dan-chat-advanced
29
+ - path: NewEden/Discord-Filtered
30
+ type: dan-chat-advanced
31
+ - path: NewEden/DeepseekRP-Filtered
32
+ type: dan-chat-advanced
33
+ - path: NewEden/Storium-Prefixed-Clean
34
+ type: dan-chat-advanced
35
+ - path: NewEden/Basket-Weaving-Filtered
36
+ type: dan-chat-advanced
37
+ - path: NewEden/LIMARP-Complexity
38
+ type: dan-chat-advancedd
39
+ - path: NewEden/Misc-Data-Sharegpt-Prefixed
40
+ type: dan-chat-advanced
41
+ - path: NewEden/BlueSky-10K-Complexity
42
+ type: dan-chat-advanced
43
+ - path: NewEden/OpenCAI-ShareGPT
44
+ type: dan-chat-advanced
45
+ - path: NewEden/Basket-Weaving-Filtered
46
+ type: dan-chat-advanced
47
+
48
+ dataset_prepared_path: prepared_data
49
+ val_set_size: 0.02
50
+ output_dir: ./qwq-inst
51
+
52
+ sequence_len: 32768
53
+ sample_packing: true
54
+ pad_to_sequence_len: true
55
+
56
+ # adapter: lora
57
+ # lora_model_dir:
58
+ # lora_r: 128
59
+ # lora_alpha: 16
60
+ # lora_dropout: 0.05
61
+ # lora_target_modules:
62
+ # - gate_proj
63
+ # - down_proj
64
+ # - up_proj
65
+ # - q_proj
66
+ # - v_proj
67
+ # - k_proj
68
+ # - o_proj
69
+
70
+ wandb_project: qwq
71
+ wandb_entity:
72
+ wandb_watch:
73
+ wandb_name: rp-attempt-01
74
+ wandb_log_model:
75
+
76
+ gradient_accumulation_steps: 2
77
+ micro_batch_size: 2
78
+ num_epochs: 4
79
+ optimizer: adamw_bnb_8bit
80
+ lr_scheduler: cosine
81
+ learning_rate: 3.17e-5
82
+
83
+ train_on_inputs: false
84
+ group_by_length: false
85
+ bf16: auto
86
+ fp16:
87
+ tf32: false
88
+
89
+ gradient_checkpointing: true
90
+ early_stopping_patience:
91
+ resume_from_checkpoint:
92
+ local_rank:
93
+ logging_steps: 1
94
+ xformers_attention:
95
+ flash_attention: true
96
+
97
+ warmup_steps: 40
98
+ evals_per_epoch: 4
99
+ eval_table_size: 128
100
+ eval_max_new_tokens:
101
+ saves_per_epoch: 2
102
+ debug:
103
+ deepspeed: deepspeed_configs/zero3_bf16.json
104
+ weight_decay: 0.02
105
+ fsdp:
106
+ fsdp_config:
107
+ special_tokens: