Tonic committed
Commit 401f18e · 1 Parent(s): 6995e18

adds packing argument to gpt oss custom config

Files changed (1):
  config/train_gpt_oss_custom.py  +3 -0
config/train_gpt_oss_custom.py CHANGED

@@ -92,6 +92,9 @@ class GPTOSSEnhancedCustomConfig:
     save_steps: int = 500  # Save checkpoint every N steps
     save_total_limit: Optional[int] = 3  # Keep only N best checkpoints
     save_only_model: bool = False  # Save only model weights
+    # TRL packing (token packing of multiple samples into a single sequence)
+    # Some configs (e.g., openhermes_fr_memory_optimized) set this to True
+    packing: bool = False
 
     # Model Selection
     metric_for_best_model: str = "eval_loss"
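For context, packing here refers to TRL's sequence packing, where multiple short samples are concatenated into a single fixed-length training sequence to reduce padding waste. Below is a minimal sketch of how the new field might be forwarded to TRL's SFTConfig (which exposes a packing argument); only the dataclass fields shown in the diff come from the repo, while the SFTConfig wiring and output_dir are illustrative assumptions about how this config is consumed.

# Minimal sketch (assumed wiring): forward the config fields, including the new
# `packing` flag, to TRL's SFTConfig. Only the dataclass fields shown in the diff
# come from the repo; the trainer-argument wiring below is illustrative.
from dataclasses import dataclass
from typing import Optional

from trl import SFTConfig  # TRL's SFTConfig exposes a `packing` argument


@dataclass
class GPTOSSEnhancedCustomConfig:
    save_steps: int = 500                # Save checkpoint every N steps
    save_total_limit: Optional[int] = 3  # Keep only N best checkpoints
    save_only_model: bool = False        # Save only model weights
    packing: bool = False                # Pack multiple samples into one sequence
    metric_for_best_model: str = "eval_loss"


cfg = GPTOSSEnhancedCustomConfig(packing=True)  # memory-optimized configs enable packing

sft_args = SFTConfig(
    output_dir="outputs",                # illustrative path
    save_steps=cfg.save_steps,
    save_total_limit=cfg.save_total_limit,
    save_only_model=cfg.save_only_model,
    packing=cfg.packing,                 # the flag added in this commit
    metric_for_best_model=cfg.metric_for_best_model,
)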