{
  "accumulate_grad_batches": 64,
  "auxk": 256,
  "auxk_coef": 0.03125,
  "batch_size": 1,
  "dead_steps_threshold": null,
  "dead_threshold": 0.001,
  "dead_tokens_threshold": 10000000,
  "expansion_factor": 256,
  "k": 32,
  "layers": null,
  "lr": 0.0001,
  "max_length": 2048,
  "model_name": "EleutherAI/pythia-160m-deduped",
  "skip_special_tokens": true,
  "standardize": true,
  "tuned_lens": false
}