{
  "accumulate_grad_batches": 64,
  "auxk": 256,
  "auxk_coef": 0.03125,
  "batch_size": 1,
  "dead_steps_threshold": null,
  "dead_threshold": 0.001,
  "dead_tokens_threshold": 10000000,
  "expansion_factor": 64,
  "k": 32,
  "layers": [
    9
  ],
  "lr": 0.0001,
  "max_length": 2048,
  "model_name": "EleutherAI/pythia-410m-deduped",
  "skip_special_tokens": true,
  "standardize": true,
  "tuned_lens": false
}