{
    "num_layers": 32,
    "num_heads": 8,
    "d_model": 256,
    "T": 12,
    "S": 256,
    "image_vocab_size": 262144,
    "use_mup": false,
    "num_factored_vocabs": 2,
    "qkv_bias": true,
    "proj_bias": true,
    "use_actions": true,
    "action_network": "modulate",
    "attn_drop": 0.1,
    "qk_norm": false,
    "mlp_ratio": 4.0,
    "mlp_drop": 0.05,
    "mlp_bias": false,
    "patch_size": 2
}