{
    "num_layers": 40,
    "vocab_size": 50176,
    "hidden_size": 3072,
    "num_attention_heads": 48,
    "embedding_dropout_prob": 0.1,
    "attention_dropout_prob": 0.1,
    "output_dropout_prob": 0.1,
    "max_sequence_length": 1024,
    "max_memory_length": 512,
    "checkpoint_activations": false,
    "checkpoint_num_layers": 1,
    "parallel_output": true,
    "relative_encoding": true
}
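For reference, a minimal Python sketch of how a training or inference script might consume this configuration file. The filename "config.json", the derived head dimension, and the sanity check are illustrative assumptions, not part of the repository.

import json

# Load the model configuration (path is an assumption; use the actual
# filename under which this file is stored in the repository).
with open("config.json", "r") as f:
    config = json.load(f)

# Quick consistency check on the hyperparameters above: the hidden size
# must split evenly across the attention heads.
assert config["hidden_size"] % config["num_attention_heads"] == 0
head_dim = config["hidden_size"] // config["num_attention_heads"]  # 3072 / 48 = 64

print(f"{config['num_layers']} layers, head dim {head_dim}, "
      f"max sequence length {config['max_sequence_length']}")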