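# RoBERTa-style model configuration for language modeling.
# Hyperparameters follow the RoBERTa-base layout (12 layers, 12 heads,
# hidden size 768); vocab_size and the special-token ids are tokenizer-specific.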
name: roberta_lm
config_type: model
task: language_modeling
attention_probs_dropout_prob: 0.1   # dropout on attention weights
bos_token_id: 0                     # beginning-of-sequence token id
eos_token_id: 2                     # end-of-sequence token id
gradient_checkpointing: false       # if true, trades compute for memory
hidden_act: gelu                    # activation in the feed-forward layers
hidden_dropout_prob: 0.1            # dropout on embeddings and hidden states
hidden_size: 768                    # encoder width (RoBERTa-base)
initializer_range: 0.02             # stddev of the weight initializer
intermediate_size: 3072             # feed-forward inner dimension (4x hidden)
layer_norm_eps: 1.0e-12             # epsilon for layer normalization
max_position_embeddings: 514        # RoBERTa convention: 512 positions + offset of 2
num_attention_heads: 12
num_hidden_layers: 12
pad_token_id: 1                     # RoBERTa uses id 1 for padding
position_embedding_type: absolute   # learned absolute position embeddings
type_vocab_size: 1                  # no segment embeddings, as in RoBERTa
use_cache: true                     # cache key/value states during decoding
vocab_size: 42000                   # tokenizer vocabulary size
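
A minimal sketch of how a config like this might be loaded, assuming the target is a Hugging Face RobertaConfig and that the name, config_type, and task keys are consumed by an outer training framework rather than by the model itself. The file name roberta_lm.yaml is hypothetical.

# Loader sketch (assumptions: PyYAML and `transformers` are installed;
# `roberta_lm.yaml` is a hypothetical file name for the config above;
# `name`, `config_type`, and `task` are framework-level metadata,
# not RobertaConfig fields).
import yaml
from transformers import RobertaConfig

with open("roberta_lm.yaml") as f:
    raw = yaml.safe_load(f)

# Strip the metadata keys before handing the rest to RobertaConfig.
model_kwargs = {k: v for k, v in raw.items()
                if k not in {"name", "config_type", "task"}}

config = RobertaConfig(**model_kwargs)
print(config.num_hidden_layers, config.hidden_size, config.vocab_size)
# -> 12 768 42000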