# TunBERT / config_tunbert.py
from transformers import PretrainedConfig


class TunBertConfig(PretrainedConfig):
    """Configuration for TunBERT, mirroring the standard BERT hyperparameters."""

    model_type = "bert"
    def __init__(
        self,
        attention_probs_dropout_prob=0.1,
        classifier_dropout=None,
        gradient_checkpointing=False,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        hidden_size=768,
        initializer_range=0.02,
        intermediate_size=3072,
        layer_norm_eps=1e-12,
        max_position_embeddings=512,
        model_type="bert",
        num_attention_heads=12,
        num_hidden_layers=12,
        pad_token_id=0,
        position_embedding_type="absolute",
        transformers_version="4.35.2",
        type_vocab_size=2,
        use_cache=True,
        vocab_size=30522,
        **kwargs,
    ):
        # Call the parent constructor first so that PretrainedConfig does not
        # overwrite pad_token_id / transformers_version with its own defaults
        # after they have been set below.
        super().__init__(pad_token_id=pad_token_id, **kwargs)
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.classifier_dropout = classifier_dropout
        self.gradient_checkpointing = gradient_checkpointing
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.hidden_size = hidden_size
        self.initializer_range = initializer_range
        self.intermediate_size = intermediate_size
        self.layer_norm_eps = layer_norm_eps
        self.max_position_embeddings = max_position_embeddings
        self.model_type = model_type  # mirrors the class-level model_type attribute
        self.num_attention_heads = num_attention_heads
        self.num_hidden_layers = num_hidden_layers
        self.pad_token_id = pad_token_id
        self.position_embedding_type = position_embedding_type
        self.transformers_version = transformers_version
        self.type_vocab_size = type_vocab_size
        self.use_cache = use_cache
        self.vocab_size = vocab_size
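

# A minimal usage sketch, not part of the original file: it simply instantiates the
# config with its defaults and inspects a few fields. The printed values assume the
# defaults declared above; nothing here is required by the TunBERT model code itself.
if __name__ == "__main__":
    config = TunBertConfig()
    print(config.model_type)        # "bert"
    print(config.hidden_size)       # 768
    print(config.pad_token_id)      # 0 (preserved because it is passed to super().__init__)
    print(config.to_json_string())  # full serialized configuration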