# mlp/configuration_mlp.py
from transformers import PretrainedConfig


class MLPConfig(PretrainedConfig):
    """Configuration for a simple multi-layer perceptron (MLP) model."""

    model_type = "mlp"

    def __init__(
        self,
        num_hidden_layers: int = 2,
        input_size: int = 64,
        hidden_size: list[int] | None = None,
        output_size: int = 2,
        hidden_act: str = "relu",
        initializer_range: float = 0.02,
        **kwargs,
    ):
        # Avoid a mutable default argument: fall back to two 256-wide layers.
        if hidden_size is None:
            hidden_size = [256, 256]
        if len(hidden_size) != num_hidden_layers:
            raise ValueError("num_hidden_layers must equal len(hidden_size)")
        self.num_hidden_layers = num_hidden_layers
        self.input_size = input_size
        self.hidden_size = hidden_size  # per-layer widths, one entry per hidden layer
        self.output_size = output_size
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        super().__init__(**kwargs)
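

# --- Usage sketch (not part of the uploaded file): a minimal example of how
# this config could be registered with AutoConfig and round-tripped through
# the standard Hugging Face serialization API. The save directory
# "./mlp-demo" and the chosen layer sizes are illustrative assumptions.
if __name__ == "__main__":
    from transformers import AutoConfig

    # Register the custom config so AutoConfig can resolve model_type="mlp".
    AutoConfig.register("mlp", MLPConfig)

    # Three hidden layers, so hidden_size must list exactly three widths.
    config = MLPConfig(num_hidden_layers=3, hidden_size=[512, 256, 128], output_size=10)

    # Save to disk and reload via AutoConfig, which dispatches on model_type.
    config.save_pretrained("./mlp-demo")
    reloaded = AutoConfig.from_pretrained("./mlp-demo")
    assert reloaded.hidden_size == [512, 256, 128]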