File size: 830 Bytes
284f08e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 |
from typing import Optional

from transformers import PretrainedConfig
class MLPConfig(PretrainedConfig):
    """Configuration for a simple multi-layer perceptron (MLP) model.

    Stores the hyperparameters needed to build the model; integrates with the
    HuggingFace config machinery via :class:`~transformers.PretrainedConfig`
    (serialization, ``from_pretrained``/``save_pretrained``, extra kwargs).

    Args:
        num_hidden_layers: Number of hidden layers. Must equal
            ``len(hidden_size)``.
        input_size: Dimensionality of the model input.
        hidden_size: Per-layer hidden widths, one entry per hidden layer.
            ``None`` (the default) means ``[256, 256]``.
        output_size: Dimensionality of the model output.
        hidden_act: Name of the activation function used between layers
            (e.g. ``"relu"``).
        initializer_range: Std-dev of the weight initializer.
        **kwargs: Forwarded to ``PretrainedConfig.__init__``.

    Raises:
        ValueError: If ``num_hidden_layers != len(hidden_size)``.
    """

    model_type = "mlp"

    def __init__(
        self,
        num_hidden_layers: int = 2,
        input_size: int = 64,
        hidden_size: Optional[list[int]] = None,
        output_size: int = 2,
        hidden_act: str = "relu",
        initializer_range: float = 0.02,
        **kwargs,
    ):
        # None-sentinel instead of a mutable default argument: a literal
        # list default would be one shared object across all calls, so
        # mutating config.hidden_size on one default-constructed instance
        # would silently change the default for every later one.
        if hidden_size is None:
            hidden_size = [256, 256]
        # Validate before assigning so a bad config never half-initializes.
        if len(hidden_size) != num_hidden_layers:
            raise ValueError(
                f"num_hidden_layers ({num_hidden_layers}) must equal "
                f"len(hidden_size) ({len(hidden_size)})"
            )
        self.num_hidden_layers = num_hidden_layers
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        super().__init__(**kwargs)