File size: 721 Bytes
1b36437
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
from transformers import PretrainedConfig

class MapperConfig(PretrainedConfig):
    """Configuration for an embedding-mapper model.

    Holds the hyperparameters of a small feed-forward network that maps
    input embeddings of size ``d_in`` to ``n_out`` output embeddings of
    size ``d_out`` each (presumably an MLP — confirm against the model
    class that consumes this config).  Registered with the transformers
    library under ``model_type = "embedding_mapper"`` so it round-trips
    through ``save_pretrained`` / ``from_pretrained``.
    """

    model_type = "embedding_mapper"

    def __init__(self,
                 d_in: int = 64,
                 d_hidden: int = 1024,
                 n_layers: int = 6,
                 d_out: int = 64,
                 n_out: int = 2,
                 dropout: float = 0.1,
                 layer_norm_eps: float = 1e-12,
                 **kwargs
                ):
        """
        Args:
            d_in: Dimensionality of the input embeddings.
            d_hidden: Width of the hidden layers.
            n_layers: Number of layers in the mapper network.
            d_out: Dimensionality of each output embedding.
            n_out: Number of output embeddings produced per input.
            dropout: Dropout probability applied inside the network.
            layer_norm_eps: Epsilon used by layer normalization.
            **kwargs: Forwarded to ``PretrainedConfig`` (serialization,
                name_or_path, etc.).
        """
        self.d_in = d_in
        self.d_hidden = d_hidden
        self.n_layers = n_layers
        self.d_out = d_out
        self.n_out = n_out
        self.dropout = dropout
        self.layer_norm_eps = layer_norm_eps

        # Call the base-class init last, after our own fields are set —
        # this matches the pattern in the HF "Building custom models"
        # tutorial and lets PretrainedConfig consume the remaining kwargs.
        super().__init__(**kwargs)