File size: 751 Bytes
92ca8d4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
from transformers.configuration_utils import PretrainedConfig

class Phi3Config(PretrainedConfig):
    """Model configuration for Phi-3.

    Holds the architecture hyperparameters of a Phi-3 model. Any extra
    keyword arguments are forwarded unchanged to ``PretrainedConfig``
    (e.g. token ids, tying options), per the HuggingFace convention.
    """

    # Identifier used by the transformers auto-class registry.
    model_type = "phi3"

    def __init__(
        self,
        vocab_size=32064,
        hidden_size=3072,
        num_hidden_layers=32,
        num_attention_heads=32,
        intermediate_size=8192,
        max_position_embeddings=4096,
        **kwargs,
    ):
        """Build the config.

        Args:
            vocab_size: Size of the token vocabulary.
            hidden_size: Dimensionality of the hidden states.
            num_hidden_layers: Number of transformer layers.
            num_attention_heads: Number of attention heads per layer.
            intermediate_size: Width of the feed-forward (MLP) layer.
            max_position_embeddings: Maximum supported sequence length.
            **kwargs: Passed through to ``PretrainedConfig.__init__``.
        """
        # Let the base class consume the generic config options first.
        super().__init__(**kwargs)

        # Record the architecture hyperparameters as instance attributes.
        for attr_name, attr_value in (
            ("vocab_size", vocab_size),
            ("hidden_size", hidden_size),
            ("num_hidden_layers", num_hidden_layers),
            ("num_attention_heads", num_attention_heads),
            ("intermediate_size", intermediate_size),
            ("max_position_embeddings", max_position_embeddings),
        ):
            setattr(self, attr_name, attr_value)