{
  "gpt_config": {
    "seq_length": 1024,
    "vocab_size": 8192,
    "n_embed": 512,
    "n_head": 8,
    "n_layer": 13,
    "dropout": 0.0
  }
}
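The JSON above only lists the model hyperparameters. A minimal sketch of how such a file might be loaded into a typed config object follows; the file name gpt_config.json and the GPTConfig dataclass are assumptions for illustration, not part of the original project.

# Minimal sketch: read the "gpt_config" section into a dataclass.
# The file name and class name are assumptions, not the project's API.
import json
from dataclasses import dataclass

@dataclass
class GPTConfig:
    seq_length: int = 1024
    vocab_size: int = 8192
    n_embed: int = 512
    n_head: int = 8
    n_layer: int = 13
    dropout: float = 0.0

with open("gpt_config.json") as f:
    cfg = GPTConfig(**json.load(f)["gpt_config"])

# With the values shown above, each attention head works on
# n_embed / n_head = 512 / 8 = 64 dimensions.
head_dim = cfg.n_embed // cfg.n_head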