{
  "num_layers": 12,
  "d_model": 768,
  "attention_heads": 12,
  "ffn_dim": 3072,
  "vocab_size": 50261,
  "bos_token_id": 50257,
  "eos_token_id": 50259,
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "architectures": [
    "XGLMForCausalLM"
  ],
  "attention_dropout": 0.1,
  "dropout": 0.1,
  "init_std": 0.02,
  "layerdrop": 0.0,
  "max_position_embeddings": 2048,
  "model_type": "xglm",
  "tokenizer_class": "GPT2Tokenizer",
  "scale_embedding": true,
  "transformers_version": "4.17.0",
  "use_cache": true
}
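To make the configuration concrete, below is a minimal sketch of building a freshly initialized model with these values using Hugging Face transformers' `XGLMConfig` and `XGLMForCausalLM`. The remaining keys (`architectures`, `model_type`, `tokenizer_class`, `transformers_version`) are file metadata read by `from_pretrained` rather than constructor arguments, and the parameter-count printout at the end is illustrative, not part of the original config.

```python
# Minimal sketch: instantiate an XGLM model from the config values above.
# Assumes the Hugging Face `transformers` library is installed; training
# from scratch (random init) is an assumption, not stated in the source.
from transformers import XGLMConfig, XGLMForCausalLM

config = XGLMConfig(
    num_layers=12,
    d_model=768,
    attention_heads=12,
    ffn_dim=3072,
    vocab_size=50261,
    bos_token_id=50257,
    eos_token_id=50259,
    activation_dropout=0.0,
    activation_function="gelu",
    attention_dropout=0.1,
    dropout=0.1,
    init_std=0.02,
    layerdrop=0.0,
    max_position_embeddings=2048,
    scale_embedding=True,
    use_cache=True,
)

# Randomly initialized decoder-only model with the architecture above.
model = XGLMForCausalLM(config)
print(f"parameters: {sum(p.numel() for p in model.parameters()):,}")
```

Note that `vocab_size` (50261) extends the base GPT-2 vocabulary of 50257 tokens, which is consistent with `bos_token_id` (50257) and `eos_token_id` (50259) pointing at added special tokens; if the config is saved to a directory as `config.json`, the same object can be recovered with `XGLMConfig.from_pretrained(that_directory)`.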