SmolLM2 / config.json
{
  "vocab_size": 49152,
  "hidden_size": 576,
  "intermediate_size": 1536,
  "num_hidden_layers": 30,
  "num_attention_heads": 9,
  "num_key_value_heads": 3,
  "hidden_act": "silu",
  "max_position_embeddings": 2048,
  "initializer_range": 0.041666666666666664,
  "rms_norm_eps": 1e-05,
  "use_cache": true,
  "pad_token_id": null,
  "bos_token_id": 0,
  "eos_token_id": 0,
  "tie_word_embeddings": true,
  "rope_theta": 10000.0,
  "architectures": [
    "SmolLM2"
  ],
  "model_type": "smollm2"
}
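
As a quick sanity check, the sketch below (plain Python; the path `config.json` is assumed to point at a local copy of this file) reads the config and derives two quantities implied by the fields above: the per-head dimension and the grouped-query-attention ratio of query heads to key/value heads.

```python
import json

# Load the config above (local path assumed; adjust as needed).
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension: hidden_size split evenly across attention heads.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]            # 576 / 9 = 64

# Grouped-query attention: how many query heads share each key/value head.
gqa_groups = cfg["num_attention_heads"] // cfg["num_key_value_heads"]  # 9 / 3 = 3

print(f"head_dim={head_dim}, query heads per KV head={gqa_groups}")
print(f"layers={cfg['num_hidden_layers']}, context={cfg['max_position_embeddings']}")
```

Note that `model_type` is `"smollm2"` and the listed architecture is `SmolLM2`, which are not built-in `transformers` identifiers, so loading this config through `AutoConfig`/`AutoModel` would presumably require the custom model code shipped alongside this repository.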