qwen2.5_7b_en / config.json
{
    "module": "keras_hub.src.models.qwen.qwen_backbone",
    "class_name": "QwenBackbone",
    "config": {
        "name": "qwen_backbone",
        "trainable": true,
        "vocabulary_size": 152064,
        "num_layers": 28,
        "num_query_heads": 28,
        "hidden_dim": 3584,
        "intermediate_dim": 18944,
        "rope_max_wavelength": 1000000.0,
        "rope_scaling_factor": 1.0,
        "num_key_value_heads": 4,
        "layer_norm_epsilon": 1e-06,
        "dropout": 0,
        "tie_word_embeddings": false,
        "use_sliding_window_attention": false,
        "sliding_window_size": 131072
    },
    "registered_name": "keras_hub>QwenBackbone"
}
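
This is the serialized KerasHub configuration for the Qwen2.5 7B backbone: 28 transformer layers with grouped-query attention (28 query heads sharing 4 key/value heads), a 3584-dim hidden size, and an 18944-dim feed-forward block. Below is a minimal sketch of rebuilding the model from this file, assuming keras_hub is installed and that the constructor keyword arguments of keras_hub.models.QwenBackbone mirror the keys under "config"; the preset id "qwen2.5_7b_en" is taken from the folder name and may differ in your setup.

```python
# Minimal sketch: rebuilding the backbone described by this config.json.
# Assumes the keras_hub package is installed; the preset id "qwen2.5_7b_en"
# is inferred from the folder name and may need adjusting.
import json

import keras_hub

# Option 1: pull the config and pretrained weights from the published preset.
backbone = keras_hub.models.QwenBackbone.from_preset("qwen2.5_7b_en")

# Option 2: rebuild an uninitialized backbone from this file alone,
# passing the "config" entries as constructor keyword arguments.
with open("config.json") as f:
    cfg = json.load(f)["config"]
backbone = keras_hub.models.QwenBackbone(**cfg)

backbone.summary()
```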