AgentRAG-3B / config.json
Commit 3849b41: Grokking for Context Obedience in multiple forms; the model still occasionally mishandles URLs when the URL is as long as a sentence.
{
"_name_or_path": ".\\AgentRAG",
"architectures": [
"LlamaForCausalLM"
],
"attention_bias": true,
"attention_dropout": 0.0,
"bos_token_id": 1,
"eos_token_id": 32000,
"hidden_act": "silu",
"hidden_size": 3072,
"initializer_range": 0.02,
"intermediate_size": 8192,
"max_position_embeddings": 32768,
"mlp_bias": false,
"model_type": "llama",
"num_attention_heads": 32,
"num_hidden_layers": 30,
"num_key_value_heads": 32,
"pretraining_tp": 1,
"rms_norm_eps": 1e-05,
"rope_scaling": null,
"rope_theta": 10000.0,
"sliding_window": 2047,
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.44.0",
"use_cache": true,
"vocab_size": 32064
}
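For reference, a minimal sketch of loading a checkpoint that ships this config with Hugging Face transformers. The repo id "TroyDoesAI/AgentRAG-3B" is an assumption inferred from the page title, not confirmed by the file itself.

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Assumed repo id, inferred from the page title "AgentRAG-3B".
repo_id = "TroyDoesAI/AgentRAG-3B"

# "model_type": "llama" makes AutoConfig resolve this file to LlamaConfig.
config = AutoConfig.from_pretrained(repo_id)
print(config.hidden_size, config.num_hidden_layers)  # 3072, 30

# torch_dtype matches the checkpoint's "torch_dtype": "bfloat16".
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

Note that "eos_token_id": 32000 differs from "bos_token_id": 1, and "vocab_size": 32064 exceeds the base Llama vocabulary, so the bundled tokenizer should be used rather than a generic Llama tokenizer.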