candle-quantized-phi / phi-1_5.json
{
"_name_or_path": "phi-1.5-half",
"activation_function": "gelu",
"architectures": [
"MixFormerSequentialForCausalLM"
],
"auto_map": {
"AutoConfig": "configuration_mixformer_sequential.MixFormerSequentialConfig",
"AutoModelForCausalLM": "modeling_mixformer_sequential.MixFormerSequentialForCausalLM"
},
"embd_pdrop": 0.0,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"model_type": "mixformer-sequential",
"n_embd": 2048,
"n_head": 32,
"n_inner": null,
"n_layer": 24,
"n_positions": 2048,
"resid_pdrop": 0.0,
"rotary_dim": 32,
"tie_word_embeddings": false,
"torch_dtype": "float16",
"transformers_version": "4.32.1",
"vocab_size": 51200,
"pad_vocab_size_multiple": 64
}
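
A minimal sketch of how this config might be consumed (illustration only, not part of the repo): the Python below parses the file and checks the per-head dimensions implied by the values above. The filename `phi-1_5.json` matches this repo; the local path and everything else in the snippet are assumptions. The `auto_map` entries suggest the upstream transformers model is loaded with `trust_remote_code=True`, pulling `MixFormerSequentialForCausalLM` from the model repo's own code.

```python
import json

# Parse the config shipped in this repo (assumes a local copy of the file).
with open("phi-1_5.json") as f:
    cfg = json.load(f)

# Quantities implied by the values above.
head_dim = cfg["n_embd"] // cfg["n_head"]  # 2048 / 32 = 64
assert cfg["vocab_size"] % cfg["pad_vocab_size_multiple"] == 0  # 51200 = 800 * 64

# rotary_dim = 32 means rotary embeddings cover half of each 64-dim head.
print(f"layers={cfg['n_layer']} head_dim={head_dim} rotary_dim={cfg['rotary_dim']}")
```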