{
  "_name_or_path": "/ceph/hdd/staff/charpent/.cache/modelsbc25_3c2ww89igya",
  "architectures": [
    "MambaForCausalLM"
  ],
  "bos_token_id": 1,
  "conv_kernel": 4,
  "eos_token_id": 2,
  "expand": 2,
  "hidden_act": "silu",
  "hidden_size": 768,
  "initializer_range": 0.1,
  "intermediate_size": 1536,
  "layer_norm_epsilon": 1e-05,
  "model_type": "mamba",
  "num_hidden_layers": 12,
  "pad_token_id": 3,
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "bfloat16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "fp4",
    "bnb_4bit_use_double_quant": false,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": [
      "lm_head"
    ],
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  },
  "rescale_prenorm_residual": false,
  "residual_in_fp32": true,
  "state_size": 16,
  "time_step_floor": 0.0001,
  "time_step_init_scheme": "random",
  "time_step_max": 0.1,
  "time_step_min": 0.001,
  "time_step_rank": 48,
  "time_step_scale": 1.0,
  "torch_dtype": "float16",
  "transformers_version": "4.40.0",
  "use_bias": false,
  "use_cache": true,
  "use_conv_bias": true,
  "vocab_size": 32000
}
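
For reference, a minimal sketch of loading a checkpoint with the quantization settings from the `quantization_config` block above, using transformers and bitsandbytes. The `model_id` is a placeholder, since the `_name_or_path` above points at a private cache directory; substitute your own checkpoint path or Hub id.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# Placeholder: replace with the actual checkpoint path or Hub repo id.
model_id = "path/to/mamba-checkpoint"

# Mirrors the quantization_config block above: 4-bit fp4 weights,
# bfloat16 compute dtype, no double quantization, lm_head left unquantized.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="fp4",
    bnb_4bit_compute_dtype=torch.bfloat16,
    bnb_4bit_use_double_quant=False,
    llm_int8_skip_modules=["lm_head"],
)

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=bnb_config,
)
```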