{ "_name_or_path": "openbmb/CPM-2B", "architectures": [ "MiniCPMForCausalLM" ], "auto_map": { "AutoConfig": "configuration_minicpm.MiniCPMConfig", "AutoModel": "modeling_minicpm.MiniCPMModel", "AutoModelForCausalLM": "modeling_minicpm.MiniCPMForCausalLM", "AutoModelForSeq2SeqLM": "modeling_minicpm.MiniCPMForCausalLM", "AutoModelForSequenceClassification": "modeling_minicpm.MiniCPMForSequenceClassification" }, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1792, "initializer_range": 0.1, "intermediate_size": 5760, "max_position_embeddings": 4096, "num_attention_heads": 14, "num_hidden_layers": 35, "num_key_value_heads": 32, "rms_norm_eps": 1e-05, "rope_scaling": null, "torch_dtype": "bfloat16", "transformers_version": "4.36.0", "use_cache": true, "vocab_size": 122753, "scale_emb": 12, "dim_model_base": 256, "scale_depth": 1.4, "tie_word_embeddings": false, "norm_after_router": "rms", "norm_scale": 1.0, "attention_type": "mla", "q_lora_rank": 768, "kv_lora_rank": 256, "qk_nope_head_dim": 128, "qk_rope_head_dim": 64, "v_head_dim": 128, "ffn_type": "block_linear", "ffn_gated": true, "router_act": "relu", "expert_size": 128, "num_experts": 56, "block_implementation": "torch" }