{
  "_name_or_path": "Phi-3.5-MoE-instruct",
  "architectures": [
    "PhiMoEForCausalLM"
  ],
  "attention_bias": true,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_phimoe.PhiMoEConfig",
    "AutoModelForCausalLM": "modeling_phimoe.PhiMoEForCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 32000,
  "hidden_act": "silu",
  "hidden_dropout": 0.0,
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "input_jitter_noise": 0.01,
  "intermediate_size": 6400,
  "lm_head_bias": true,
  "max_position_embeddings": 131072,
  "model_type": "rasphi",
  "num_attention_heads": 32,
  "num_experts_per_tok": 2,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "original_max_position_embeddings": 4096,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "long_factor": [
      1.0199999809265137,
      1.0299999713897705,
      1.0399999618530273,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.059999942779541,
      1.059999942779541,
      1.059999942779541,
      1.059999942779541,
      1.059999942779541,
      1.059999942779541,
      1.0999999046325684,
      1.1799999475479126,
      1.1799999475479126,
      1.3700000047683716,
      1.4899998903274536,
      2.109999895095825,
      2.8899998664855957,
      3.9499998092651367,
      4.299999713897705,
      6.429999828338623,
      8.09000015258789,
      10.690000534057617,
      12.050000190734863,
      18.229999542236328,
      18.84000015258789,
      19.899999618530273,
      21.420000076293945,
      26.200000762939453,
      34.28000259399414,
      34.590003967285156,
      38.730003356933594,
      40.22000503540039,
      42.54000473022461,
      44.000003814697266,
      47.590003967285156,
      54.750003814697266,
      56.19000244140625,
      57.44000244140625,
      57.4900016784668,
      61.20000076293945,
      61.540000915527344,
      61.75,
      61.779998779296875,
      62.06999969482422,
      63.11000061035156,
      63.43000030517578,
      63.560001373291016,
      63.71000289916992,
      63.92000198364258,
      63.94000244140625,
      63.94000244140625,
      63.96000289916992,
      63.980003356933594,
      64.0300064086914,
      64.0300064086914,
      64.0300064086914,
      64.04000854492188,
      64.10000610351562,
      64.19000244140625,
      64.20999908447266,
      64.75,
      64.95999908447266
    ],
    "long_mscale": 1.243163121016122,
    "original_max_position_embeddings": 4096,
    "short_factor": [
      1.0,
      1.0399999618530273,
      1.0399999618530273,
      1.0399999618530273,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.059999942779541,
      1.059999942779541,
      1.0699999332427979,
      1.0699999332427979,
      1.0699999332427979,
      1.0699999332427979,
      1.1399999856948853,
      1.159999966621399,
      1.159999966621399,
      1.159999966621399,
      1.159999966621399,
      1.1799999475479126,
      1.1999999284744263,
      1.3199999332427979,
      1.3399999141693115,
      1.3499999046325684,
      1.3999998569488525,
      1.4799998998641968,
      1.4999998807907104,
      1.589999794960022,
      1.6499998569488525,
      1.71999990940094,
      1.8999998569488525,
      1.9099998474121094,
      1.9099998474121094,
      1.9899998903274536,
      1.9999998807907104,
      1.9999998807907104,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.009999990463257,
      2.0999999046325684,
      2.319999933242798,
      2.419999837875366,
      2.5899999141693115,
      2.7899999618530273
    ],
    "short_mscale": 1.243163121016122,
    "type": "longrope"
  },
  "rope_theta": 10000.0,
  "router_aux_loss_coef": 0.0,
  "router_jitter_noise": 0.01,
  "sliding_window": 131072,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.43.3",
  "use_cache": true,
  "vocab_size": 32064,
  "reasoning_hidden_size": 2048,
  "content_hidden_size": 2048,
  "num_reasoning_experts": 8,
  "num_content_experts": 8
}
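For reference, a minimal sketch of loading this config through the Hugging Face `transformers` AutoConfig API. It assumes the JSON above is saved as `config.json` in a local directory alongside the `configuration_phimoe.py` and `modeling_phimoe.py` files named in `auto_map`; the path `./Phi-3.5-MoE-instruct` is a placeholder, and the custom keys (`num_reasoning_experts`, etc.) are visible as attributes only if the remote config class forwards extra kwargs to `PretrainedConfig`, which is the usual pattern:

```python
from transformers import AutoConfig

# Placeholder path: a local directory containing this config.json plus the
# configuration_phimoe.py / modeling_phimoe.py modules listed in "auto_map".
cfg = AutoConfig.from_pretrained("./Phi-3.5-MoE-instruct", trust_remote_code=True)

# Grouped-query attention: 32 query heads share 8 KV heads, i.e. groups of 4.
print(cfg.num_attention_heads // cfg.num_key_value_heads)  # 4

# LongRoPE scaling stretches the original 4096-token window to 131072 positions.
print(cfg.original_max_position_embeddings, "->", cfg.max_position_embeddings)

# Sanity checks on the custom expert split: the 16 local experts and the
# 4096-dim hidden state are each partitioned evenly into reasoning/content halves.
assert cfg.num_reasoning_experts + cfg.num_content_experts == cfg.num_local_experts
assert cfg.reasoning_hidden_size + cfg.content_hidden_size == cfg.hidden_size
```

The consistency checks at the end simply restate what the config implies: 8 + 8 reasoning/content experts make up the 16 `num_local_experts`, and the two 2048-dim partitions sum to the 4096 `hidden_size`.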