shreyaspimpalgaonkar committed on
Commit a054ace
1 Parent(s): 4b05ef8

Update config.json

Files changed (1)
  1. config.json +113 -15
config.json CHANGED
@@ -1,36 +1,134 @@
-{
-  "_name_or_path": "sciphi/triplex",
   "architectures": [
-    "Phi3ForCausalLM"
+    "sciphi/triplex"
   ],
+  "attention_bias": false,
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+    "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
+    "AutoModel": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM",
+    "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
   },
   "bos_token_id": 1,
   "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "hidden_act": "silu",
   "hidden_size": 3072,
   "initializer_range": 0.02,
   "intermediate_size": 8192,
-  "max_position_embeddings": 4096,
+  "max_position_embeddings": 131072,
   "model_type": "phi3",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "original_max_position_embeddings": 4096,
   "pad_token_id": 32000,
   "resid_pdrop": 0.0,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
+  "rope_scaling": {
+    "long_factor": [
+      1.0700000524520874,
+      1.1200000047683716,
+      1.149999976158142,
+      1.4199999570846558,
+      1.5699999332427979,
+      1.7999999523162842,
+      2.129999876022339,
+      2.129999876022339,
+      3.009999990463257,
+      5.910000324249268,
+      6.950000286102295,
+      9.070000648498535,
+      9.930000305175781,
+      10.710000038146973,
+      11.130000114440918,
+      14.609999656677246,
+      15.409998893737793,
+      19.809999465942383,
+      37.279998779296875,
+      38.279998779296875,
+      38.599998474121094,
+      40.12000274658203,
+      46.20000457763672,
+      50.940006256103516,
+      53.66000747680664,
+      54.9373893737793,
+      56.89738845825195,
+      57.28738784790039,
+      59.98738479614258,
+      60.86738586425781,
+      60.887386322021484,
+      61.71739196777344,
+      62.91739273071289,
+      62.957393646240234,
+      63.41739273071289,
+      63.8173942565918,
+      63.83739471435547,
+      63.897396087646484,
+      63.93739700317383,
+      64.06739807128906,
+      64.11434936523438,
+      64.12435150146484,
+      64.15435028076172,
+      64.19435119628906,
+      64.24435424804688,
+      64.57435607910156,
+      64.69000244140625,
+      64.76000213623047
+    ],
+    "short_factor": [
+      1.1,
+      1.1,
+      1.1,
+      1.3000000000000003,
+      1.3500000000000003,
+      1.3500000000000003,
+      1.4000000000000004,
+      1.5500000000000005,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.1000000000000005,
+      2.1000000000000005,
+      2.1500000000000004,
+      2.25,
+      2.25,
+      2.25,
+      2.25,
+      2.25,
+      2.3999999999999995,
+      2.4499999999999993,
+      2.499999999999999,
+      2.6999999999999984,
+      2.6999999999999984,
+      2.7499999999999982,
+      2.799999999999998,
+      2.8999999999999977,
+      3.049999999999997
+    ],
+    "type": "yarn"
+  },
   "rope_theta": 10000.0,
-  "sliding_window": 2047,
+  "sliding_window": 262144,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.40.2",
-  "use_cache": true,
-  "attention_bias": false,
+  "transformers_version": "4.42.3",
+  "use_cache": false,
+
   "vocab_size": 32064
 }
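
For reference, a minimal sketch (not part of this commit) of how the updated config might be checked after the change. It assumes the transformers library is installed and that the "sciphi/triplex" repo id resolves to this revision; trust_remote_code=True is needed because auto_map now points at the microsoft/Phi-3-mini-128k-instruct remote code.

# Minimal sketch, assuming transformers is installed and this revision is the
# one served for "sciphi/triplex".
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "sciphi/triplex",
    trust_remote_code=True,  # auto_map now references Phi-3 remote code
)

print(config.max_position_embeddings)  # 131072 after this change (was 4096)
print(config.rope_scaling["type"])     # "yarn"
print(config.sliding_window)           # 262144 (was 2047)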