borna committed
Commit c94526f
1 Parent(s): b97c602

commit files to HF hub

Files changed (2)
  1. config.json +1 -1
  2. config_rope.json +175 -0
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "PersianStories-4k",
   "architectures": [
-    "Phi3ForCausalLM"
+    "PersianStoriesForCausalLM"
   ],
   "attention_dropout": 0.0,
   "auto_map": {
config_rope.json ADDED
@@ -0,0 +1,175 @@
+{
+  "_name_or_path": "PersianStories-4k",
+  "architectures": [
+    "Phi3ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration.PersianStoriesConfig",
+    "AutoModelForCausalLM": "modeling.PersianStoriesForCausalLM"
+  },
+  "bos_token_id": 1,
+  "embd_pdrop": 0.0,
+  "eos_token_id": 32000,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 4096,
+  "model_type": "phi3",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
+  "num_key_value_heads": 4,
+  "original_max_position_embeddings": 4096,
+  "pad_token_id": 32000,
+  "resid_pdrop": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope_position_scale": 1.0,
+  "rope_scaling": {
+    "long_factor": [
+      1.0,
+      1.01,
+      1.01,
+      1.02,
+      1.04,
+      1.04,
+      1.04,
+      1.05,
+      1.05,
+      1.06,
+      1.07,
+      1.08,
+      1.08,
+      1.08,
+      1.08,
+      1.08,
+      1.08,
+      1.08,
+      1.09,
+      1.09,
+      1.2,
+      2.31,
+      3.76,
+      9.38,
+      10.1,
+      10.8,
+      18.1,
+      25.2,
+      25.3,
+      26.1,
+      26.6,
+      30.2,
+      33.0,
+      41.5,
+      44.4,
+      44.8,
+      50.2,
+      51.9,
+      59.3,
+      62.7,
+      66.1,
+      66.3,
+      85.8,
+      89.3,
+      90.0,
+      99.9,
+      107.0,
+      110.0,
+      111.0,
+      117.0,
+      118.0,
+      121.0,
+      122.0,
+      127.0,
+      127.0,
+      128.0,
+      128.0,
+      128.0,
+      128.0,
+      128.0,
+      128.0,
+      129.0,
+      129.0,
+      129.0
+    ],
+    "long_mscale": 1.1902380714238083,
+    "original_max_position_embeddings": 8192,
+    "short_factor": [
+      1.02,
+      1.02,
+      1.05,
+      1.05,
+      1.06,
+      1.08,
+      1.08,
+      1.08,
+      1.08,
+      1.12,
+      1.1800000000000002,
+      1.1900000000000002,
+      1.1900000000000002,
+      1.2100000000000002,
+      1.2300000000000002,
+      1.2400000000000002,
+      1.2400000000000002,
+      1.2500000000000002,
+      1.3000000000000003,
+      1.3100000000000003,
+      1.4600000000000004,
+      1.5100000000000005,
+      1.7000000000000006,
+      1.9300000000000008,
+      2.080000000000001,
+      2.4399999999999933,
+      3.2199999999999767,
+      3.4499999999999718,
+      3.579999999999969,
+      4.669999999999946,
+      4.779999999999943,
+      5.999999999999917,
+      6.009999999999917,
+      6.4199999999999084,
+      6.619999999999904,
+      7.189999999999892,
+      7.3099999999998895,
+      7.339999999999889,
+      7.479999999999886,
+      9.749999999999837,
+      10.919999999999812,
+      11.219999999999805,
+      11.749999999999794,
+      11.979999999999789,
+      13.239999999999762,
+      13.579999999999755,
+      13.669999999999753,
+      13.82999999999975,
+      14.009999999999746,
+      14.679999999999731,
+      14.889999999999727,
+      15.769999999999708,
+      15.769999999999708,
+      15.819999999999707,
+      15.839999999999707,
+      15.919999999999705,
+      16.029999999999703,
+      16.12999999999972,
+      16.44999999999977,
+      16.44999999999977,
+      16.77999999999982,
+      16.83999999999983,
+      16.83999999999983,
+      16.889999999999837
+    ],
+    "short_mscale": 1.0,
+    "type": "su"
+  },
+  "rope_theta": 10000.0,
+  "sliding_window": 2047,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.40.2",
+  "use_cache": true,
+  "attention_bias": false,
+  "vocab_size": 32064
+}
+
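
The new config_rope.json keeps the Phi3ForCausalLM architecture name but adds a "su"-type (LongRoPE-style) rope_scaling block, with per-dimension long_factor/short_factor rescalings (64 entries each, one per rotary frequency) and a precomputed long_mscale. As a sanity check of that constant, assuming it was derived with the attention-scaling formula used by Phi-3's "su" rotary embedding, sqrt(1 + ln(scale) / ln(original_max_position_embeddings)), a scale of 32 over a 4096-token base reproduces the stored value exactly; the scale and base here are assumptions about its derivation, since this file itself lists max_position_embeddings = 4096 and an 8192-token original length inside rope_scaling:

    import math

    # Assumed derivation of long_mscale: the Phi-3 "su" scaling-factor formula,
    # sqrt(1 + ln(scale) / ln(original_max_position_embeddings)).
    original_max_position_embeddings = 4096  # assumed base context length
    scale = 32                               # assumed extension factor (e.g. 131072 / 4096)
    long_mscale = math.sqrt(1 + math.log(scale) / math.log(original_max_position_embeddings))
    print(long_mscale)  # 1.1902380714238083, the value stored in config_rope.json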