Commit f8bf680
Parent(s): bfb9ea0
Upload config.json

config.json +30 -0
config.json
ADDED
@@ -0,0 +1,30 @@
+{
+  "backbone_checkpoint_name": "t5-large",
+  "backbone_class": "T5ForConditionalGeneration",
+  "backbone_hash": "6297bd1acc36524547c8a76cc03fef5c",
+  "bottleneck_dim": null,
+  "common_structure": null,
+  "delta_type": "compacter",
+  "factorized_phm": true,
+  "factorized_phm_rule": false,
+  "hypercomplex_division": 4,
+  "hypercomplex_nonlinearity": "glorot-uniform",
+  "kronecker_prod": null,
+  "learn_phm": true,
+  "modified_modules": [
+    "SelfAttention",
+    "DenseReluDense"
+  ],
+  "non_linearity": "gelu_new",
+  "opendelta_version": "0.0.1",
+  "phm_c_init": "normal",
+  "phm_init_range": 0.0001,
+  "phm_rank": 1,
+  "reduction_factor": 16,
+  "sequential": null,
+  "shared_W_phm": false,
+  "shared_phm_rule": false,
+  "transformers_version": "4.17.0",
+  "use_bias_down_sampler": true,
+  "use_bias_up_sampler": true
+}
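
This config describes a compacter delta (parameter-efficient adapter) for a t5-large backbone, written by opendelta 0.0.1. Below is a minimal sketch of how such a config could be rebuilt and attached to the backbone with OpenDelta's AutoDeltaConfig / AutoDeltaModel interface, following the OpenDelta README; the exact API may differ across versions, and only a few of the config keys above are shown in the dict.

# Minimal sketch (assumptions: OpenDelta's AutoDeltaConfig/AutoDeltaModel API
# as documented in its README; API details may vary by version).
from transformers import T5ForConditionalGeneration
from opendelta import AutoDeltaConfig, AutoDeltaModel

# Backbone named by "backbone_checkpoint_name"/"backbone_class" above.
backbone = T5ForConditionalGeneration.from_pretrained("t5-large")

# Rebuild the delta config from (a subset of) the JSON dict above.
delta_config = AutoDeltaConfig.from_dict({
    "delta_type": "compacter",
    "modified_modules": ["SelfAttention", "DenseReluDense"],
    "hypercomplex_division": 4,
    "reduction_factor": 16,
})

# Insert compacter modules into the backbone, then freeze everything
# except the delta parameters so only they are trained and saved.
delta_model = AutoDeltaModel.from_config(delta_config, backbone_model=backbone)
delta_model.freeze_module(exclude=["deltas"], set_state_dict=True)
delta_model.log()  # print which modules were modified and parameter counts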