Commit f0dce44: Training in progress, step 500
{
"_name_or_path": "google/flan-t5-large",
"adapters": {
"adapters": {
"question_answering_union": "1f8ebc8c4a9631dd"
},
"config_map": {
"1f8ebc8c4a9631dd": {
"architecture": "union",
"configs": [
{
"alpha": 1,
"architecture": "lora",
"attn_matrices": [
"q",
"v"
],
"composition_mode": "scale",
"dropout": 0.0,
"init_weights": "ia3",
"intermediate_lora": true,
"output_lora": false,
"r": 1,
"selfattn_lora": true,
"use_gating": false
},
{
"adapter_residual_before_ln": false,
"cross_adapter": false,
"factorized_phm_W": true,
"factorized_phm_rule": false,
"hypercomplex_nonlinearity": "glorot-uniform",
"init_weights": "bert",
"inv_adapter": null,
"inv_adapter_reduction_factor": null,
"is_parallel": false,
"learn_phm": true,
"leave_out": [],
"ln_after": false,
"ln_before": false,
"mh_adapter": true,
"non_linearity": "swish",
"original_ln_after": true,
"original_ln_before": false,
"output_adapter": true,
"phm_bias": true,
"phm_c_init": "normal",
"phm_dim": 4,
"phm_init_range": 0.0001,
"phm_layer": false,
"phm_rank": 1,
"reduction_factor": 8,
"residual_before_ln": true,
"scaling": 1.0,
"shared_W_phm": false,
"shared_phm_rule": true,
"use_gating": false
}
]
}
},
"fusion_config_map": {},
"fusions": {}
},
"architectures": [
"T5ForConditionalGeneration"
],
"d_ff": 2816,
"d_kv": 64,
"d_model": 1024,
"decoder_start_token_id": 0,
"dense_act_fn": "gelu_new",
"dropout_rate": 0.1,
"eos_token_id": 1,
"feed_forward_proj": "gated-gelu",
"initializer_factor": 1.0,
"is_encoder_decoder": true,
"is_gated_act": true,
"layer_norm_epsilon": 1e-06,
"model_type": "t5",
"n_positions": 512,
"num_decoder_layers": 24,
"num_heads": 16,
"num_layers": 24,
"output_past": true,
"pad_token_id": 0,
"relative_attention_max_distance": 128,
"relative_attention_num_buckets": 32,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.26.1",
"use_cache": true,
"vocab_size": 32128
}
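For reference, the `adapters` block above can be reproduced programmatically. Below is a minimal sketch assuming the adapter-transformers fork (consistent with the `transformers_version` of 4.26.1 recorded in this config), whose `ConfigUnion`, `IA3Config`, and `HoulsbyConfig` classes and `add_adapter`/`train_adapter` methods are used here. The explicit `attn_matrices` argument pins the stored `["q", "v"]` value in case a library version defaults differently; the step-500 training loop itself is not shown.

```python
# Sketch: recreating the "question_answering_union" adapter recorded in the
# config above, using the adapter-transformers fork
# (pip install adapter-transformers).
from transformers import AutoModelForSeq2SeqLM
from transformers.adapters import ConfigUnion, HoulsbyConfig, IA3Config

model = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-large")

# The "union" architecture combines two configs, matching config_map above:
#  - an (IA)^3-style config (r=1, composition_mode="scale", init_weights="ia3")
#    on the q/v self-attention matrices and the intermediate feed-forward layer;
#  - a Houlsby-style bottleneck adapter (mh_adapter and output_adapter both
#    true, swish non-linearity) with reduction_factor=8.
union_config = ConfigUnion(
    IA3Config(attn_matrices=["q", "v"]),  # pinned to the stored "attn_matrices"
    HoulsbyConfig(reduction_factor=8),
)

model.add_adapter("question_answering_union", config=union_config)
# Freeze the base model and train only the adapter weights.
model.train_adapter("question_answering_union")
```

Saving the full model (rather than exporting the adapter alone) is what embeds the `adapters`/`config_map` entries, keyed by a config hash such as `1f8ebc8c4a9631dd`, into `config.json` as shown above.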