{
  "_name_or_path": "t5-base",
  "architectures": [
    "T5ForConditionalGeneration"
  ],
  "attn_method": "linear",
  "attn_prefix_tuning": false,
  "class_weights": [
    0.43349358974358976,
    0.4663793103448276,
    1.0734126984126984,
    1.3004807692307692,
    1.4701086956521738,
    1.8277027027027026,
    1.9321428571428572,
    9.660714285714286
  ],
  "d_ff": 3072,
  "d_kv": 64,
  "d_model": 768,
  "decoder_start_token_id": 0,
  "dropout_rate": 0.1,
  "eos_token_id": 1,
  "feed_forward_proj": "relu",
  "fix_attention": false,
  "gradient_checkpointing": false,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2",
    "3": "LABEL_3",
    "4": "LABEL_4",
    "5": "LABEL_5",
    "6": "LABEL_6",
    "7": "LABEL_7"
  },
  "ignore_target": false,
  "init_prefix_method": "random",
  "initializer_factor": 1.0,
  "is_contrastive": false,
  "is_encoder_decoder": true,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_2": 2,
    "LABEL_3": 3,
    "LABEL_4": 4,
    "LABEL_5": 5,
    "LABEL_6": 6,
    "LABEL_7": 7
  },
  "label_based": false,
  "label_token_ids": [
    [
      16,
      10454,
      2493,
      1
    ],
    [
      1921,
      1
    ],
    [
      1690,
      1
    ],
    [
      21530,
      1
    ],
    [
      11746,
      1
    ],
    [
      11122,
      1
    ],
    [
      822,
      1
    ],
    [
      30430,
      1
    ]
  ],
  "layer_norm_epsilon": 1e-06,
  "learned_temperature": false,
  "max_length": 4,
  "model_type": "t5",
  "n_positions": 512,
  "normalize_prefixes": false,
  "num_decoder_layers": 12,
  "num_heads": 12,
  "num_layers": 12,
  "num_target": 1,
  "output_past": true,
  "pad_token_id": 0,
  "prefix_num": 1,
  "prefix_tuning": false,
  "relative_attention_num_buckets": 32,
  "shared_attn": false,
  "task_specific_params": {
    "summarization": {
      "early_stopping": true,
      "length_penalty": 2.0,
      "max_length": 200,
      "min_length": 30,
      "no_repeat_ngram_size": 3,
      "num_beams": 4,
      "prefix": "summarize: "
    },
    "translation_en_to_de": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to German: "
    },
    "translation_en_to_fr": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to French: "
    },
    "translation_en_to_ro": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to Romanian: "
    }
  },
  "temperature": 2000,
  "train_task_adapters": true,
  "transformers_version": "4.6.0",
  "use_cache": true,
  "vocab_size": 32100
}
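
A minimal sketch of how a config like this could be consumed with the standard transformers API; "path/to/this/repo" is a placeholder for wherever this config.json lives. Keys such as "attn_method", "attn_prefix_tuning", and "class_weights" are not stock T5 options: a plain transformers install carries them as extra attributes on the config object but ignores them architecturally (they are presumably read by the authors' own training code). The eight "class_weights" entries line up with LABEL_0..LABEL_7 and look like per-class loss weights, used below in an illustrative weighted cross-entropy; that usage is an assumption, not confirmed by this file.

```python
# Sketch only: "path/to/this/repo" is a placeholder, and the weighted-loss
# use of class_weights is an assumption about the authors' training code.
import torch
from transformers import T5Config, T5ForConditionalGeneration

config = T5Config.from_pretrained("path/to/this/repo")

# Unknown keys in config.json become plain attributes on the config object,
# so the custom fields are reachable even without the authors' fork.
weights = torch.tensor(getattr(config, "class_weights", [1.0] * 8))
loss_fn = torch.nn.CrossEntropyLoss(weight=weights)  # 8-way weighted loss

# Stock t5-base weights load against this config (same dims); custom fields
# like attn_method ("linear") take effect only with the matching model code.
model = T5ForConditionalGeneration.from_pretrained("t5-base", config=config)
```

Note that each entry of "label_token_ids" ends in token id 1, which matches "eos_token_id", and "max_length" is 4, consistent with the labels being decoded as short verbalized token sequences rather than classifier indices.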