{
"architectures": [
"BertForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"directionality": "bidi",
"eos_token_id": 2,
"finetuning_task": "ner",
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "ADJ",
"1": "ADP",
"2": "ADV",
"3": "AUX",
"4": "B-ADJ",
"5": "B-ADP",
"6": "B-ADV",
"7": "B-AUX",
"8": "B-CCONJ",
"9": "B-DET",
"10": "B-NOUN",
"11": "B-NUM",
"12": "B-PART",
"13": "B-PRON",
"14": "B-PROPN",
"15": "B-PUNCT",
"16": "B-SYM",
"17": "B-VERB",
"18": "B-X",
"19": "CCONJ",
"20": "DET",
"21": "I-ADJ",
"22": "I-ADP",
"23": "I-ADV",
"24": "I-AUX",
"25": "I-CCONJ",
"26": "I-DET",
"27": "I-NOUN",
"28": "I-NUM",
"29": "I-PART",
"30": "I-PRON",
"31": "I-PROPN",
"32": "I-PUNCT",
"33": "I-SYM",
"34": "I-VERB",
"35": "I-X",
"36": "NOUN",
"37": "NUM",
"38": "PART",
"39": "PRON",
"40": "PROPN",
"41": "PUNCT",
"42": "SYM",
"43": "VERB",
"44": "X"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"ADJ": 0,
"ADP": 1,
"ADV": 2,
"AUX": 3,
"B-ADJ": 4,
"B-ADP": 5,
"B-ADV": 6,
"B-AUX": 7,
"B-CCONJ": 8,
"B-DET": 9,
"B-NOUN": 10,
"B-NUM": 11,
"B-PART": 12,
"B-PRON": 13,
"B-PROPN": 14,
"B-PUNCT": 15,
"B-SYM": 16,
"B-VERB": 17,
"B-X": 18,
"CCONJ": 19,
"DET": 20,
"I-ADJ": 21,
"I-ADP": 22,
"I-ADV": 23,
"I-AUX": 24,
"I-CCONJ": 25,
"I-DET": 26,
"I-NOUN": 27,
"I-NUM": 28,
"I-PART": 29,
"I-PRON": 30,
"I-PROPN": 31,
"I-PUNCT": 32,
"I-SYM": 33,
"I-VERB": 34,
"I-X": 35,
"NOUN": 36,
"NUM": 37,
"PART": 38,
"PRON": 39,
"PROPN": 40,
"PUNCT": 41,
"SYM": 42,
"VERB": 43,
"X": 44
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"output_past": true,
"pad_token_id": 1,
"pooler_fc_size": 768,
"pooler_num_attention_heads": 12,
"pooler_num_fc_layers": 3,
"pooler_size_per_head": 128,
"pooler_type": "first_token_transform",
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.9.2",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 21128
}
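
For context, this configuration describes a BERT encoder (12 layers, 12 attention heads, hidden size 768, vocabulary of 21,128 tokens) with a token-classification head over 45 labels: plain UPOS tags plus B-/I- prefixed variants for subword tokens inside multi-token words. Below is a minimal usage sketch with the transformers library; the repository id is not stated in this file, so model_id is a placeholder to be replaced with the actual local path or Hub id.

# A minimal sketch, assuming the model weights and tokenizer files that
# accompany this config.json; "model_id" is a placeholder, not taken from
# the config itself.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

model_id = "path/to/this-model"  # placeholder: local directory or Hub repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# The pipeline maps predicted class indices back to tag strings via the
# "id2label" table defined in this config.
tagger = pipeline("token-classification", model=model, tokenizer=tokenizer)
print(tagger("...replace with a sentence to tag..."))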