{
  "_name_or_path": "dmis-lab/biobert-base-cased-v1.2",
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "VBD",
    "1": "N",
    "2": "XT",
    "3": "JJS",
    "4": "E2A",
    "5": "WRB",
    "6": "VB",
    "7": "TO",
    "8": "VBP",
    "9": "FW",
    "10": "EX",
    "11": "VBN",
    "12": "VBZ",
    "13": "NNS",
    "14": "VBG",
    "15": "RBR",
    "16": "WP",
    "17": "CT",
    "18": "PRP",
    "19": "JJR",
    "20": "CC",
    "21": "NNPS",
    "22": "CD",
    "23": "DT",
    "24": "NNP",
    "25": "PDT",
    "26": "LS",
    "27": "PP",
    "28": "PRP$",
    "29": "NN",
    "30": "JJ",
    "31": "RP",
    "32": "RBS",
    "33": "MD",
    "34": "WP$",
    "35": "RB",
    "36": "SYM",
    "37": "IN",
    "38": "PUNCT",
    "39": "WDT",
    "40": "POS",
    "41": ""
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "VBD": 0,
    "N": 1,
    "XT": 2,
    "JJS": 3,
    "E2A": 4,
    "WRB": 5,
    "VB": 6,
    "TO": 7,
    "VBP": 8,
    "FW": 9,
    "EX": 10,
    "VBN": 11,
    "VBZ": 12,
    "NNS": 13,
    "VBG": 14,
    "RBR": 15,
    "WP": 16,
    "CT": 17,
    "PRP": 18,
    "JJR": 19,
    "CC": 20,
    "NNPS": 21,
    "CD": 22,
    "DT": 23,
    "NNP": 24,
    "PDT": 25,
    "LS": 26,
    "PP": 27,
    "PRP$": 28,
    "NN": 29,
    "JJ": 30,
    "RP": 31,
    "RBS": 32,
    "MD": 33,
    "WP$": 34,
    "RB": 35,
    "SYM": 36,
    "IN": 37,
    "PUNCT": 38,
    "WDT": 39,
    "POS": 40,
    "": 41
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.21.3",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 28996
}