{ "_name_or_path": "xlm-roberta-base", "architectures": [ "XLMRobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "ar", "1": "bg", "2": "cs", "3": "de", "4": "el", "5": "en", "6": "es", "7": "fr", "8": "hi", "9": "id", "10": "it", "11": "ja", "12": "ko", "13": "nl", "14": "pl", "15": "pt", "16": "ro", "17": "ru", "18": "sv", "19": "sw", "20": "th", "21": "tr", "22": "uk", "23": "ur", "24": "vi", "25": "zh" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "ar": 0, "bg": 1, "cs": 2, "de": 3, "el": 4, "en": 5, "es": 6, "fr": 7, "hi": 8, "id": 9, "it": 10, "ja": 11, "ko": 12, "nl": 13, "pl": 14, "pt": 15, "ro": 16, "ru": 17, "sv": 18, "sw": 19, "th": 20, "tr": 21, "uk": 22, "ur": 23, "vi": 24, "zh": 25 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.28.1", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }