Defetya committed on
Commit 853e33c · verified · 1 Parent(s): e1a908b

Rename config (1).json to config.json

Files changed (2)
  1. config (1).json +0 -22
  2. config.json +42 -0
config (1).json DELETED
@@ -1,22 +0,0 @@
- {"architectures": [
-   "BertForMaskedLM"
- ],
- "attention_probs_dropout_prob": 0.1,
- "directionality": "bidi",
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 768,
- "initializer_range": 0.02,
- "intermediate_size": 3072,
- "max_position_embeddings": 512,
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pooler_fc_size": 768,
- "pooler_num_attention_heads": 12,
- "pooler_num_fc_layers": 3,
- "pooler_size_per_head": 128,
- "pooler_type": "first_token_transform",
- "type_vocab_size": 2,
- "vocab_size": 120138,
- "model_type": "bert"
- }
config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "_name_or_path": "ai-forever/ruBert-large",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "directionality": "bidi",
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 0,
+   "pooler_fc_size": 768,
+   "pooler_num_attention_heads": 12,
+   "pooler_num_fc_layers": 3,
+   "pooler_size_per_head": 128,
+   "pooler_type": "first_token_transform",
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.45.1",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 120138
+ }
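The rename matters because the transformers library's `from_pretrained` helpers look specifically for a file named `config.json` in the repository root, so the model could not be auto-loaded while the file was named `config (1).json`. A minimal sketch of loading the renamed config, where `"path/to/repo"` is a placeholder for this repository's hub id or a local checkout (not taken from the commit itself):

```python
# Minimal sketch: load the renamed config with Hugging Face transformers.
# "path/to/repo" is a placeholder for the hub repo id or a local clone.
from transformers import AutoConfig, AutoModelForSequenceClassification

config = AutoConfig.from_pretrained("path/to/repo")  # now resolves config.json

print(config.model_type)         # "bert"
print(config.num_hidden_layers)  # 24 (ruBert-large, vs. 12 in the deleted base config)
print(config.id2label)           # {0: "LABEL_0", 1: "LABEL_1", 2: "LABEL_2"}

# "architectures" maps to BertForSequenceClassification with a 3-label head;
# this step succeeds only if the repo also contains the model weights.
model = AutoModelForSequenceClassification.from_pretrained("path/to/repo")
```

Note that the new config also differs substantively from the deleted one: it describes ruBert-large (hidden_size 1024, 24 layers, 16 heads) fine-tuned for single-label classification with three labels, rather than the base-sized masked-LM config that was removed.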