b3x0m committed
Commit 4e69322 · verified · 1 Parent(s): 3fbcbc0

Upload BertForTokenClassification

Files changed (2)
  1. config.json +36 -32
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "bert-base-chinese",
+  "_name_or_path": "google-bert/bert-base-chinese",
   "architectures": [
     "BertForTokenClassification"
   ],
@@ -10,40 +10,44 @@
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
-    "0": "O",
-    "1": "B-TIM",
-    "2": "I-TIM",
-    "3": "B-PER",
-    "4": "I-PER",
-    "5": "B-LOC",
-    "6": "I-LOC",
-    "7": "B-ORG",
-    "8": "I-ORG",
-    "9": "B-ETC",
-    "10": "I-ETC",
-    "11": "B-MET",
-    "12": "I-MET",
-    "13": "B-ABS",
-    "14": "I-ABS"
+    "0": "B-GPE",
+    "1": "M-GPE",
+    "2": "E-GPE",
+    "3": "O",
+    "4": "B-PER",
+    "5": "M-PER",
+    "6": "E-PER",
+    "7": "B-LOC",
+    "8": "M-LOC",
+    "9": "E-LOC",
+    "10": "B-ORG",
+    "11": "M-ORG",
+    "12": "E-ORG",
+    "13": "S-GPE",
+    "14": "S-LOC",
+    "15": "S-PER",
+    "16": "S-ORG"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "B-ABS": 13,
-    "B-ETC": 9,
-    "B-LOC": 5,
-    "B-MET": 11,
-    "B-ORG": 7,
-    "B-PER": 3,
-    "B-TIM": 1,
-    "I-ABS": 14,
-    "I-ETC": 10,
-    "I-LOC": 6,
-    "I-MET": 12,
-    "I-ORG": 8,
-    "I-PER": 4,
-    "I-TIM": 2,
-    "O": 0
+    "B-GPE": 0,
+    "B-LOC": 7,
+    "B-ORG": 10,
+    "B-PER": 4,
+    "E-GPE": 2,
+    "E-LOC": 9,
+    "E-ORG": 12,
+    "E-PER": 6,
+    "M-GPE": 1,
+    "M-LOC": 8,
+    "M-ORG": 11,
+    "M-PER": 5,
+    "O": 3,
+    "S-GPE": 13,
+    "S-LOC": 14,
+    "S-ORG": 16,
+    "S-PER": 15
   },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
@@ -58,7 +62,7 @@
   "pooler_type": "first_token_transform",
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.21.3",
+  "transformers_version": "4.45.1",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 21128
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:857f5c71866ab382f8806227e199c66598f568cce726448c119c95ffd8836fe9
-size 406781860
+oid sha256:e826f9c0b69baec5728f879df990e7658135c2d227294041a8494f686e528897
+size 406783828
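
The model.safetensors change only swaps the Git LFS pointer: a new sha256 oid and a slightly larger file. A hedged sketch for checking that a downloaded weight file matches the new pointer, assuming huggingface_hub is installed and using the same placeholder repo id as above:

import hashlib
from huggingface_hub import hf_hub_download

# Placeholder repo id -- substitute the actual repository this commit belongs to.
path = hf_hub_download(repo_id="b3x0m/<this-repo>", filename="model.safetensors")

# Hash in chunks to avoid loading the ~400 MB file into memory at once.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

# Should print the oid from the new LFS pointer:
# e826f9c0b69baec5728f879df990e7658135c2d227294041a8494f686e528897
print(h.hexdigest())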