harshildarji committed on
Commit
00bb4e2
·
1 Parent(s): 7b2467d

update config.json

Browse files
Files changed (4) hide show
  1. .gitignore +1 -0
  2. config.json +136 -136
  3. pytorch_model.bin +1 -1
  4. tf_model.h5 +1 -1
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ .DS_store
config.json CHANGED
@@ -10,146 +10,146 @@
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
- "0": "LABEL_0",
14
- "1": "LABEL_1",
15
- "2": "LABEL_2",
16
- "3": "LABEL_3",
17
- "4": "LABEL_4",
18
- "5": "LABEL_5",
19
- "6": "LABEL_6",
20
- "7": "LABEL_7",
21
- "8": "LABEL_8",
22
- "9": "LABEL_9",
23
- "10": "LABEL_10",
24
- "11": "LABEL_11",
25
- "12": "LABEL_12",
26
- "13": "LABEL_13",
27
- "14": "LABEL_14",
28
- "15": "LABEL_15",
29
- "16": "LABEL_16",
30
- "17": "LABEL_17",
31
- "18": "LABEL_18",
32
- "19": "LABEL_19",
33
- "20": "LABEL_20",
34
- "21": "LABEL_21",
35
- "22": "LABEL_22",
36
- "23": "LABEL_23",
37
- "24": "LABEL_24",
38
- "25": "LABEL_25",
39
- "26": "LABEL_26",
40
- "27": "LABEL_27",
41
- "28": "LABEL_28",
42
- "29": "LABEL_29",
43
- "30": "LABEL_30",
44
- "31": "LABEL_31",
45
- "32": "LABEL_32",
46
- "33": "LABEL_33",
47
- "34": "LABEL_34",
48
- "35": "LABEL_35",
49
- "36": "LABEL_36",
50
- "37": "LABEL_37",
51
- "38": "LABEL_38",
52
- "39": "LABEL_39",
53
- "40": "LABEL_40",
54
- "41": "LABEL_41",
55
- "42": "LABEL_42",
56
- "43": "LABEL_43",
57
- "44": "LABEL_44",
58
- "45": "LABEL_45",
59
- "46": "LABEL_46",
60
- "47": "LABEL_47",
61
- "48": "LABEL_48",
62
- "49": "LABEL_49",
63
- "50": "LABEL_50",
64
- "51": "LABEL_51",
65
- "52": "LABEL_52",
66
- "53": "LABEL_53",
67
- "54": "LABEL_54",
68
- "55": "LABEL_55",
69
- "56": "LABEL_56",
70
- "57": "LABEL_57",
71
- "58": "LABEL_58",
72
- "59": "LABEL_59",
73
- "60": "LABEL_60",
74
- "61": "LABEL_61",
75
- "62": "LABEL_62",
76
- "63": "LABEL_63",
77
- "64": "LABEL_64",
78
- "65": "LABEL_65",
79
- "66": "LABEL_66",
80
- "67": "LABEL_67"
81
  },
82
  "initializer_range": 0.02,
83
  "intermediate_size": 3072,
84
  "label2id": {
85
- "LABEL_0": 0,
86
- "LABEL_1": 1,
87
- "LABEL_10": 10,
88
- "LABEL_11": 11,
89
- "LABEL_12": 12,
90
- "LABEL_13": 13,
91
- "LABEL_14": 14,
92
- "LABEL_15": 15,
93
- "LABEL_16": 16,
94
- "LABEL_17": 17,
95
- "LABEL_18": 18,
96
- "LABEL_19": 19,
97
- "LABEL_2": 2,
98
- "LABEL_20": 20,
99
- "LABEL_21": 21,
100
- "LABEL_22": 22,
101
- "LABEL_23": 23,
102
- "LABEL_24": 24,
103
- "LABEL_25": 25,
104
- "LABEL_26": 26,
105
- "LABEL_27": 27,
106
- "LABEL_28": 28,
107
- "LABEL_29": 29,
108
- "LABEL_3": 3,
109
- "LABEL_30": 30,
110
- "LABEL_31": 31,
111
- "LABEL_32": 32,
112
- "LABEL_33": 33,
113
- "LABEL_34": 34,
114
- "LABEL_35": 35,
115
- "LABEL_36": 36,
116
- "LABEL_37": 37,
117
- "LABEL_38": 38,
118
- "LABEL_39": 39,
119
- "LABEL_4": 4,
120
- "LABEL_40": 40,
121
- "LABEL_41": 41,
122
- "LABEL_42": 42,
123
- "LABEL_43": 43,
124
- "LABEL_44": 44,
125
- "LABEL_45": 45,
126
- "LABEL_46": 46,
127
- "LABEL_47": 47,
128
- "LABEL_48": 48,
129
- "LABEL_49": 49,
130
- "LABEL_5": 5,
131
- "LABEL_50": 50,
132
- "LABEL_51": 51,
133
- "LABEL_52": 52,
134
- "LABEL_53": 53,
135
- "LABEL_54": 54,
136
- "LABEL_55": 55,
137
- "LABEL_56": 56,
138
- "LABEL_57": 57,
139
- "LABEL_58": 58,
140
- "LABEL_59": 59,
141
- "LABEL_6": 6,
142
- "LABEL_60": 60,
143
- "LABEL_61": 61,
144
- "LABEL_62": 62,
145
- "LABEL_63": 63,
146
- "LABEL_64": 64,
147
- "LABEL_65": 65,
148
- "LABEL_66": 66,
149
- "LABEL_67": 67,
150
- "LABEL_7": 7,
151
- "LABEL_8": 8,
152
- "LABEL_9": 9
153
  },
154
  "layer_norm_eps": 1e-12,
155
  "max_position_embeddings": 512,
 
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
+ "0": "I-DSR18",
14
+ "1": "B-PD",
15
+ "2": "I-NPD",
16
+ "3": "I-DC",
17
+ "4": "I-R",
18
+ "5": "I-CONS",
19
+ "6": "B-A",
20
+ "7": "I-LI",
21
+ "8": "B-NPD",
22
+ "9": "B-DSR16",
23
+ "10": "I-A",
24
+ "11": "I-DSR16",
25
+ "12": "I-DSR19",
26
+ "13": "B-SNEU",
27
+ "14": "B-DC",
28
+ "15": "I-DSR21",
29
+ "16": "I-P",
30
+ "17": "I-DSR15",
31
+ "18": "I-RP",
32
+ "19": "I-DPO",
33
+ "20": "B-LI",
34
+ "21": "B-DSR20",
35
+ "22": "B-CONS",
36
+ "23": "B-DSR18",
37
+ "24": "I-SEU",
38
+ "25": "B-LB",
39
+ "26": "B-RI",
40
+ "27": "B-TP",
41
+ "28": "I-LB",
42
+ "29": "B-DP",
43
+ "30": "B-LC",
44
+ "31": "I-NRP",
45
+ "32": "O",
46
+ "33": "I-SNEU",
47
+ "34": "I-DSR22",
48
+ "35": "B-NRP",
49
+ "36": "B-OM",
50
+ "37": "B-ADM",
51
+ "38": "B-DSR19",
52
+ "39": "I-TM",
53
+ "40": "I-DSR20",
54
+ "41": "I-RET",
55
+ "42": "I-CONT",
56
+ "43": "B-DSR17",
57
+ "44": "I-DSO",
58
+ "45": "I-RI",
59
+ "46": "B-RP",
60
+ "47": "I-LC",
61
+ "48": "B-DSR21",
62
+ "49": "B-RET",
63
+ "50": "B-SEU",
64
+ "51": "B-TM",
65
+ "52": "I-ADM",
66
+ "53": "B-DS",
67
+ "54": "I-TP",
68
+ "55": "B-P",
69
+ "56": "I-DS",
70
+ "57": "B-DPO",
71
+ "58": "I-PD",
72
+ "59": "I-OM",
73
+ "60": "B-R",
74
+ "61": "I-DSR17",
75
+ "62": "B-DSR15",
76
+ "63": "I-DP",
77
+ "64": "B-DSR22",
78
+ "65": "B-DSO",
79
+ "66": "B-CONT",
80
+ "67": "PAD"
81
  },
82
  "initializer_range": 0.02,
83
  "intermediate_size": 3072,
84
  "label2id": {
85
+ "B-A": 6,
86
+ "B-ADM": 37,
87
+ "B-CONS": 22,
88
+ "B-CONT": 66,
89
+ "B-DC": 14,
90
+ "B-DP": 29,
91
+ "B-DPO": 57,
92
+ "B-DS": 53,
93
+ "B-DSO": 65,
94
+ "B-DSR15": 62,
95
+ "B-DSR16": 9,
96
+ "B-DSR17": 43,
97
+ "B-DSR18": 23,
98
+ "B-DSR19": 38,
99
+ "B-DSR20": 21,
100
+ "B-DSR21": 48,
101
+ "B-DSR22": 64,
102
+ "B-LB": 25,
103
+ "B-LC": 30,
104
+ "B-LI": 20,
105
+ "B-NPD": 8,
106
+ "B-NRP": 35,
107
+ "B-OM": 36,
108
+ "B-P": 55,
109
+ "B-PD": 1,
110
+ "B-R": 60,
111
+ "B-RET": 49,
112
+ "B-RI": 26,
113
+ "B-RP": 46,
114
+ "B-SEU": 50,
115
+ "B-SNEU": 13,
116
+ "B-TM": 51,
117
+ "B-TP": 27,
118
+ "I-A": 10,
119
+ "I-ADM": 52,
120
+ "I-CONS": 5,
121
+ "I-CONT": 42,
122
+ "I-DC": 3,
123
+ "I-DP": 63,
124
+ "I-DPO": 19,
125
+ "I-DS": 56,
126
+ "I-DSO": 44,
127
+ "I-DSR15": 17,
128
+ "I-DSR16": 11,
129
+ "I-DSR17": 61,
130
+ "I-DSR18": 0,
131
+ "I-DSR19": 12,
132
+ "I-DSR20": 40,
133
+ "I-DSR21": 15,
134
+ "I-DSR22": 34,
135
+ "I-LB": 28,
136
+ "I-LC": 47,
137
+ "I-LI": 7,
138
+ "I-NPD": 2,
139
+ "I-NRP": 31,
140
+ "I-OM": 59,
141
+ "I-P": 16,
142
+ "I-PD": 58,
143
+ "I-R": 4,
144
+ "I-RET": 41,
145
+ "I-RI": 45,
146
+ "I-RP": 18,
147
+ "I-SEU": 24,
148
+ "I-SNEU": 33,
149
+ "I-TM": 39,
150
+ "I-TP": 54,
151
+ "O": 32,
152
+ "PAD": 67
153
  },
154
  "layer_norm_eps": 1e-12,
155
  "max_position_embeddings": 512,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e01cd01ce79183f1e48be8a9c813ab8340209c38ea545e1da8f9a7de7ada68f2
3
  size 431173037
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:249b34e6f9d5723b678eaf0c811f827c65da16a37f2436ab2fe4f2dd511b88a1
3
  size 431173037
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1b1388cd0278eb72218b8099f878f422af07916aa57cc0c6cce639d0ea24044c
3
  size 431356000
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6de5256c43062abd4b9eb0acde00709e7f7f0ab90655caab23d641855b51aff7
3
  size 431356000