trying to convert to sequence classification
- config.json +7 -7
- tf_model.h5 +1 -1
- tokenizer_config.json +1 -1
config.json
CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "
+  "_name_or_path": ".",
   "architectures": [
-    "
+    "BertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "directionality": "bidi",
@@ -29,6 +29,10 @@
   "intermediate_size": 3072,
   "label2id": {
     "1": 0,
+    "10": 9,
+    "11": 10,
+    "12": 11,
+    "13": 12,
     "2": 1,
     "3": 2,
     "4": 3,
@@ -36,11 +40,7 @@
     "6": 5,
     "7": 6,
     "8": 7,
-    "9": 8,
-    "10": 9,
-    "11": 10,
-    "12": 11,
-    "13": 12
+    "9": 8
   },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
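The substantive config change is two lines: "_name_or_path" becomes "." and "architectures" becomes ["BertForSequenceClassification"]. The label2id hunks leave the mapping itself untouched ("1" through "13" map to 0 through 12 before and after); the keys merely move because the transformers config serializer dumps JSON with sort_keys=True, which orders the string keys lexicographically ("1", "10", ..., "13", "2", ...). A minimal sketch of how a conversion like this is typically produced; the checkpoint path here is a placeholder for the truncated _name_or_path above, and only the 13-label mapping and the architecture name come from the diff:

import tensorflow as tf
from transformers import BertTokenizer, TFBertForSequenceClassification

num_labels = 13
# The committed config maps string labels "1".."13" to ids 0..12.
label2id = {str(i + 1): i for i in range(num_labels)}
id2label = {i: str(i + 1) for i in range(num_labels)}

# "path/to/original-bert" is an assumed placeholder; loading a
# non-classification BERT checkpoint this way initializes a fresh
# classification head on top of the pretrained encoder.
model = TFBertForSequenceClassification.from_pretrained(
    "path/to/original-bert",
    num_labels=num_labels,
    label2id=label2id,
    id2label=id2label,
)
tokenizer = BertTokenizer.from_pretrained("path/to/original-bert")

# Saving into the repo root yields "_name_or_path": "." in config.json
# and rewrites tf_model.h5, whose Git LFS pointer changes below.
model.save_pretrained(".")
tokenizer.save_pretrained(".")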
tf_model.h5
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0db013b083fae5bcde451cffa45a076e3bcc1d174b53cd3275b1b1a41f5f5f3b
 size 669736340
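tf_model.h5 is stored through Git LFS, so the diff touches only the pointer file: the oid is the SHA-256 of the actual ~670 MB weight file, and size is its byte length. A new oid with an unchanged size means the weights were rewritten at identical length, which is consistent with re-saving the same-shaped model with a reinitialized head, though the commit itself does not say how the file was produced.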
tokenizer_config.json
CHANGED
@@ -1 +1 @@
-{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "name_or_path": "
+{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "name_or_path": ".", "special_tokens_map_file": "./special_tokens_map.json", "do_basic_tokenize": true, "never_split": null}
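With all three files updated, a quick sanity check that the converted checkpoint loads and classifies; this sketch assumes the files above sit in the current directory, and the input sentence is illustrative only:

import tensorflow as tf
from transformers import BertTokenizer, TFBertForSequenceClassification

tokenizer = BertTokenizer.from_pretrained(".")
model = TFBertForSequenceClassification.from_pretrained(".")

inputs = tokenizer("example input", return_tensors="tf")
logits = model(**inputs).logits               # shape (1, 13)
pred_id = int(tf.argmax(logits, axis=-1)[0])
print(model.config.id2label[pred_id])         # one of the labels "1".."13"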