[
  {
    "TestClass": "AlbertTokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁i", "▁was", "▁born", "▁in", "▁9", "2000", ",", "▁and", "▁this", "▁is", "▁false", "."],
    "encoded": [31, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 4997, 9],
    "encoded_special": [2, 31, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 4997, 9, 3],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "BertTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "BertTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {
      "do_lower_case": true
    },
    "params_encode": {}
  },
  {
    "TestClass": "BigBirdPegasusTokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁", "t", "est"],
    "encoded": [288, 46, 9, 3, 12, 390],
    "encoded_special": [288, 46, 9, 3, 12, 390, 1],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "BigBirdTokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4],
    "encoded_special": [1002, 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 1000],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "CLIPTokenizationTest",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["lo", "w", "er</w>", "n", "e", "w", "er</w>"],
    "encoded": [10, 2, 16, 9, 3, 2, 16],
    "encoded_special": [21, 10, 2, 16, 9, 3, 2, 16, 22],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "CamembertTokenizationTest",
    "sp_base": true,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [12, 25, 88, 59, 28, 23, 11, 3, 606, 351, 351, 351, 7, 16, 70, 50, 76, 84, 10, 3, 8],
    "encoded_special": [5, 12, 25, 88, 59, 28, 23, 11, 3, 606, 351, 351, 351, 7, 16, 70, 50, 76, 84, 10, 3, 8, 6],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "CodeGenTokenizationTest",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["Ġlow", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [14, 15, 10, 9, 3, 2, 15],
    "encoded_special": [14, 15, 10, 9, 3, 2, 15],
    "params": {
      "add_prefix_space": true
    },
    "params_encode": {}
  },
  {
    "TestClass": "DPRContextEncoderTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "DPRQuestionEncoderTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "DPRReaderTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "DebertaTokenizationTest",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["l", "o", "w", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [0, 1, 2, 15, 10, 9, 3, 2, 15],
    "encoded_special": [20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "DebertaV2TokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé!",
    "tokens": ["▁", "I", "▁was", "▁born", "▁in", "▁9", "2000", ",", "▁and", "▁this", "▁is", "▁fal", "s", "é", "!"],
    "encoded": [13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 187],
    "encoded_special": [2, 13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 187, 3],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "DistilBertTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "ElectraTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "ElectraTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {
      "do_lower_case": true
    },
    "params_encode": {}
  },
  {
    "TestClass": "FNetTokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁", "I", "▁was", "▁born", "▁in", "▁9", "2000", ",", "▁and", "▁this", "▁is", "▁fal", "s", "é", "."],
    "encoded": [13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 9],
    "encoded_special": [2, 13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 9, 3],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "FunnelTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [7, 4, 5, 10, 8, 9],
    "encoded_special": [1, 7, 4, 5, 10, 8, 9, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "GPT2TokenizationTest",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["Ġlow", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [14, 15, 10, 9, 3, 2, 15],
    "encoded_special": [14, 15, 10, 9, 3, 2, 15],
    "params": {
      "add_prefix_space": true
    },
    "params_encode": {}
  },
  {
    "TestClass": "HerbertTokenizationTest",
    "sp_base": false,
    "sequence": "lower,newer",
    "tokens": ["low", "er</w>", ",</w>", "n", "e", "w", "er</w>"],
    "encoded": [16, 17, 22, 11, 5, 4, 17],
    "encoded_special": [0, 16, 17, 22, 11, 5, 4, 17, 1],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "LayoutLMTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [7, 4, 5, 10, 8, 9],
    "encoded_special": [1, 7, 4, 5, 10, 8, 9, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "LayoutLMv2TokenizationTest",
    "sp_base": false,
    "sequence": null,
    "tokens": null,
    "encoded": [10, 11, 12, 13],
    "encoded_special": [1, 10, 11, 12, 13, 2],
    "params": {},
    "params_encode": {
      "text": ["a", "weirdly", "test"],
      "boxes": [[423, 237, 440, 251], [427, 272, 441, 287], [419, 115, 437, 129]]
    }
  },
  {
    "TestClass": "LayoutLMv3TokenizationTest",
    "sp_base": false,
    "sequence": null,
    "tokens": null,
    "encoded": [14, 15, 10, 9, 3, 2, 15],
    "encoded_special": [20, 14, 15, 10, 9, 3, 2, 15, 21],
    "params": {},
    "params_encode": {
      "text": ["lower", "newer"],
      "boxes": [[423, 237, 440, 251], [427, 272, 441, 287]]
    }
  },
  {
    "TestClass": "LayoutXLMTokenizationTest",
    "sp_base": true,
    "sequence": null,
    "tokens": null,
    "encoded": [11, 113, 159, 17, 39, 171, 383],
    "encoded_special": [0, 11, 113, 159, 17, 39, 171, 383, 2],
    "params": {},
    "params_encode": {
      "text": ["a", "weirdly", "test"],
      "boxes": [[423, 237, 440, 251], [427, 272, 441, 287], [419, 115, 437, 129]]
    }
  },
  {
    "TestClass": "LongformerTokenizationTest",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["l", "o", "w", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [0, 1, 2, 15, 10, 9, 3, 2, 15],
    "encoded_special": [20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "LxmertTokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["[UNK]", "[UNK]", "[UNK]", "[UNK]", "[UNK]", ",", "[UNK]", "[UNK]", "[UNK]", "[UNK]", "[UNK]"],
    "encoded": [0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0],
    "encoded_special": [1, 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MBart50TokenizationTest",
    "sp_base": true,
    "sequence": "the I to a and of in was it me that be he for with my not is s you",
    "tokens": ["▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you"],
    "encoded": [6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49],
    "encoded_special": [1004, 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MBartTokenizationTest",
    "sp_base": true,
    "sequence": "the I to a and of in was it me that be he for with my not is s you",
    "tokens": ["▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you"],
    "encoded": [6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49],
    "encoded_special": [6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 2, 1004],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MPNetTokenizerTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [15, 9, 6, 7, 12, 10, 11, 16],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MarkupLMTokenizationTest",
    "sp_base": false,
    "sequence": null,
    "tokens": null,
    "encoded": [21, 3, 0, 0, 1, 2, 1, 4, 0, 8],
    "encoded_special": [22, 21, 3, 0, 0, 1, 2, 1, 4, 0, 8, 23],
    "params": {},
    "params_encode": {
      "text": ["hello", "world"],
      "xpaths": [",/html/body/div/li[1]/div/span", ",/html/body/div/li[1]/div/span"]
    }
  },
  {
    "TestClass": "MobileBERTTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "NllbTokenizationTest",
    "sp_base": true,
    "sequence": "the I to a and of in was it me that be he for with my not is s you",
    "tokens": ["▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you"],
    "encoded": [6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49],
    "encoded_special": [1048, 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "OpenAIGPTTokenizationTest",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["low", "er</w>", "n", "e", "w", "er</w>"],
    "encoded": [14, 15, 9, 3, 2, 15],
    "encoded_special": [14, 15, 9, 3, 2, 15],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "PegasusTokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁", "t", "est"],
    "encoded": [391, 149, 112, 106, 115, 493],
    "encoded_special": [391, 149, 112, 106, 115, 493, 1],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "Qwen2TokenizationTest",
    "sp_base": false,
    "sequence": "lower lower newer 010;}\r\n<|endoftext|>ϓ",
    "tokens": ["l", "o", "w", "er", "Ġlow", "er", "Ġ", "n", "e", "w", "er", "Ġ", "0", "1", "0", ";}", "č", "Ċ", "<|endoftext|>", "Ïĵ"],
    "encoded": [75, 78, 86, 260, 259, 260, 220, 77, 68, 86, 260, 220, 15, 16, 15, 265, 201, 198, 270, 267],
    "encoded_special": [75, 78, 86, 260, 259, 260, 220, 77, 68, 86, 260, 220, 15, 16, 15, 265, 201, 198, 270, 267],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "ReformerTokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4],
    "encoded_special": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "RemBertTokenizationTest",
    "sp_base": true,
    "sequence": "this is a test",
    "tokens": ["▁this", "▁is", "▁a", "▁t", "est"],
    "encoded": [66, 46, 10, 170, 382],
    "encoded_special": [1000, 66, 46, 10, 170, 382, 1001],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "RobertaTokenizationTest",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["l", "o", "w", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [0, 1, 2, 15, 10, 9, 3, 2, 15],
    "encoded_special": [20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "SeamlessM4TTokenizationTest",
    "sp_base": true,
    "sequence": "the I to a and of in was it me that be he for with my not is s you",
    "tokens": ["▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you"],
    "encoded": [6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49],
    "encoded_special": [3, 1, 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 3],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "SplinterTokenizationTest",
    "sp_base": false,
    "sequence": "I need to test this rigor",
    "tokens": ["[UNK]", "[UNK]", "[UNK]", "test", "this", "rigor"],
    "encoded": [3, 10, 10, 10, 16, 13, 21, 1],
    "encoded_special": [3, 10, 10, 10, 16, 13, 21, 1],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "SqueezeBertTokenizationTest",
    "sp_base": false,
    "sequence": "UNwantéd,running",
    "tokens": ["un", "##want", "##ed", ",", "runn", "##ing"],
    "encoded": [9, 6, 7, 12, 10, 11],
    "encoded_special": [1, 9, 6, 7, 12, 10, 11, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "T5TokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4],
    "encoded_special": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "TestTokenizationBart",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["l", "o", "w", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [0, 1, 2, 15, 10, 9, 3, 2, 15],
    "encoded_special": [20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "TestTokenizationLED",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["l", "o", "w", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [0, 1, 2, 15, 10, 9, 3, 2, 15],
    "encoded_special": [20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "TestTokenizationMvp",
    "sp_base": false,
    "sequence": "lower newer",
    "tokens": ["l", "o", "w", "er", "Ġ", "n", "e", "w", "er"],
    "encoded": [0, 1, 2, 15, 10, 9, 3, 2, 15],
    "encoded_special": [20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "UdopTokenizationTest",
    "sp_base": true,
    "sequence": null,
    "tokens": null,
    "encoded": [10, 112, 158, 16, 38, 170, 382, 37, 86, 20],
    "encoded_special": [10, 112, 158, 16, 38, 170, 382, 37, 86, 20, 2],
    "params": {},
    "params_encode": {
      "text": ["a", "weirdly", "test", "hello"],
      "boxes": [[423, 237, 440, 251], [427, 272, 441, 287], [419, 115, 437, 129], [961, 885, 992, 912]]
    }
  },
  {
    "TestClass": "WhisperTokenizerTest",
    "sp_base": false,
    "sequence": "A BCDEFGHIJKLMNOPQRST",
    "tokens": ["A", "ĠBC", "DE", "F", "GH", "I", "J", "K", "L", "M", "N", "OP", "Q", "R", "ST"],
    "encoded": [32, 14359, 22296, 37, 4269, 40, 41, 42, 43, 44, 45, 12059, 48, 49, 6840],
    "encoded_special": [50258, 50363, 32, 14359, 22296, 37, 4269, 40, 41, 42, 43, 44, 45, 12059, 48, 49, 6840, 50257],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "XGLMTokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5],
    "encoded_special": [2, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "XLMRobertaTokenizationTest",
    "sp_base": false,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5],
    "encoded_special": [0, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "XLNetTokenizationTest",
    "sp_base": true,
    "sequence": "the I to a and of in was it me that be he for with my not is s you",
    "tokens": ["▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you"],
    "encoded": [5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48],
    "encoded_special": [5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48, 1000, 1002],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "CamembertTokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁t", "est"],
    "encoded": [289, 50, 14, 174, 386],
    "encoded_special": [5, 289, 50, 14, 174, 386, 6],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "CamembertTokenizationTest",
    "sp_base": true,
    "sequence": "the I to a and of in was it me that be he for with my not is s you",
    "tokens": ["▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you"],
    "encoded": [9, 12, 13, 14, 16, 17, 23, 25, 29, 35, 38, 40, 41, 44, 45, 47, 48, 50, 51, 52],
    "encoded_special": [5, 9, 12, 13, 14, 16, 17, 23, 25, 29, 35, 38, 40, 41, 44, 45, 47, 48, 50, 51, 52, 6],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MBartTokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁t", "est"],
    "encoded": [286, 47, 11, 171, 383],
    "encoded_special": [286, 47, 11, 171, 383, 2, 1004],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MBartTokenizationTest",
    "sp_base": true,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5],
    "encoded_special": [9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2, 1004],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MBart50TokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁t", "est"],
    "encoded": [286, 47, 11, 171, 383],
    "encoded_special": [1004, 286, 47, 11, 171, 383, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "MBart50TokenizationTest",
    "sp_base": true,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5],
    "encoded_special": [1004, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "NllbTokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁t", "est"],
    "encoded": [286, 47, 11, 171, 383],
    "encoded_special": [1048, 286, 47, 11, 171, 383, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "NllbTokenizationTest",
    "sp_base": true,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5],
    "encoded_special": [1048, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "RemBertTokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁t", "est"],
    "encoded": [285, 46, 10, 170, 382],
    "encoded_special": [1000, 285, 46, 10, 170, 382, 1001],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "RemBertTokenizationTest",
    "sp_base": true,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4],
    "encoded_special": [1000, 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 1001],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "RemBertTokenizationTest",
    "sp_base": true,
    "sequence": "the I to a and of in was it me that be he for with my not is s you",
    "tokens": ["▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you"],
    "encoded": [5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48],
    "encoded_special": [1000, 5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48, 1001],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "SeamlessM4TTokenizationTest",
    "sp_base": true,
    "sequence": "This is a test",
    "tokens": ["▁This", "▁is", "▁a", "▁t", "est"],
    "encoded": [286, 47, 11, 171, 383],
    "encoded_special": [3, 1, 286, 47, 11, 171, 383, 3],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "SeamlessM4TTokenizationTest",
    "sp_base": true,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [9, 22, 85, 56, 25, 20, 8, 1, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 1, 5],
    "encoded_special": [3, 1, 9, 22, 85, 56, 25, 20, 8, 1, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 1, 5, 3],
    "params": {},
    "params_encode": {}
  },
  {
    "TestClass": "XLNetTokenizationTest",
    "sp_base": true,
    "sequence": "I was born in 92000, and this is falsé.",
    "tokens": ["▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "."],
    "encoded": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4],
    "encoded_special": [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 1000, 1002],
    "params": {},
    "params_encode": {}
  }
]