Columns (each record below lists these eight fields in this order, one per line):
- TestClass: string (18 to 34 characters)
- sp_base: bool (2 classes)
- sequence: string (11 distinct values)
- tokens: sequence of strings (length 5 to 21)
- encoded: sequence of token ids (length 4 to 21)
- encoded_special: sequence of token ids (length 6 to 24)
- params: dict
- params_encode: dict
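Each record in the table that follows appears to be one tokenizer test case. As a reading aid, here is a minimal Python sketch that mirrors the eight-field schema above and transcribes the first row, AlbertTokenizationTest, verbatim; the `TokenizerFixture` dataclass and its type hints are our own naming for illustration, not part of the dataset.

```python
# A minimal sketch of one record, assuming the column order given above.
# The dataclass name and field types are ours, not shipped with the dataset.
from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class TokenizerFixture:
    TestClass: str
    sp_base: bool
    sequence: Optional[str]      # null in the LayoutLM/MarkupLM/Udop rows
    tokens: Optional[list[str]]  # null when the input comes from params_encode
    encoded: list[int]
    encoded_special: list[int]
    params: dict[str, Any]
    params_encode: dict[str, Any]


# First row of the table (AlbertTokenizationTest), transcribed verbatim.
albert = TokenizerFixture(
    TestClass="AlbertTokenizationTest",
    sp_base=False,
    sequence="I was born in 92000, and this is falsé.",
    tokens=["▁i", "▁was", "▁born", "▁in", "▁9", "2000", ",",
            "▁and", "▁this", "▁is", "▁false", "."],
    encoded=[31, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 4997, 9],
    encoded_special=[2, 31, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 4997, 9, 3],
    params={"add_prefix_space": None, "do_lower_case": None},
    params_encode={"boxes": None, "text": None, "xpaths": None},
)

# `encoded` reappears inside `encoded_special`, wrapped by special-token ids
# (2 and 3 in this test vocabulary).
assert len(albert.tokens) == len(albert.encoded)
assert albert.encoded == albert.encoded_special[1:-1]
```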
AlbertTokenizationTest
false
I was born in 92000, and this is falsé.
[ "▁i", "▁was", "▁born", "▁in", "▁9", "2000", ",", "▁and", "▁this", "▁is", "▁false", "." ]
[ 31, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 4997, 9 ]
[ 2, 31, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 4997, 9, 3 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
BertTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
BertTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": true }
{ "boxes": null, "text": null, "xpaths": null }
BigBirdPegasusTokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁", "t", "est" ]
[ 288, 46, 9, 3, 12, 390 ]
[ 288, 46, 9, 3, 12, 390, 1 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
BigBirdTokenizationTest
false
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4 ]
[ 1002, 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 1000 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
CLIPTokenizationTest
false
lower newer
[ "lo", "w", "er</w>", "n", "e", "w", "er</w>" ]
[ 10, 2, 16, 9, 3, 2, 16 ]
[ 21, 10, 2, 16, 9, 3, 2, 16, 22 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
CamembertTokenizationTest
true
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 12, 25, 88, 59, 28, 23, 11, 3, 606, 351, 351, 351, 7, 16, 70, 50, 76, 84, 10, 3, 8 ]
[ 5, 12, 25, 88, 59, 28, 23, 11, 3, 606, 351, 351, 351, 7, 16, 70, 50, 76, 84, 10, 3, 8, 6 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
CodeGenTokenizationTest
false
lower newer
[ "Ġlow", "er", "Ġ", "n", "e", "w", "er" ]
[ 14, 15, 10, 9, 3, 2, 15 ]
[ 14, 15, 10, 9, 3, 2, 15 ]
{ "add_prefix_space": true, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
DPRContextEncoderTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
DPRQuestionEncoderTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
DPRReaderTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
DebertaTokenizationTest
false
lower newer
[ "l", "o", "w", "er", "Ġ", "n", "e", "w", "er" ]
[ 0, 1, 2, 15, 10, 9, 3, 2, 15 ]
[ 20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
DebertaV2TokenizationTest
false
I was born in 92000, and this is falsé!
[ "▁", "I", "▁was", "▁born", "▁in", "▁9", "2000", ",", "▁and", "▁this", "▁is", "▁fal", "s", "é", "!" ]
[ 13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 187 ]
[ 2, 13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 187, 3 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
DistilBertTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
ElectraTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
ElectraTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": true }
{ "boxes": null, "text": null, "xpaths": null }
FNetTokenizationTest
false
I was born in 92000, and this is falsé.
[ "▁", "I", "▁was", "▁born", "▁in", "▁9", "2000", ",", "▁and", "▁this", "▁is", "▁fal", "s", "é", "." ]
[ 13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 9 ]
[ 2, 13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 9, 3 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
FunnelTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 7, 4, 5, 10, 8, 9 ]
[ 1, 7, 4, 5, 10, 8, 9, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
GPT2TokenizationTest
false
lower newer
[ "Ġlow", "er", "Ġ", "n", "e", "w", "er" ]
[ 14, 15, 10, 9, 3, 2, 15 ]
[ 14, 15, 10, 9, 3, 2, 15 ]
{ "add_prefix_space": true, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
HerbertTokenizationTest
false
lower,newer
[ "low", "er</w>", ",</w>", "n", "e", "w", "er</w>" ]
[ 16, 17, 22, 11, 5, 4, 17 ]
[ 0, 16, 17, 22, 11, 5, 4, 17, 1 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
LayoutLMTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 7, 4, 5, 10, 8, 9 ]
[ 1, 7, 4, 5, 10, 8, 9, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
LayoutLMv2TokenizationTest
false
null
null
[ 10, 11, 12, 13 ]
[ 1, 10, 11, 12, 13, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": [ [ 423, 237, 440, 251 ], [ 427, 272, 441, 287 ], [ 419, 115, 437, 129 ] ], "text": [ "a", "weirdly", "test" ], "xpaths": null }
LayoutLMv3TokenizationTest
false
null
null
[ 14, 15, 10, 9, 3, 2, 15 ]
[ 20, 14, 15, 10, 9, 3, 2, 15, 21 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": [ [ 423, 237, 440, 251 ], [ 427, 272, 441, 287 ] ], "text": [ "lower", "newer" ], "xpaths": null }
LayoutXLMTokenizationTest
true
null
null
[ 11, 113, 159, 17, 39, 171, 383 ]
[ 0, 11, 113, 159, 17, 39, 171, 383, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": [ [ 423, 237, 440, 251 ], [ 427, 272, 441, 287 ], [ 419, 115, 437, 129 ] ], "text": [ "a", "weirdly", "test" ], "xpaths": null }
LongformerTokenizationTest
false
lower newer
[ "l", "o", "w", "er", "Ġ", "n", "e", "w", "er" ]
[ 0, 1, 2, 15, 10, 9, 3, 2, 15 ]
[ 20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
LxmertTokenizationTest
false
I was born in 92000, and this is falsé.
[ "[UNK]", "[UNK]", "[UNK]", "[UNK]", "[UNK]", ",", "[UNK]", "[UNK]", "[UNK]", "[UNK]", "[UNK]" ]
[ 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0 ]
[ 1, 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MBart50TokenizationTest
true
the I to a and of in was it me that be he for with my not is s you
[ "▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you" ]
[ 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49 ]
[ 1004, 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MBartTokenizationTest
true
the I to a and of in was it me that be he for with my not is s you
[ "▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you" ]
[ 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49 ]
[ 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 2, 1004 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MPNetTokenizerTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 15, 9, 6, 7, 12, 10, 11, 16 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MarkupLMTokenizationTest
false
null
null
[ 21, 3, 0, 0, 1, 2, 1, 4, 0, 8 ]
[ 22, 21, 3, 0, 0, 1, 2, 1, 4, 0, 8, 23 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": [ "hello", "world" ], "xpaths": [ ",/html/body/div/li[1]/div/span", ",/html/body/div/li[1]/div/span" ] }
MobileBERTTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
NllbTokenizationTest
true
the I to a and of in was it me that be he for with my not is s you
[ "▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you" ]
[ 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49 ]
[ 1048, 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
OpenAIGPTTokenizationTest
false
lower newer
[ "low", "er</w>", "n", "e", "w", "er</w>" ]
[ 14, 15, 9, 3, 2, 15 ]
[ 14, 15, 9, 3, 2, 15 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
PegasusTokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁", "t", "est" ]
[ 391, 149, 112, 106, 115, 493 ]
[ 391, 149, 112, 106, 115, 493, 1 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
Qwen2TokenizationTest
false
lower lower newer 010;}\r\n<|endoftext|>ϓ
[ "l", "o", "w", "er", "Ġlow", "er", "Ġ", "n", "e", "w", "er", "Ġ", "0", "1", "0", ";}", "č", "Ċ", "<|endoftext|>", "Ïĵ" ]
[ 75, 78, 86, 260, 259, 260, 220, 77, 68, 86, 260, 220, 15, 16, 15, 265, 201, 198, 270, 267 ]
[ 75, 78, 86, 260, 259, 260, 220, 77, 68, 86, 260, 220, 15, 16, 15, 265, 201, 198, 270, 267 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
ReformerTokenizationTest
false
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4 ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
RemBertTokenizationTest
true
this is a test
[ "▁this", "▁is", "▁a", "▁t", "est" ]
[ 66, 46, 10, 170, 382 ]
[ 1000, 66, 46, 10, 170, 382, 1001 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
RobertaTokenizationTest
false
lower newer
[ "l", "o", "w", "er", "Ġ", "n", "e", "w", "er" ]
[ 0, 1, 2, 15, 10, 9, 3, 2, 15 ]
[ 20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
SeamlessM4TTokenizationTest
true
the I to a and of in was it me that be he for with my not is s you
[ "▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you" ]
[ 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49 ]
[ 3, 1, 6, 9, 10, 11, 13, 14, 20, 22, 26, 32, 35, 37, 38, 41, 42, 44, 45, 47, 48, 49, 3 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
SplinterTokenizationTest
false
I need to test this rigor
[ "[UNK]", "[UNK]", "[UNK]", "test", "this", "rigor" ]
[ 3, 10, 10, 10, 16, 13, 21, 1 ]
[ 3, 10, 10, 10, 16, 13, 21, 1 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
SqueezeBertTokenizationTest
false
UNwantéd,running
[ "un", "##want", "##ed", ",", "runn", "##ing" ]
[ 9, 6, 7, 12, 10, 11 ]
[ 1, 9, 6, 7, 12, 10, 11, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
T5TokenizationTest
false
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4 ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
TestTokenizationBart
false
lower newer
[ "l", "o", "w", "er", "Ġ", "n", "e", "w", "er" ]
[ 0, 1, 2, 15, 10, 9, 3, 2, 15 ]
[ 20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
TestTokenizationLED
false
lower newer
[ "l", "o", "w", "er", "Ġ", "n", "e", "w", "er" ]
[ 0, 1, 2, 15, 10, 9, 3, 2, 15 ]
[ 20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
TestTokenizationMvp
false
lower newer
[ "l", "o", "w", "er", "Ġ", "n", "e", "w", "er" ]
[ 0, 1, 2, 15, 10, 9, 3, 2, 15 ]
[ 20, 0, 1, 2, 15, 10, 9, 3, 2, 15, 21 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
UdopTokenizationTest
true
null
null
[ 10, 112, 158, 16, 38, 170, 382, 37, 86, 20 ]
[ 10, 112, 158, 16, 38, 170, 382, 37, 86, 20, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": [ [ 423, 237, 440, 251 ], [ 427, 272, 441, 287 ], [ 419, 115, 437, 129 ], [ 961, 885, 992, 912 ] ], "text": [ "a", "weirdly", "test", "hello" ], "xpaths": null }
WhisperTokenizerTest
false
A BCDEFGHIJKLMNOPQRST
[ "A", "ĠBC", "DE", "F", "GH", "I", "J", "K", "L", "M", "N", "OP", "Q", "R", "ST" ]
[ 32, 14359, 22296, 37, 4269, 40, 41, 42, 43, 44, 45, 12059, 48, 49, 6840 ]
[ 50258, 50363, 32, 14359, 22296, 37, 4269, 40, 41, 42, 43, 44, 45, 12059, 48, 49, 6840, 50257 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
XGLMTokenizationTest
false
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5 ]
[ 2, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
XLMRobertaTokenizationTest
false
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5 ]
[ 0, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
XLNetTokenizationTest
true
the I to a and of in was it me that be he for with my not is s you
[ "▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you" ]
[ 5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48 ]
[ 5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48, 1000, 1002 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
CamembertTokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁t", "est" ]
[ 289, 50, 14, 174, 386 ]
[ 5, 289, 50, 14, 174, 386, 6 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
CamembertTokenizationTest
true
the I to a and of in was it me that be he for with my not is s you
[ "▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you" ]
[ 9, 12, 13, 14, 16, 17, 23, 25, 29, 35, 38, 40, 41, 44, 45, 47, 48, 50, 51, 52 ]
[ 5, 9, 12, 13, 14, 16, 17, 23, 25, 29, 35, 38, 40, 41, 44, 45, 47, 48, 50, 51, 52, 6 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MBartTokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁t", "est" ]
[ 286, 47, 11, 171, 383 ]
[ 286, 47, 11, 171, 383, 2, 1004 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MBartTokenizationTest
true
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5 ]
[ 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2, 1004 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MBart50TokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁t", "est" ]
[ 286, 47, 11, 171, 383 ]
[ 1004, 286, 47, 11, 171, 383, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
MBart50TokenizationTest
true
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5 ]
[ 1004, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
NllbTokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁t", "est" ]
[ 286, 47, 11, 171, 383 ]
[ 1048, 286, 47, 11, 171, 383, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
NllbTokenizationTest
true
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5 ]
[ 1048, 9, 22, 85, 56, 25, 20, 8, 3, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 3, 5, 2 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
RemBertTokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁t", "est" ]
[ 285, 46, 10, 170, 382 ]
[ 1000, 285, 46, 10, 170, 382, 1001 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
RemBertTokenizationTest
true
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4 ]
[ 1000, 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 1001 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
RemBertTokenizationTest
true
the I to a and of in was it me that be he for with my not is s you
[ "▁the", "▁I", "▁to", "▁a", "▁and", "▁of", "▁in", "▁was", "▁it", "▁me", "▁that", "▁be", "▁he", "▁for", "▁with", "▁my", "▁not", "▁is", "▁s", "▁you" ]
[ 5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48 ]
[ 1000, 5, 8, 9, 10, 12, 13, 19, 21, 25, 31, 34, 36, 37, 40, 41, 43, 44, 46, 47, 48, 1001 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
SeamlessM4TTokenizationTest
true
This is a test
[ "▁This", "▁is", "▁a", "▁t", "est" ]
[ 286, 47, 11, 171, 383 ]
[ 3, 1, 286, 47, 11, 171, 383, 3 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
SeamlessM4TTokenizationTest
true
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 9, 22, 85, 56, 25, 20, 8, 1, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 1, 5 ]
[ 3, 1, 9, 22, 85, 56, 25, 20, 8, 1, 603, 348, 348, 348, 4, 13, 67, 47, 73, 81, 7, 1, 5, 3 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }
XLNetTokenizationTest
true
I was born in 92000, and this is falsé.
[ "▁I", "▁was", "▁b", "or", "n", "▁in", "▁", "9", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "é", "." ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4 ]
[ 8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4, 1000, 1002 ]
{ "add_prefix_space": null, "do_lower_case": null }
{ "boxes": null, "text": null, "xpaths": null }