compositional_test / transformers /utils /test_module /custom_tokenization_fast.py
chendl's picture
add requirements
a1d409e
raw
history blame contribute delete
193 Bytes
from transformers import BertTokenizerFast
from .custom_tokenization import CustomTokenizer
class CustomTokenizerFast(BertTokenizerFast):
    """Fast (Rust-backed) tokenizer paired with :class:`CustomTokenizer`.

    Inherits all tokenization behavior unchanged from ``BertTokenizerFast``;
    the only addition is declaring which slow tokenizer class corresponds to
    this fast one, so ``transformers`` utilities (e.g. ``AutoTokenizer`` and
    slow↔fast conversion) can pair the two implementations.
    """

    # Links this fast tokenizer to its slow counterpart for conversion/lookup.
    slow_tokenizer_class = CustomTokenizer