Update bert_tokenizer.py
bert_tokenizer.py  +1 -0
@@ -43,6 +43,7 @@ class ChineseBertTokenizer(BertTokenizerFast):
         self.max_length = 512
 
         download_file('vocab.txt')
+        print("vocab_file:", vocab_file)
         self.tokenizer = BertWordPieceTokenizer(vocab_file)
 
         # load pinyin map dict
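For reference, here is a minimal standalone sketch of what the touched lines do; it is not the repository's actual code. It resolves a local vocab file, prints its path the same way the new debug line does, and builds a BertWordPieceTokenizer from it. The literal "vocab.txt" path and the sample sentence are assumptions for illustration only.

from tokenizers import BertWordPieceTokenizer

# Assumed local path; in the class, the file is fetched via download_file('vocab.txt').
vocab_file = "vocab.txt"
print("vocab_file:", vocab_file)  # same debug output as the line added in this commit
tokenizer = BertWordPieceTokenizer(vocab_file)
print(tokenizer.encode("我喜欢猫").tokens)  # sample sentence, chosen arbitrarily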