changed tokenization
ref_seg_ger.py  (+1 -1)
@@ -229,7 +229,7 @@ class RefSeg(datasets.GeneratorBasedBuilder):
 
                 # tokenized_input = row['token'].split(' ')
                 tkn = self.TOKENIZER.pre_tokenize_str(row['token'])
-                if row['label'] == '
+                if row['label'] == 'identifier':
                     row['label'] = 'other'
                 if not tkn:
                     continue
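
For context, a minimal sketch of what the patched lines do, assuming self.TOKENIZER is a pre-tokenizer from the Hugging Face tokenizers library (the actual builder may configure a different one); the row dict below is hypothetical.

# Sketch of the pre-tokenization step shown in the diff; assumes
# self.TOKENIZER is a `tokenizers` pre-tokenizer such as Whitespace.
from tokenizers.pre_tokenizers import Whitespace

pre_tokenizer = Whitespace()
row = {'token': 'Berlin,', 'label': 'identifier'}  # hypothetical row

# pre_tokenize_str returns a list of (piece, (start, end)) tuples,
# e.g. [('Berlin', (0, 6)), (',', (6, 7))]
tkn = pre_tokenizer.pre_tokenize_str(row['token'])

# the patch collapses the 'identifier' label into 'other'
if row['label'] == 'identifier':
    row['label'] = 'other'

# an empty token yields no pieces; such rows are skipped
if not tkn:
    pass  # `continue` in the original generator loop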