Yaxin committed on
Commit
0dc993a
·
1 Parent(s): e2f39ed

Update SemEval2016Task5NLTK.py

Browse files
Files changed (1) hide show
  1. SemEval2016Task5NLTK.py +3 -3
SemEval2016Task5NLTK.py CHANGED
@@ -102,8 +102,8 @@ class SemEval2016Task5NLTK(datasets.GeneratorBasedBuilder):
102
  'to': datasets.Value(dtype='string')}
103
  ],
104
  'tokens': datasets.Sequence(datasets.Value(dtype='string')),
105
- 'ATESP_BIEOS_tags': datasets.Sequence(datasets.ClassLabel(num_classes=13, names=['B-NEG', 'B-NEU', 'B-POS', 'I-NEG', 'I-NEU', 'I-POS', 'E-NEG', 'E-NEU', 'E-POS', 'S-NEG', 'S-NEU', 'S-POS', 'O'])),
106
- 'ATESP_BIO_tags': datasets.Sequence(datasets.ClassLabel(num_classes=7, names=['B-NEG', 'B-NEU', 'B-POS', 'I-NEG', 'I-NEU', 'I-POS', 'O'])),
107
  'ATE_BIEOS_tags': datasets.Sequence(datasets.ClassLabel(num_classes=5, names=['B', 'I', 'E', 'O', 'S'])),
108
  'ATE_BIO_tags': datasets.Sequence(datasets.ClassLabel(num_classes=3, names=['B', 'I', 'O'])),
109
 
@@ -252,7 +252,7 @@ def addTokenAndLabel(example):
252
  BIES_tokens = nltk.word_tokenize(text[int(aspect['from']): int(aspect['to'])])
253
  tokens.extend(BIES_tokens)
254
 
255
- assert len(BIES_tokens) > 0, print('error in BIES_tokens length')
256
 
257
  if len(BIES_tokens)==1:
258
  labels.append('S-'+polarity)
 
102
  'to': datasets.Value(dtype='string')}
103
  ],
104
  'tokens': datasets.Sequence(datasets.Value(dtype='string')),
105
+ 'ATESP_BIEOS_tags': datasets.Sequence(datasets.ClassLabel(num_classes=17, names=['B-NEG', 'B-NEU', 'B-POS', 'B-CON', 'I-NEG', 'I-NEU', 'I-POS', 'I-CON', 'E-NEG', 'E-NEU', 'E-POS', 'E-CON', 'S-NEG', 'S-NEU', 'S-POS', 'S-CON', 'O'])),
106
+ 'ATESP_BIO_tags': datasets.Sequence(datasets.ClassLabel(num_classes=9, names=['B-NEG', 'B-NEU', 'B-POS', 'B-CON', 'I-NEG', 'I-NEU', 'I-POS', 'I-CON', 'O'])),
107
  'ATE_BIEOS_tags': datasets.Sequence(datasets.ClassLabel(num_classes=5, names=['B', 'I', 'E', 'O', 'S'])),
108
  'ATE_BIO_tags': datasets.Sequence(datasets.ClassLabel(num_classes=3, names=['B', 'I', 'O'])),
109
 
 
252
  BIES_tokens = nltk.word_tokenize(text[int(aspect['from']): int(aspect['to'])])
253
  tokens.extend(BIES_tokens)
254
 
255
+ assert len(BIES_tokens) > 0, print(f'error in BIES_tokens length: {tokens}')
256
 
257
  if len(BIES_tokens)==1:
258
  labels.append('S-'+polarity)