asahi417 committed · Commit 1b182d0 · 1 Parent(s): b9ac9ba
experiments/model_finetuning_topic.py CHANGED
@@ -81,7 +81,7 @@ def main(
     tokenized_datasets = dataset.map(
         lambda x: tokenizer(x["text"], padding="max_length", truncation=True, max_length=256), batched=True
     )
-    tokenized_datasets.rename_column("gold_label_list", "label")
+    tokenized_datasets = tokenized_datasets.rename_column("gold_label_list", "label")
     metric_accuracy = evaluate.load("accuracy", "multilabel")
     metric_f1 = evaluate.load("f1", "multilabel")
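
The reason for this change is that `datasets.Dataset.rename_column` returns a new dataset rather than renaming the column in place, so the return value must be reassigned. A minimal sketch of the behavior, using a hypothetical toy dataset not taken from the original script:

```python
# Minimal sketch (assumed standalone example, not part of the original script):
# Dataset.rename_column returns a *new* Dataset, so the result must be captured,
# as the commit above now does.
from datasets import Dataset

dataset = Dataset.from_dict({"text": ["a", "b"], "gold_label_list": [[0, 1], [1, 0]]})

# Without reassignment the rename is silently lost:
dataset.rename_column("gold_label_list", "label")
assert "gold_label_list" in dataset.column_names  # column still has the old name

# Capturing the returned dataset applies the rename:
dataset = dataset.rename_column("gold_label_list", "label")
assert "label" in dataset.column_names
```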