kmfoda committed
Commit 65eea3a · 1 Parent(s): c397e21

Fix config error

Files changed (1)
  1. modeling_gpt_optimized.py +1 -1
modeling_gpt_optimized.py CHANGED
@@ -196,5 +196,5 @@ class GPTOptim(GPT2PreTrainedModel):
         logits = self.model.lm_head(x)  # (B, T, vocab_size)
         loss = None
         if labels is not None:
-            loss = F.cross_entropy(logits.view(-1, logits.size(-1)), labels.view(-1), ignore_index=config.eos_token_id)
+            loss = F.cross_entropy(logits.view(-1, logits.size(-1)), labels.view(-1), ignore_index=self.config.eos_token_id)
         return logits, loss
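
For context, the fix works because Hugging Face's PreTrainedModel stores the configuration passed to __init__ as self.config, so inside forward only self.config is in scope; a bare config name raises NameError as soon as labels are supplied. Below is a minimal, hypothetical sketch of the corrected loss path, not the repository's actual code: the class name GPTOptimSketch, the standalone lm_head, and the forward signature are illustrative assumptions.

import torch
import torch.nn.functional as F
from torch import nn
from transformers import GPT2Config, GPT2PreTrainedModel


class GPTOptimSketch(GPT2PreTrainedModel):
    """Illustrative stand-in for GPTOptim; only the loss path is sketched."""

    def __init__(self, config):
        super().__init__(config)
        # Hypothetical head standing in for self.model.lm_head in the real model.
        self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)

    def forward(self, hidden_states, labels=None):
        logits = self.lm_head(hidden_states)  # (B, T, vocab_size)
        loss = None
        if labels is not None:
            # self.config is attached by PreTrainedModel.__init__, so it is
            # always in scope here; a bare `config` would raise NameError.
            loss = F.cross_entropy(
                logits.view(-1, logits.size(-1)),
                labels.view(-1),
                ignore_index=self.config.eos_token_id,
            )
        return logits, loss


# Usage sketch: padded/eos positions in labels are skipped by the loss.
config = GPT2Config()
model = GPTOptimSketch(config)
hidden = torch.randn(2, 8, config.n_embd)             # (B, T, n_embd)
labels = torch.randint(0, config.vocab_size, (2, 8))  # (B, T)
logits, loss = model(hidden, labels)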