aksell committed on
Commit
b65afcb
·
1 Parent(s): 6d4dc1d

Fix T5 bug with tokens returned

Browse files
Files changed (1) hide show
  1. hexviz/attention.py +1 -1
hexviz/attention.py CHANGED
@@ -191,7 +191,7 @@ def get_attention(
191
  if remove_special_tokens:
192
  # Remove attention to </s> (last) token
193
  attentions = [attention[:, :, :-1, :-1] for attention in attentions]
194
- tokenized_sequence = inputs[:-1]
195
  attentions = torch.stack([attention.squeeze(0) for attention in attentions])
196
 
197
  else:
 
191
  if remove_special_tokens:
192
  # Remove attention to </s> (last) token
193
  attentions = [attention[:, :, :-1, :-1] for attention in attentions]
194
+ tokenized_sequence = tokenized_sequence[:-1]
195
  attentions = torch.stack([attention.squeeze(0) for attention in attentions])
196
 
197
  else: