Hugging Face Spaces — Space status: Sleeping
Commit: Fix T5 bug with tokens returned
Browse files — hexviz/attention.py (+1 −1)

hexviz/attention.py — CHANGED
@@ -191,7 +191,7 @@ def get_attention(
     if remove_special_tokens:
         # Remove attention to </s> (last) token
         attentions = [attention[:, :, :-1, :-1] for attention in attentions]
-        tokenized_sequence =
+        tokenized_sequence = tokenized_sequence[:-1]
         attentions = torch.stack([attention.squeeze(0) for attention in attentions])

     else: