kcarnold committed on
Commit
47f3f6e
1 Parent(s): abc9e3b

Avoid crash on empty translation

Browse files
Files changed (1) hide show
  1. app.py +9 -6
app.py CHANGED
@@ -92,11 +92,14 @@ with tokenizer.as_target_tokenizer():
92
  'cumulative probability': probs_for_likely_tokens.cumsum(0)
93
  })
94
 
 
95
  st.write(probs_table)
96
 
97
- loss_table = pd.DataFrame({
98
- 'token': [tokenizer.decode(token_id) for token_id in decoder_input_ids[1:]],
99
- 'loss': F.cross_entropy(model_output.logits[0, :-1], torch.tensor(decoder_input_ids[1:]).to(device), reduction='none').cpu()
100
- })
101
- st.write(loss_table)
102
- st.write("Total loss so far:", loss_table.loss.sum())
 
 
 
92
  'cumulative probability': probs_for_likely_tokens.cumsum(0)
93
  })
94
 
95
+ st.subheader("Most likely next tokens")
96
  st.write(probs_table)
97
 
98
+ if len(decoder_input_ids) > 1:
99
+ st.subheader("Loss by token")
100
+ loss_table = pd.DataFrame({
101
+ 'token': [tokenizer.decode(token_id) for token_id in decoder_input_ids[1:]],
102
+ 'loss': F.cross_entropy(model_output.logits[0, :-1], torch.tensor(decoder_input_ids[1:]).to(device), reduction='none').cpu()
103
+ })
104
+ st.write(loss_table)
105
+ st.write("Total loss so far:", loss_table.loss.sum())