grosenthal committed
Commit 91b09b3 · 1 Parent(s): fd4b691
Files changed (1)
  1. app.py +1 -1
app.py CHANGED

@@ -218,7 +218,7 @@ tokenize_morph_simplified = lambda t: tokenize(morph_simplified_tokenizer, t)
 tokenize_case = lambda t: tokenize(case_tokenizer, t)
 
 def translate(model, tokenizer, text):
-    translated = model.generate(**tokenizer(text, return_tensors="pt", padding=True, truncation=True).to('cuda'))
+    translated = model.generate(**tokenizer(text, return_tensors="pt", padding=True, truncation=True))
     translated_line = [tokenizer.decode(t, skip_special_tokens=True) for t in translated]
     return translated_line
 
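For context, the edited line builds the model inputs with the tokenizer; dropping the hard-coded `.to('cuda')` keeps the input tensors on CPU, which avoids failing on hosts without a GPU. If both GPU and CPU hosts need to be supported, a device-agnostic variant is a common alternative. The sketch below is only an illustration of that pattern, not the code in this commit, and it assumes `model` and `tokenizer` are a standard Hugging Face seq2seq model and tokenizer as in `app.py`:

```python
import torch

def translate(model, tokenizer, text):
    # Resolve the device at runtime instead of hard-coding 'cuda',
    # so the same function works on CPU-only machines.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = model.to(device)

    # BatchEncoding.to(device) moves all input tensors in one call.
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True).to(device)
    translated = model.generate(**inputs)
    return [tokenizer.decode(t, skip_special_tokens=True) for t in translated]
```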