Simon Salmon committed on
Commit 37dea39 · 1 Parent(s): aba01d1

Update app.py

Files changed (1)
  app.py  +6 -5
app.py CHANGED
@@ -2,6 +2,12 @@ import torch
 from transformers import T5ForConditionalGeneration,T5Tokenizer, AutoTokenizer, AutoModelForSeq2SeqLM, PegasusTokenizer, PegasusForConditionalGeneration
 import streamlit as st
 
+st.title("Auto Translate (To English)")
+text = st.text_input("Okay")
+st.text("What you wrote: ")
+st.write(text)
+st.text("English Translation: ")
+
 model_name = 'tuner007/pegasus_paraphrase'
 tokenizer = PegasusTokenizer.from_pretrained(model_name)
 model = PegasusForConditionalGeneration.from_pretrained(model_name)
@@ -14,11 +20,6 @@ def get_response(text):
     tgt_text = tokenizer.batch_decode(translated, skip_special_tokens=True)
     return tgt_text
 
-st.title("Auto Translate (To English)")
-text = st.text_input("Okay")
-st.text("What you wrote: ")
-st.write(text)
-st.text("English Translation: ")
 if text:
     translated_text = get_response(text)
     st.write(translated_text if translated_text else "No translation found")
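
For reference, a minimal sketch of what app.py could look like after this commit. The diff hides the body of get_response (old lines 5-13 / new lines 14-19), so the tokenization and model.generate parameters below are assumptions modeled on the usual tuner007/pegasus_paraphrase example, not the committed code; the unused T5/Auto imports from the original file are also omitted here.

import torch
from transformers import PegasusTokenizer, PegasusForConditionalGeneration
import streamlit as st

# UI widgets moved above the model setup by this commit.
st.title("Auto Translate (To English)")
text = st.text_input("Okay")
st.text("What you wrote: ")
st.write(text)
st.text("English Translation: ")

model_name = 'tuner007/pegasus_paraphrase'
tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = PegasusForConditionalGeneration.from_pretrained(model_name)

def get_response(text):
    # Assumed body: these lines are not shown in the diff.
    # Tokenize the input and generate one paraphrase with beam search.
    batch = tokenizer([text], truncation=True, padding='longest',
                      max_length=60, return_tensors="pt")
    translated = model.generate(**batch, max_length=60, num_beams=5,
                                num_return_sequences=1)
    tgt_text = tokenizer.batch_decode(translated, skip_special_tokens=True)
    return tgt_text

if text:
    translated_text = get_response(text)
    st.write(translated_text if translated_text else "No translation found")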