FahadAlam committed on
Commit 90f3db5 · 1 Parent(s): 40cab55

Create new file

Files changed (1)
  1. app.py +19 -0
app.py ADDED
@@ -0,0 +1,19 @@
+ import gradio as gr
+ import torch
+ from transformers import PegasusForConditionalGeneration, PegasusTokenizer
+
+ model_name = 'tuner007/pegasus_paraphrase'
+ torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'
+ tokenizer = PegasusTokenizer.from_pretrained(model_name)
+ model = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)
+
+ def paraphrase_text(input_text, max_length):
+     batch = tokenizer([input_text], truncation=True, padding='longest', max_length=int(max_length), return_tensors="pt").to(torch_device)
+     translated = model.generate(**batch, max_length=int(max_length), num_beams=3, num_return_sequences=3, temperature=1.5)
+     tgt_text = tokenizer.batch_decode(translated, skip_special_tokens=True)
+     return tgt_text[0], tgt_text[1], tgt_text[2]
+
+ examples = [["Begin your professional career by learning data science skills with Data science Dojo, a globally recognized e-learning platform where we teach students how to learn data science, data analytics, machine learning and more.", "45"], ["Hello, I am a paraphrasing tool. How can I help you?", "30"]]
+
+ demo = gr.Interface(fn=paraphrase_text, inputs=["text", "text"], outputs=["text", "text", "text"], title="Paraphrase", examples=examples)
+ demo.launch(debug=True)
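
To sanity-check the model outside the Gradio interface (for example in a notebook or REPL), a minimal sketch along the lines below can be used. It assumes the same tokenizer, model, and torch_device objects have already been created as in app.py above; the prompt and length value are taken from the second example in the file.

# Minimal sketch (assumption): reuse tokenizer/model/torch_device as set up in app.py
# to generate three paraphrase candidates for a single prompt with beam search.
prompt = "Hello, I am a paraphrasing tool. How can I help you?"
batch = tokenizer([prompt], truncation=True, padding='longest', max_length=30, return_tensors="pt").to(torch_device)
outputs = model.generate(**batch, max_length=30, num_beams=3, num_return_sequences=3)
for i, candidate in enumerate(tokenizer.batch_decode(outputs, skip_special_tokens=True), start=1):
    print(f"{i}. {candidate}")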