eloi-goncalves committed on
Commit
1afaaf2
·
1 Parent(s): 79b9061

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -8
app.py CHANGED
@@ -103,10 +103,11 @@ def generateDistlGPT2(starting_text):
103
  # grad.Interface(generateDistlGPT2, inputs=txt, outputs=out).launch()
104
 
105
  #Text Generation
106
- from transformers import AutoModelWithLMHead, AutoTokenizer
107
- import gradio as grad
108
- text2text_tkn = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
109
- mdl = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
 
110
  def text2text(context,answer):
111
  input_text = "answer: %s context: %s </s>" % (answer, context)
112
  features = text2text_tkn ([input_text], return_tensors='pt')
@@ -115,7 +116,28 @@ def text2text(context,answer):
115
  max_length=64)
116
  response=text2text_tkn.decode(output[0])
117
  return response
118
- context=grad.Textbox(lines=10, label="English", placeholder="Context")
119
- ans=grad.Textbox(lines=1, label="Answer")
120
- out=grad.Textbox(lines=1, label="Genereated Question")
121
- grad.Interface(text2text, inputs=[context,ans], outputs=out).launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
103
  # grad.Interface(generateDistlGPT2, inputs=txt, outputs=out).launch()
104
 
105
  #Text Generation
106
+ #Question Generation
107
+ # from transformers import AutoModelWithLMHead, AutoTokenizer
108
+ # import gradio as grad
109
+ # text2text_tkn = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
110
+ # mdl = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
111
  def text2text(context,answer):
112
  input_text = "answer: %s context: %s </s>" % (answer, context)
113
  features = text2text_tkn ([input_text], return_tensors='pt')
 
116
  max_length=64)
117
  response=text2text_tkn.decode(output[0])
118
  return response
119
+ # context=grad.Textbox(lines=10, label="English", placeholder="Context")
120
+ # ans=grad.Textbox(lines=1, label="Answer")
121
+ # out=grad.Textbox(lines=1, label="Generated Question")
122
+ # grad.Interface(text2text, inputs=[context,ans], outputs=out).launch()
123
# T5 summarizer: load the wikihow-t5-small checkpoint for abstractive
# paragraph summarization.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as grad

# NOTE(review): AutoModelWithLMHead is deprecated in transformers; for
# encoder-decoder (T5-style) checkpoints the supported auto-class is
# AutoModelForSeq2SeqLM. Behavior for this checkpoint is unchanged.
text2text_tkn = AutoTokenizer.from_pretrained("deep-learning-analytics/wikihow-t5-small")
mdl = AutoModelForSeq2SeqLM.from_pretrained("deep-learning-analytics/wikihow-t5-small")
def text2text_summary(para):
    """Summarize a paragraph with the wikihow-t5-small model.

    Args:
        para: free-form paragraph text to summarize.

    Returns:
        The decoded summary string (special tokens stripped).
    """
    # Normalize whitespace. The previous `para.strip().replace("\n", "")`
    # glued the last word of one line to the first word of the next,
    # corrupting the tokens fed to the model; splitting and re-joining on
    # a single space keeps word boundaries intact.
    initial_txt = " ".join(para.split())
    tkn_text = text2text_tkn.encode(initial_txt, return_tensors="pt")
    # Beam search (5 beams) with a repetition penalty and early stopping,
    # capped at 250 generated tokens.
    tkn_ids = mdl.generate(
        tkn_text,
        max_length=250,
        num_beams=5,
        repetition_penalty=2.5,
        early_stopping=True,
    )
    response = text2text_tkn.decode(tkn_ids[0], skip_special_tokens=True)
    return response
# Wire up the Gradio UI: a paragraph textbox in, a one-line summary out.
input_box = grad.Textbox(lines=10, label="Paragraph", placeholder="Copy paragraph")
output_box = grad.Textbox(lines=1, label="Summary")
grad.Interface(text2text_summary, inputs=input_box, outputs=output_box).launch()