Commit 15b206d · Update app.py
Parent(s): 1afaaf2

app.py  CHANGED
@@ -122,10 +122,10 @@ def text2text(context,answer):
 # grad.Interface(text2text, inputs=[context,ans], outputs=out).launch()
 
 #T5 summaryzer
-from transformers import AutoTokenizer, AutoModelWithLMHead
-import gradio as grad
-text2text_tkn = AutoTokenizer.from_pretrained("deep-learning-analytics/wikihow-t5-small")
-mdl = AutoModelWithLMHead.from_pretrained("deep-learning-analytics/wikihow-t5-small")
+# from transformers import AutoTokenizer, AutoModelWithLMHead
+# import gradio as grad
+# text2text_tkn = AutoTokenizer.from_pretrained("deep-learning-analytics/wikihow-t5-small")
+# mdl = AutoModelWithLMHead.from_pretrained("deep-learning-analytics/wikihow-t5-small")
 def text2text_summary(para):
     initial_txt = para.strip().replace("\n","")
     tkn_text = text2text_tkn.encode(initial_txt, return_tensors="pt")
@@ -138,6 +138,21 @@ def text2text_summary(para):
     )
     response = text2text_tkn.decode(tkn_ids[0], skip_special_tokens=True)
     return response
-para=grad.Textbox(lines=10, label="Paragraph", placeholder="Copy paragraph")
-out=grad.Textbox(lines=1, label="Summary")
-grad.Interface(text2text_summary, inputs=para, outputs=out).launch()
+# para=grad.Textbox(lines=10, label="Paragraph", placeholder="Copy paragraph")
+# out=grad.Textbox(lines=1, label="Summary")
+# grad.Interface(text2text_summary, inputs=para, outputs=out).launch()
+
+# T5 Translate
+from transformers import T5ForConditionalGeneration, T5Tokenizer
+import gradio as grad
+text2text_tkn= T5Tokenizer.from_pretrained("t5-small")
+mdl = T5ForConditionalGeneration.from_pretrained("t5-small")
+def text2text_translation(text):
+    inp = "translate English to Portuguese:: "+text
+    enc = text2text_tkn(inp, return_tensors="pt")
+    tokens = mdl.generate(**enc)
+    response=text2text_tkn.batch_decode(tokens)
+    return response
+para=grad.Textbox(lines=1, label="English Text", placeholder="Text in English")
+out=grad.Textbox(lines=1, label="Portuguese Translation")
+grad.Interface(text2text_translation, inputs=para, outputs=out).launch()
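For context, this commit comments out the wikihow-t5-small summarization demo and launches a translation demo built on the stock t5-small checkpoint instead. Below is a minimal, self-contained sketch of the added code as it might be cleaned up; the comments and the single-string return value are editorial additions, not part of the commit, and the English-to-Portuguese task prefix is carried over from the diff even though the stock T5 checkpoints were fine-tuned on English-to-German/French/Romanian translation.

# Sketch of the translation demo added in this commit
# (assumes the `transformers` and `gradio` packages are installed).
from transformers import T5ForConditionalGeneration, T5Tokenizer
import gradio as grad

text2text_tkn = T5Tokenizer.from_pretrained("t5-small")
mdl = T5ForConditionalGeneration.from_pretrained("t5-small")

def text2text_translation(text):
    # T5 is prompted with a task prefix; the prefix below mirrors the diff.
    inp = "translate English to Portuguese: " + text
    enc = text2text_tkn(inp, return_tensors="pt")
    tokens = mdl.generate(**enc)
    # skip_special_tokens drops <pad>/</s> markers from the decoded output;
    # returning the first string (not the raw list) keeps the Gradio textbox clean.
    return text2text_tkn.batch_decode(tokens, skip_special_tokens=True)[0]

para = grad.Textbox(lines=1, label="English Text", placeholder="Text in English")
out = grad.Textbox(lines=1, label="Portuguese Translation")
grad.Interface(text2text_translation, inputs=para, outputs=out).launch()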