datasciencedojo committed on
Commit
5825863
·
1 Parent(s): cd348f4

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +24 -0
  2. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
# AutoModelWithLMHead is deprecated (and removed in recent transformers
# releases); AutoModelForSeq2SeqLM is the correct auto class for a T5
# encoder-decoder checkpoint like this one.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# T5-base fine-tuned for answer-aware question generation.
MODEL_NAME = "mrm8488/t5-base-finetuned-question-generation-ap"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
6
+
7
def get_question(context, answer, max_length=64):
    """Generate a question whose answer is *answer*, given *context*.

    Args:
        context: Passage of text that contains the answer.
        answer: The answer span the generated question should target.
        max_length: Maximum token length of the generated question.

    Returns:
        The generated question as a plain string (no special tokens,
        no "question: " prefix).
    """
    # Prompt format expected by the t5-base-finetuned-question-generation-ap
    # checkpoint: "answer: <ans> context: <ctx> </s>".
    input_text = "answer: %s context: %s </s>" % (answer, context)
    features = tokenizer([input_text], return_tensors="pt")

    output = model.generate(
        input_ids=features["input_ids"],
        attention_mask=features["attention_mask"],
        max_length=max_length,
    )

    # Decode with special tokens stripped instead of the brittle fixed-offset
    # slice [16:-4], which silently breaks if the tokenizer's pad/eos layout
    # or the length of the "question: " prefix ever changes.
    decoded = tokenizer.decode(output[0], skip_special_tokens=True).strip()
    prefix = "question: "
    return decoded[len(prefix):] if decoded.startswith(prefix) else decoded
16
+
17
# Three demo rows sharing one passage, each targeting a different answer span.
_EXAMPLE_CONTEXT = (
    "The world's first piece of software was written by a "
    "computer scientist named Tom Kilburn in 1948."
)
examples = [
    [_EXAMPLE_CONTEXT, ans]
    for ans in ("1948", "Tom Kilburn", "computer scientist")
]

# Hide the default Gradio footer.
css = """
.footer {display:none !important}
"""

# Build the UI components, then wire them into the Interface.
context_input = gr.Textbox(lines=3, placeholder="Enter text here", label="Context")
answer_input = gr.Textbox(lines=1, label="Answer")
question_output = gr.Textbox(label="Generated Question")

demo = gr.Interface(
    fn=get_question,
    inputs=[context_input, answer_input],
    outputs=question_output,
    examples=examples,
    css=css,
)
demo.launch()
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ torch
2
+ timm
3
+ sentencepiece
4
+ transformers