ahabb commited on
Commit
81389c5
·
verified ·
1 Parent(s): fcda8d3

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -0
app.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Gradio demo: medical question answering with a fine-tuned T5 model.

Loads a local fine-tuned T5 checkpoint and exposes a single-question web UI.
For each question the app shows the model's generated answer alongside a
reference answer (currently a hard-coded placeholder — see note below).
"""

from textwrap import fill

import gradio as gr
from transformers import T5ForConditionalGeneration, T5Tokenizer

# Load the fine-tuned model and tokenizer from the local checkpoint directory.
last_checkpoint = "model/checkpoint-1000"
finetuned_model = T5ForConditionalGeneration.from_pretrained(last_checkpoint)
tokenizer = T5Tokenizer.from_pretrained(last_checkpoint)


def answer_question(question):
    """Generate a truthful answer to *question* with the fine-tuned T5 model.

    Args:
        question: Free-text medical question from the UI.

    Returns:
        A ``(generated, reference)`` pair of strings, each wrapped to 80
        columns for display in the two Gradio text outputs.
    """
    prompt = "Answer this question truthfully: " + question
    tokenized_inputs = tokenizer(
        prompt, return_tensors="pt", padding=True, truncation=True
    )
    # Without max_new_tokens, generate() falls back to the model's default
    # max length (~20 tokens) and truncates most answers mid-sentence.
    outputs = finetuned_model.generate(**tokenized_inputs, max_new_tokens=128)
    answer = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # NOTE(review): this "actual" reference answer is a hard-coded placeholder
    # shown for EVERY question; it is only correct for the bundled example.
    # Replace with a real lookup (or drop the second output) before wider use.
    actual = "Very low Mg2+ levels correspond to low PTH levels which in turn results in low Ca2+ levels."

    return fill(answer, width=80), fill(actual, width=80)


# Create the Gradio interface: one text input, two text outputs
# (model answer, reference answer).
iface = gr.Interface(
    fn=answer_question,
    inputs="text",
    outputs=["text", "text"],
    title="Medical Question Answering",
    description="Enter a medical question to get a truthful answer from the finetuned T5 model.",
    examples=[["What is the relationship between very low Mg2+ levels, PTH levels, and Ca2+ levels?"]],
)

# Launch at import time intentionally: Hugging Face Spaces imports app.py,
# so a __main__ guard here would prevent the app from starting.
iface.launch()