Update app.py
app.py CHANGED
@@ -1,27 +1,17 @@
 import gradio as gr
-from transformers import pipeline
 
-#
-qa_pipeline = pipeline("question-answering", model="deepset/roberta-base-squad2")
-
-def get_answer(question):
-    # Predefined factual context
-    context = """
-    Washington, D.C. is the capital of the United States of America.
-    New Delhi is the capital of India.
-    London is the capital of the United Kingdom.
-    """
-
-    answer = qa_pipeline(question=question, context=context)
-    return answer["answer"]
-
-# Create Gradio Interface
+# Define the Gradio interface
 iface = gr.Interface(
-    fn=
-    inputs=
-
-
-
+    fn=generate_response,
+    inputs=[
+        gr.inputs.Textbox(lines=2, label="Input Prompt"),
+        gr.inputs.Slider(minimum=50, maximum=200, step=10, default=100, label="Max Length"),
+        gr.inputs.Slider(minimum=0.1, maximum=1.0, step=0.1, default=0.7, label="Temperature")
+    ],
+    outputs=gr.outputs.Textbox(label="Generated Response"),
+    title="Llama 2 Chatbot",
+    description="Interact with the Llama 2 model using Gradio."
 )
 
+# Launch the interface
 iface.launch()
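Note that the app.py this commit leaves behind has three gaps worth flagging: it passes fn=generate_response without ever defining that function, it never loads a model, and it uses the gr.inputs / gr.outputs namespaces with the default= slider argument, which later Gradio releases removed in favor of bare components and value=. Below is a minimal runnable sketch of what the intended file could look like; the transformers text-generation pipeline, the body of generate_response, and the meta-llama/Llama-2-7b-chat-hf checkpoint (a gated model that requires an access token) are assumptions filled in for illustration, not part of the commit.

import gradio as gr
from transformers import pipeline

# Assumption: the commit never loads a model, so a transformers text-generation
# pipeline is used here; the checkpoint is gated and needs a Hugging Face token.
generator = pipeline("text-generation", model="meta-llama/Llama-2-7b-chat-hf")

def generate_response(prompt, max_length, temperature):
    # Assumed signature matching the three Gradio inputs: prompt, max length, temperature.
    outputs = generator(
        prompt,
        max_length=int(max_length),
        temperature=float(temperature),
        do_sample=True,
        num_return_sequences=1,
    )
    return outputs[0]["generated_text"]

# Current Gradio versions construct components directly (no gr.inputs/gr.outputs)
# and Slider takes value= rather than default=.
iface = gr.Interface(
    fn=generate_response,
    inputs=[
        gr.Textbox(lines=2, label="Input Prompt"),
        gr.Slider(minimum=50, maximum=200, step=10, value=100, label="Max Length"),
        gr.Slider(minimum=0.1, maximum=1.0, step=0.1, value=0.7, label="Temperature"),
    ],
    outputs=gr.Textbox(label="Generated Response"),
    title="Llama 2 Chatbot",
    description="Interact with the Llama 2 model using Gradio.",
)

iface.launch()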