import os

import gradio as gr
import requests

# Hugging Face Inference API endpoints: a text-generation model used to guess a
# context for a question, and a question-answering model used to answer the
# question against that context.
GENERATION_API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
QA_API_URL = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"

# Read the API token from the environment rather than hardcoding a secret in the source.
HEADERS = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}


def query(api_url, payload):
    """POST a JSON payload to an Inference API endpoint and return the parsed JSON response."""
    response = requests.post(api_url, headers=HEADERS, json=payload)
    response.raise_for_status()
    return response.json()

def detect_context_from_question(question):
    """Ask the text-generation model to produce a plausible context for the question."""
    output = query(GENERATION_API_URL, {"inputs": f"context for '{question}' is:"})
    # Text-generation endpoints return a list of {"generated_text": ...} dicts.
    return output[0]["generated_text"]


def ask_question(question, context):
    """Answer the question against the supplied context with the QA model."""
    output = query(QA_API_URL, {"inputs": {"question": question, "context": context}})
    # Question-answering endpoints return {"answer": ..., "score": ..., "start": ..., "end": ...}.
    return output.get("answer", "")

# gr.Interface cannot attach separate callbacks to buttons, so build the UI with
# gr.Blocks and wire "Detect Context" and "Ask" to their own click handlers.
with gr.Blocks() as demo:
    question_box = gr.Textbox(label="Enter your question")
    context_box = gr.Textbox(label="Enter context")
    answer_box = gr.Textbox(label="Output")

    detect_button = gr.Button("Detect Context")
    detect_button.click(detect_context_from_question, inputs=question_box, outputs=context_box)

    ask_button = gr.Button("Ask")
    ask_button.click(ask_question, inputs=[question_box, context_box], outputs=answer_box)

demo.launch()
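
# Example of calling the two helpers directly, without the UI (a sketch that assumes
# HF_API_TOKEN is set and the hosted models are reachable; response shapes are the
# ones typically returned by the Inference API):
#
#     context = detect_context_from_question("What is the capital of France?")
#     answer = ask_question("What is the capital of France?", context)
#     print(answer)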