File size: 1,281 Bytes
f782c07
61febee
f782c07
61febee
 
 
 
 
 
 
 
 
 
 
 
 
 
1619dde
61febee
1619dde
61febee
1619dde
 
 
 
 
61febee
 
 
1619dde
61febee
 
 
 
1619dde
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
import os

import gradio as gr
import requests

# Hugging Face Inference API endpoint: text generation with Falcon-7B-Instruct.
API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
# SECURITY: an API token was previously hardcoded here, committing a live
# secret to source control. Read it from the environment instead; the old
# leaked token should be revoked. Requests fail with 401 if HF_API_TOKEN is
# unset, which is the correct failure mode for a missing credential.
headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

def query(payload):
    """POST *payload* as JSON to the text-generation endpoint.

    Returns the decoded JSON response (a list of dicts on success, a dict
    with an ``error`` key on failure, per the Inference API).
    Raises ``requests.exceptions.RequestException`` on network failure
    or timeout.
    """
    # Without a timeout, requests waits forever if the API stalls,
    # freezing the Gradio worker. 30 s covers cold model loads.
    response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
    return response.json()

# Hugging Face Inference API endpoint: extractive question answering
# (Longformer fine-tuned on SQuAD v1).
API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"
# SECURITY: same token hygiene as above — never hardcode the bearer token;
# pull it from the environment so the secret stays out of version control.
headers2 = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

def query2(payload):
    """POST *payload* as JSON to the question-answering endpoint.

    Returns the decoded JSON response (on success a dict with ``answer``,
    ``score``, ``start`` and ``end`` keys; on failure a dict with ``error``).
    Raises ``requests.exceptions.RequestException`` on network failure
    or timeout.
    """
    # Timeout guards against the request hanging indefinitely when the
    # hosted model is loading or the API is unresponsive.
    response = requests.post(API_URL2, headers=headers2, json=payload, timeout=30)
    return response.json()

def inference_ui(question):
    """Generate context text for *question* via the Falcon-7B endpoint.

    Gradio handler for the first interface: takes the question string and
    returns the generated text (or an error message) for a "text" output.
    """
    output = query({
        "inputs": f"context for '{question}' is:",
    })
    # The text-generation API returns a list of dicts on success, e.g.
    # [{"generated_text": "..."}], and {"error": "..."} on failure.
    # The original code did output['context'], which matches neither shape
    # and always raised TypeError (list) or KeyError (dict).
    if isinstance(output, list) and output and "generated_text" in output[0]:
        return output[0]["generated_text"]
    if isinstance(output, dict) and "error" in output:
        return f"API error: {output['error']}"
    # Unexpected shape: surface it verbatim rather than crash the UI.
    return str(output)

def ask_question_ui(question, context):
    """Answer *question* from *context* with the extractive QA endpoint.

    Gradio handler for the second interface. The Interface is declared with
    inputs=["text", "text"], so Gradio invokes this callback with the two
    text-box values as positional strings. The original version took no
    parameters (TypeError at call time) and tried to build ``gr.textbox``
    widgets inside the handler — widget construction belongs to the
    Interface declaration, not the callback.
    """
    output2 = query2({
        "inputs": {
            "question": question,
            "context": context
        },
    })
    # Success shape per the QA task API: {"answer": ..., "score": ...,
    # "start": ..., "end": ...}. Return the answer text for the "text"
    # output; fall back to the raw payload (e.g. an error dict) otherwise.
    if isinstance(output2, dict) and "answer" in output2:
        return output2["answer"]
    return str(output2)

iface_context = gr.Interface(fn=inference_ui, inputs="text", outputs="text", live=True)
iface_ask_question = gr.Interface(fn=ask_question_ui, inputs=["text", "text"], outputs="text", live=True)

# Serve both tools from one app. The original code called .launch() twice
# back-to-back; in a script the first launch blocks the main thread, so the
# second interface never started. A TabbedInterface exposes both behind a
# single server and a single public share link.
demo = gr.TabbedInterface(
    [iface_context, iface_ask_question],
    tab_names=["Generate context", "Ask a question"],
)
demo.launch(share=True)