import gradio as gr
import requests

# Hugging Face Inference API token (replace with your own); shared by both endpoints below.
HF_TOKEN = "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"
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}

# Text-generation model used to guess a context for a question.
API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
# Extractive question-answering model used to answer from that context.
API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"


def query(payload):
    """POST a payload to the Falcon text-generation endpoint and return its JSON."""
    response = requests.post(API_URL, headers=HEADERS, json=payload)
    return response.json()


def query2(payload):
    """POST a payload to the Longformer question-answering endpoint and return its JSON."""
    response = requests.post(API_URL2, headers=HEADERS, json=payload)
    return response.json()


def detect_context_from_question(question):
    """Ask the text-generation model to produce a context for the question."""
    output = query({"inputs": f"context for '{question}' is:"})
    # Text-generation endpoints normally return a list of {"generated_text": ...} dicts.
    if isinstance(output, list) and output and "generated_text" in output[0]:
        return output[0]["generated_text"]
    return str(output)


def ask_question(question, context):
    """Answer the question using the supplied context."""
    output = query2({"inputs": {"question": question, "context": context}})
    # Question-answering endpoints normally return {"answer": ..., "score": ..., ...}.
    if isinstance(output, dict) and "answer" in output:
        return output["answer"]
    return str(output)


# gr.Interface cannot host two independent buttons as inputs, so the UI is built with
# gr.Blocks instead: "Detect Context" fills the context box, "Ask" produces the answer.
with gr.Blocks() as iface:
    question = gr.Textbox(label="Enter your question")
    context = gr.Textbox(label="Enter context")
    output = gr.Textbox(label="Output")

    detect_context_button = gr.Button("Detect Context")
    ask_button = gr.Button("Ask")

    detect_context_button.click(detect_context_from_question, inputs=question, outputs=context)
    ask_button.click(ask_question, inputs=[question, context], outputs=output)

iface.launch()
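
# Usage sketch for the two helpers, outside the UI (assumptions: the token above is
# valid and both models are loaded on the Inference API; "generated_text" and "answer"
# are the usual response keys for text-generation and question-answering endpoints):
#
#   ctx = detect_context_from_question("Who wrote Hamlet?")   # -> generated context string
#   ans = ask_question("Who wrote Hamlet?", ctx)              # -> extracted answer string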