import os
from openai import OpenAI
import requests
import gradio as gr
# Initialize OpenAI client
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
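# The OpenAI client expects the OPENAI_API_KEY environment variable to be set;
# calls below will fail with an authentication error if it is missing.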
# Vector database search function
def search_document(query, k=5):
    url = "http://154.12.226.68:8000/search"
    payload = {
        "text": query,
        "k": k
    }
    headers = {
        "Content-Type": "application/json"
    }
    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        return f"An error occurred: {e}"
# Function to query OpenAI
def query_openai(prompt):
    try:
        response = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {"role": "system", "content": "You are a helpful assistant. Answer the question based on the provided context."},
                {"role": "user", "content": prompt}
            ]
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"An error occurred while querying OpenAI: {e}"
# Function to perform vector search and format results
def vector_search(query):
    results = search_document(query)
    if isinstance(results, str):  # Error occurred
        return results
    if not isinstance(results, dict) or 'results' not in results:
        return "Unexpected format in vector database response."
    formatted_results = ""
    for i, result in enumerate(results['results'], 1):
        content = result['metadata']['content']
        source = f"Source {i}: {result['metadata'].get('source', 'Unknown source')}, page {result['metadata'].get('page', 'Unknown page')}"
        metadata = ", ".join([f"{k}: {v}" for k, v in result['metadata'].items() if k != 'content'])
        formatted_results += f"{source}\nMetadata: {metadata}\nContent: {content}\n\n"
    return formatted_results
# Combined function for search and query
def search_and_query(question):
    # First, perform the vector search
    search_results = vector_search(question)

    # Then, use these results to query OpenAI
    prompt = f"""Given the following context and question, provide a comprehensive and accurate answer. Use ONLY the information provided in the context to answer. If the context doesn't contain relevant information to answer the question, state that clearly.

Context:
{search_results}

Question: {question}

Answer:"""
    openai_response = query_openai(prompt)

    # Return both the search results and the OpenAI response
    return search_results, openai_response
# Gradio interface
with gr.Blocks() as demo:
    question_input = gr.Textbox(label="Enter your question")
    search_output = gr.Textbox(label="Vector Search Results")
    answer_output = gr.Textbox(label="OpenAI Answer")
    query_button = gr.Button("Get Answer")
    query_button.click(search_and_query, inputs=question_input, outputs=[search_output, answer_output])

demo.launch()