# LLMSearchEngine / app.py
import gradio as gr
from openai import OpenAI
import os
import json
from functools import partial
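
# OPENAI_BASE_URL is expected to point at an OpenAI-compatible endpoint that can serve the
# "gemini-2.0-flash-lite" model requested below (e.g. a Gemini API proxy); both values are
# read from the environment so no provider or key is hard-coded in this file.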
# Initialize OpenAI client with API key and base URL from environment variables
client = OpenAI(
    api_key=os.environ["OPENAI_API_KEY"],
    base_url=os.environ["OPENAI_BASE_URL"]
)

# Define the number of results per page and total results to generate
RESULTS_PER_PAGE = 10
TOTAL_RESULTS = 30  # Generate 30 results to allow pagination

def fetch_search_results(query):
    """Fetch search results from the LLM based on the user's query."""
    if not query.strip():
        return None, "Please enter a search query."

    prompt = f"""
    You are a search engine that provides informative and relevant results. For the given query '{query}',
    generate {TOTAL_RESULTS} search results. Each result should include:
    - 'title': A concise, descriptive title of the result.
    - 'snippet': A short summary (2-3 sentences) of the content.
    - 'url': A plausible, clickable URL where the information might be found (e.g., a real or hypothetical website).
    Format the response as a JSON array of objects, where each object has 'title', 'snippet', and 'url' fields.
    Ensure the results are diverse, relevant to the query, and the URLs are realistic (e.g., https://example.com/page).
    """
    try:
        response = client.chat.completions.create(
            model="gemini-2.0-flash-lite",  # Updated model name
            messages=[
                {"role": "system", "content": "You are a helpful search engine."},
                {"role": "user", "content": prompt}
            ],
            response_format={"type": "json_object"}
        )
        content = response.choices[0].message.content
        results = json.loads(content)
        # Handle different possible JSON structures
        if isinstance(results, dict) and "results" in results:
            results = results["results"]
        elif isinstance(results, list):
            pass
        else:
            return None, "Error: Unexpected JSON structure."
        return results, None
    except Exception as e:
        error_msg = str(e)
        if "404" in error_msg:
            return None, f"Error 404: Model or endpoint not found. Check OPENAI_BASE_URL ({os.environ['OPENAI_BASE_URL']}) and model name."
        elif "401" in error_msg:
            return None, "Error 401: Invalid API key. Check OPENAI_API_KEY."
        else:
            return None, f"Error: {error_msg}"

def generate_search_page(query, page=1):
    """Generate a full HTML search results page."""
    results, error = fetch_search_results(query)
    if error:
        return f"""
        <html>
        <head><title>LLM Search Engine</title></head>
        <body style="font-family: Arial, sans-serif;">
            <h1>LLM Search Engine</h1>
            <p style="color: red;">{error}</p>
        </body>
        </html>
        """

    # Calculate pagination boundaries
    start_idx = (page - 1) * RESULTS_PER_PAGE
    end_idx = start_idx + RESULTS_PER_PAGE
    total_pages = (len(results) + RESULTS_PER_PAGE - 1) // RESULTS_PER_PAGE

    # Ensure indices are within bounds
    if start_idx >= len(results):
        return """
        <html>
        <head><title>LLM Search Engine</title></head>
        <body style="font-family: Arial, sans-serif;">
            <h1>LLM Search Engine</h1>
            <p>No more results to display.</p>
        </body>
        </html>
        """

    paginated_results = results[start_idx:end_idx]
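
    # (Illustration) With RESULTS_PER_PAGE = 10 and 30 generated results, page 2 slices
    # indices 10-19, i.e. results 11-20, and total_pages works out to 3 via ceiling division.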

    # Generate the full HTML page: the static head/CSS first, then an f-string body so the
    # query, page, and total_pages values are actually interpolated (the CSS block stays a
    # plain string, so its braces need no escaping)
    html = """
    <html>
    <head>
        <title>LLM Search Engine</title>
        <style>
            body {
                font-family: Arial, sans-serif;
                margin: 0;
                padding: 20px;
                max-width: 800px;
                margin-left: auto;
                margin-right: auto;
            }
            .search-box {
                margin-bottom: 20px;
            }
            .search-box input[type="text"] {
                width: 70%;
                padding: 8px;
                font-size: 16px;
                border: 1px solid #dfe1e5;
                border-radius: 4px;
            }
            .search-box input[type="submit"] {
                padding: 8px 16px;
                font-size: 14px;
                background-color: #f8f9fa;
                border: 1px solid #dfe1e5;
                border-radius: 4px;
                cursor: pointer;
            }
            .search-result {
                margin-bottom: 20px;
            }
            .search-result a {
                color: #1a0dab;
                font-size: 18px;
                text-decoration: none;
            }
            .search-result a:hover {
                text-decoration: underline;
            }
            .search-result .url {
                color: #006621;
                font-size: 14px;
                margin: 2px 0;
            }
            .search-result p {
                color: #545454;
                font-size: 14px;
                margin: 2px 0;
            }
            .pagination {
                margin-top: 20px;
                text-align: center;
            }
            .pagination a {
                color: #1a0dab;
                margin: 0 10px;
                text-decoration: none;
            }
            .pagination a:hover {
                text-decoration: underline;
            }
            .pagination span {
                color: #545454;
                margin: 0 10px;
            }
        </style>
    </head>
    """
    html += f"""
    <body>
        <h1>LLM Search Engine</h1>
        <form class="search-box" method="get" action="/">
            <input type="text" name="query" value="{query}" placeholder="Type your search here...">
            <input type="submit" value="Search">
            <input type="hidden" name="page" value="1">
        </form>
        <h2>Results for '{query}' (Page {page} of {total_pages})</h2>
    """

    # Add search results
    for result in paginated_results:
        title = result.get("title", "No title")
        snippet = result.get("snippet", "No snippet")
        url = result.get("url", "#")
        html += f"""
        <div class="search-result">
            <a href="{url}" target="_blank">{title}</a>
            <div class="url">{url}</div>
            <p>{snippet}</p>
        </div>
        """

    # Add pagination
    html += '<div class="pagination">'
    if page > 1:
        html += f'<a href="/?query={query}&page={page - 1}">Previous</a>'
    else:
        html += '<span>Previous</span>'
    html += f'<span>Page {page} of {total_pages}</span>'
    if page < total_pages:
        html += f'<a href="/?query={query}&page={page + 1}">Next</a>'
    else:
        html += '<span>Next</span>'
    html += '</div>'

    html += """
    </body>
    </html>
    """
    return html
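
# Example (hypothetical call): generate_search_page("solar panels", page=2) returns a complete
# HTML document showing results 11-20 of a freshly generated result set, with Previous/Next
# links that encode the query and page number in the URL query string.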

# Define the app with Blocks
with gr.Blocks(title="LLM Search Engine") as app:
    # Custom route handler
    def handle_request(query, page):
        try:
            page = int(page) if page else 1
        except (ValueError, TypeError):
            page = 1
        return generate_search_page(query, page)
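
    # Note: the route() registration below is kept as written; it presumes a Gradio build in
    # which a Blocks app accepts fn/inputs/outputs/_js keyword arguments here. On builds
    # without such a hook, the same handler can instead be wired to a load event or a button
    # click with the usual fn/inputs/outputs arguments.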
    # Use a Route to serve raw HTML
    app.route("/",
              inputs=[gr.Textbox(visible=False, value=""), gr.Number(visible=False, value=1)],
              outputs=gr.HTML(),
              _js="() => [new URLSearchParams(window.location.search).get('query') || '', new URLSearchParams(window.location.search).get('page') || '1']",
              fn=handle_request)

app.launch()
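
# Local-run sketch (the key and endpoint values below are placeholders, not taken from this repo):
#   export OPENAI_API_KEY="your-api-key"
#   export OPENAI_BASE_URL="https://your-openai-compatible-endpoint/v1"
#   python app.py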