import gradio as gr
import json
import os

from openai import OpenAI, OpenAIError

# Create an OpenAI client (openai>=1.0 style) from environment variables
client = OpenAI(
    api_key=os.environ["OPENAI_API_KEY"],
    base_url=os.environ["OPENAI_BASE_URL"],
)

# Number of results shown per page and total number of results to generate
RESULTS_PER_PAGE = 10
TOTAL_RESULTS = 30  # Generate 30 results so there is something to paginate


def fetch_search_results(query):
    """Fetch search results from the LLM based on the user's query."""
    if not query.strip():
        return None, "Please enter a search query."

    prompt = f"""
    You are a search engine that provides informative and relevant results. For the given query '{query}',
    generate {TOTAL_RESULTS} search results, each with a title and a snippet that summarizes the information.
    Format the response as a JSON object with a 'results' field containing an array of objects,
    where each object has 'title' and 'snippet' fields.
    Ensure the results are diverse and relevant to the query.
    """
    try:
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",  # Adjust model name as needed; JSON mode requires a model that supports it
            messages=[
                {"role": "system", "content": "You are a helpful search engine."},
                {"role": "user", "content": prompt},
            ],
            response_format={"type": "json_object"},
        )
        content = response.choices[0].message.content
        results = json.loads(content)
        # Handle different possible JSON structures
        if isinstance(results, dict) and "results" in results:
            results = results["results"]
        elif isinstance(results, list):
            pass
        else:
            return None, "Error: Unexpected JSON structure."
        return results, None
    except OpenAIError as e:
        return None, f"Error: {str(e)}"
    except json.JSONDecodeError:
        return None, "Error: Failed to parse JSON response."
    except Exception as e:
        return None, f"Unexpected error: {str(e)}"


def display_search_results(query, page=1):
    """Display search results for the given query and page number."""
    results, error = fetch_search_results(query)
    if error:
        return error, None, None

    # Calculate pagination boundaries
    start_idx = (page - 1) * RESULTS_PER_PAGE
    end_idx = start_idx + RESULTS_PER_PAGE
    total_pages = (len(results) + RESULTS_PER_PAGE - 1) // RESULTS_PER_PAGE

    # Ensure indices are within bounds
    if start_idx >= len(results):
        return "No more results to display.", None, None

    paginated_results = results[start_idx:end_idx]

    # Format results into HTML
    html = """
    <style>
    .search-result {
        margin-bottom: 20px;
    }
    .search-result h3 {
        color: blue;
        font-size: 18px;
        margin: 0;
    }
    .search-result p {
        font-size: 14px;
        margin: 5px 0 0 0;
    }
    .pagination {
        margin-top: 20px;
    }
    </style>
    <div>
    """
    html += f"<h2>Search Results for '{query}' (Page {page} of {total_pages})</h2>"
    html += "<ul>"
    for result in paginated_results:
        title = result.get("title", "No title")
        snippet = result.get("snippet", "No snippet")
        html += f'<li class="search-result"><h3>{title}</h3><p>{snippet}</p></li>'
    html += "</ul>"

    # Add pagination controls (simulated with buttons)
    html += '<div class="pagination">'
    if page > 1:
        html += f'<button onclick="update_page({page - 1})">Previous</button>'
    if page < total_pages:
        html += f'<button onclick="update_page({page + 1})">Next</button>'
    html += "</div></div>"

    # Note: Gradio doesn't support interactive JS directly in HTML outputs,
    # so we return page numbers for button functionality
    return html, page - 1 if page > 1 else None, page + 1 if page < total_pages else None


def search_handler(query):
    """Handle a new search: always start at page 1 and update the pagination buttons."""
    html, prev_page, next_page = display_search_results(query, 1)
    return (
        html,
        1,
        gr.update(visible=prev_page is not None),
        gr.update(visible=next_page is not None),
    )


# Build Gradio interface with Blocks for per-session state management
with gr.Blocks(title="LLM Search Engine") as app:
    gr.Markdown("# LLM Search Engine")
    gr.Markdown("Enter a query below to search using a large language model.")

    query_input = gr.Textbox(label="Search Query", placeholder="Type your search here...")
    search_button = gr.Button("Search")
    output_html = gr.HTML()

    # Hidden state to track the current page
    page_state = gr.State(value=1)

    # Previous/Next buttons drive pagination; they stay hidden until a search runs
    with gr.Row():
        prev_button = gr.Button("Previous", visible=False)
        next_button = gr.Button("Next", visible=False)

    def update_page(query, page, direction):
        """Move one page forward or backward and refresh the results view."""
        new_page = page + direction
        html, prev_page, next_page = display_search_results(query, new_page)
        return (
            html,
            new_page,
            gr.update(visible=prev_page is not None),
            gr.update(visible=next_page is not None),
        )

    # A single click handler renders page 1 and toggles the pagination buttons
    search_button.click(
        fn=search_handler,
        inputs=[query_input],
        outputs=[output_html, page_state, prev_button, next_button],
    )
    prev_button.click(
        fn=lambda q, p: update_page(q, p, -1),
        inputs=[query_input, page_state],
        outputs=[output_html, page_state, prev_button, next_button],
    )
    next_button.click(
        fn=lambda q, p: update_page(q, p, 1),
        inputs=[query_input, page_state],
        outputs=[output_html, page_state, prev_button, next_button],
    )

app.launch()
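
# Assumed runtime setup (not specified in this file): a Python environment with
# gradio and openai>=1.0 installed, and OPENAI_API_KEY / OPENAI_BASE_URL available
# before launch. On Hugging Face Spaces these would typically be configured as
# repository secrets rather than exported by hand.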