import gradio as gr
from openai import OpenAI
import os
import json
from urllib.parse import quote_plus

# Initialize OpenAI client with API key and base URL from environment variables
client = OpenAI(
    api_key=os.environ["OPENAI_API_KEY"],
    base_url=os.environ["OPENAI_BASE_URL"]
)

# Define the number of results per page and total results to generate
RESULTS_PER_PAGE = 10
TOTAL_RESULTS = 30  # Generate 30 results to allow pagination


def fetch_search_results(query):
    """Fetch search results from the LLM based on the user's query."""
    if not query.strip():
        return None, "Please enter a search query."

    prompt = f"""
You are a search engine that provides informative and relevant results. For the given query '{query}',
generate {TOTAL_RESULTS} search results. Each result should include:
- 'title': A concise, descriptive title of the result.
- 'snippet': A short summary (2-3 sentences) of the content.
- 'url': A plausible, clickable URL where the information might be found (e.g., a real or hypothetical website).
Format the response as a JSON array of objects, where each object has 'title', 'snippet', and 'url' fields.
Ensure the results are diverse, relevant to the query, and the URLs are realistic (e.g., https://example.com/page).
"""

    try:
        response = client.chat.completions.create(
            model="gemini-2.0-flash-lite",  # Updated model name
            messages=[
                {"role": "system", "content": "You are a helpful search engine."},
                {"role": "user", "content": prompt},
            ],
            response_format={"type": "json_object"},
        )
        content = response.choices[0].message.content
        results = json.loads(content)
        # Handle different possible JSON structures
        if isinstance(results, dict) and "results" in results:
            results = results["results"]
        elif not isinstance(results, list):
            return None, "Error: Unexpected JSON structure."
        return results, None
    except Exception as e:
        error_msg = str(e)
        if "404" in error_msg:
            return None, (
                f"Error 404: Model or endpoint not found. Check OPENAI_BASE_URL "
                f"({os.environ['OPENAI_BASE_URL']}) and the model name."
            )
        elif "401" in error_msg:
            return None, "Error 401: Invalid API key. Check OPENAI_API_KEY."
        return None, f"Error: {error_msg}"


def generate_search_page(query, page=1):
    """Generate the HTML for a full search results page."""
    results, error = fetch_search_results(query)
    if error:
        return f"""
        <div class="search-page">
            <h1>LLM Search Engine</h1>
            <p class="error">{error}</p>
        </div>
        """
""" # Calculate pagination boundaries start_idx = (page - 1) * RESULTS_PER_PAGE end_idx = start_idx + RESULTS_PER_PAGE total_pages = (len(results) + RESULTS_PER_PAGE - 1) // RESULTS_PER_PAGE # Ensure indices are within bounds if start_idx >= len(results): return """ LLM Search Engine

LLM Search Engine

No more results to display.

""" paginated_results = results[start_idx:end_idx] # Generate full HTML page html = """ LLM Search Engine

LLM Search Engine

Results for '{query}' (Page {page} of {total_pages})

""" # Add search results for result in paginated_results: title = result.get("title", "No title") snippet = result.get("snippet", "No snippet") url = result.get("url", "#") html += f"""
{title}
{url}

{snippet}

""" # Add pagination html += '' html += """ """ return html # Define the app with Blocks with gr.Blocks(title="LLM Search Engine") as app: # Custom route handler def handle_request(query, page): try: page = int(page) if page else 1 except (ValueError, TypeError): page = 1 return generate_search_page(query, page) # Use a Route to serve raw HTML app.route("/", inputs=[gr.Textbox(visible=False, value=""), gr.Number(visible=False, value=1)], outputs=gr.HTML(), _js="() => [new URLSearchParams(window.location.search).get('query') || '', new URLSearchParams(window.location.search).get('page') || '1']", fn=handle_request) app.launch()