from flask import Flask, request, render_template_string, Response
from openai import OpenAI
import os
import json
from urllib.parse import quote
import html

app = Flask(__name__)

# Initialize OpenAI client with API key and base URL from environment variables.
# NOTE: both variables are required at import time; a missing one raises KeyError
# immediately rather than failing later mid-request.
client = OpenAI(
    api_key=os.environ["OPENAI_API_KEY"],
    base_url=os.environ["OPENAI_BASE_URL"],
)

# Pagination constants.
RESULTS_PER_PAGE = 10
TOTAL_RESULTS = 30  # ask the model for 30 results so there are multiple pages


def fetch_search_results(query, stream=False):
    """Fetch search results from the LLM.

    Args:
        query: The user's search string.
        stream: When True, return the raw streaming response object instead of
            a parsed list, so the caller can consume chunks incrementally.

    Returns:
        A ``(results, error)`` pair. Exactly one element is non-None:
        ``results`` is a list of dicts with 'title'/'snippet'/'url' keys
        (or the raw stream object when ``stream=True``); ``error`` is a
        human-readable message.
    """
    if not query.strip():
        return None, "Please enter a search query."

    prompt = f"""
You are a search engine that provides informative and relevant results.
For the given query '{query}', generate {TOTAL_RESULTS} search results.
Each result should include:
- 'title': A concise, descriptive title of the result.
- 'snippet': A short summary (2-3 sentences) of the content.
- 'url': A plausible, clickable URL where the information might be found
  (e.g., a real or hypothetical website).
Format the response as a JSON array of objects, where each object has
'title', 'snippet', and 'url' fields. Ensure the results are diverse,
relevant to the query, and the URLs are realistic
(e.g., https://example.com/page).
"""
    try:
        response = client.chat.completions.create(
            model="gemini-2.0-flash-lite",
            messages=[
                {"role": "system", "content": "You are a helpful search engine."},
                {"role": "user", "content": prompt},
            ],
            response_format={"type": "json_object"},
            stream=stream,  # enable streaming if requested
        )
        if stream:
            return response, None  # caller iterates the raw stream

        content = response.choices[0].message.content
        results = json.loads(content)
        # The model may wrap the array in {"results": [...]} or return a bare list.
        if isinstance(results, dict) and "results" in results:
            results = results["results"]
        elif not isinstance(results, list):
            return None, "Error: Unexpected JSON structure."
        return results, None
    except Exception as e:
        # Map the most common HTTP failures to actionable messages.
        error_msg = str(e)
        if "404" in error_msg:
            return None, (
                f"Error 404: Model or endpoint not found. Check OPENAI_BASE_URL "
                f"({os.environ['OPENAI_BASE_URL']}) and model name."
            )
        elif "401" in error_msg:
            return None, "Error 401: Invalid API key. Check OPENAI_API_KEY."
        else:
            return None, f"Error: {error_msg}"


def stream_search_results(query, page):
    """Yield the results page as HTML fragments while the LLM is still responding.

    Each yielded string is a chunk of a single HTML document: header first,
    then one ``<div>`` per result as soon as it can be parsed from the
    accumulating JSON buffer, then the pagination footer.

    NOTE(review): the original template markup was lost in transit; the HTML
    below is a faithful reconstruction of the described layout, not the
    original byte-for-byte markup.
    """
    stream_response, error = fetch_search_results(query, stream=True)
    if error:
        yield (
            "<html><body><p class=\"error\">"
            f"{html.escape(error)}"
            "</p></body></html>"
        )
        return

    # Page header. Escape the query so reflected input cannot inject markup.
    safe_query = html.escape(query)
    yield (
        "<html><head><title>LLM Search Engine</title></head><body>"
        "<h1>LLM Search Engine</h1>"
        f"<p>Results for '{safe_query}' (Page {page})</p>"
        "<div id=\"results\">"
    )

    # BUGFIX: compute the page window BEFORE the streaming loop — the original
    # referenced start_idx/end_idx inside the loop before assigning them,
    # which raised NameError on the first successfully parsed chunk.
    start_idx = (page - 1) * RESULTS_PER_PAGE
    end_idx = start_idx + RESULTS_PER_PAGE

    buffer = ""
    results = []
    emitted = 0  # cursor: how many results have already been processed/yielded
    for chunk in stream_response:
        delta = chunk.choices[0].delta.content
        if not delta:
            continue
        buffer += delta
        try:
            # Attempt a full parse of everything received so far; until the
            # JSON is complete this raises and we simply keep buffering.
            parsed = json.loads(buffer)
        except json.JSONDecodeError:
            continue
        if isinstance(parsed, list):
            results = parsed
        elif isinstance(parsed, dict) and "results" in parsed:
            results = parsed["results"]
        else:
            continue
        # BUGFIX: emit each result at most once. The original re-sliced the
        # tail of ``results`` on every parse, re-yielding earlier results.
        while emitted < len(results):
            if start_idx <= emitted < end_idx:
                result = results[emitted]
                title = html.escape(result.get("title", "No title"))
                snippet = html.escape(result.get("snippet", "No snippet"))
                url = html.escape(result.get("url", "#"))
                yield (
                    "<div class=\"result\">"
                    f"<a href=\"{url}\"><h3>{title}</h3></a>"
                    f"<cite>{url}</cite>"
                    f"<p>{snippet}</p>"
                    "</div>"
                )
            emitted += 1

    # Pagination footer, built once the stream has finished.
    total_pages = (len(results) + RESULTS_PER_PAGE - 1) // RESULTS_PER_PAGE
    encoded_query = quote(query)
    prev_link = (
        f"<a href=\"/?query={encoded_query}&page={page - 1}\">Previous</a>"
        if page > 1
        else "<span class=\"disabled\">Previous</span>"
    )
    next_link = (
        f"<a href=\"/?query={encoded_query}&page={page + 1}\">Next</a>"
        if page < total_pages
        else "<span class=\"disabled\">Next</span>"
    )
    yield (
        "</div>"
        f"<div class=\"pagination\">{prev_link} {next_link}</div>"
        "</body></html>"
    )


@app.route('/', methods=['GET'])
def search_page():
    """Serve the search form, an error page, a lucky redirect, or streamed results.

    Query parameters:
        query: the search string (empty -> show the bare search form).
        page:  1-based page number; non-integers fall back to 1.
        btn:   which button was pressed ("LLM Search" or "I'm Feeling Lucky").
    """
    query = request.args.get('query', '')
    page = request.args.get('page', '1')
    btn = request.args.get('btn', 'LLM Search')
    try:
        page = int(page)
    except ValueError:
        page = 1

    if not query.strip():
        # Bare landing page with the search form.
        # NOTE(review): reconstructed markup — original template was lost.
        html_content = """
<html>
<head><title>LLM Search Engine</title></head>
<body>
  <h1>LLM Search Engine</h1>
  <form action="/" method="get">
    <input type="text" name="query" autofocus>
    <button type="submit" name="btn" value="LLM Search">LLM Search</button>
    <button type="submit" name="btn" value="I'm Feeling Lucky">I'm Feeling Lucky</button>
  </form>
</body>
</html>
"""
        return render_template_string(html_content)

    if btn == "I'm Feeling Lucky":
        results, error = fetch_search_results(query, stream=False)
        if error:
            # SECURITY FIX: the original interpolated the raw error string
            # into render_template_string, letting attacker-influenceable
            # text be evaluated as a Jinja template. Serve escaped plain HTML.
            return Response(
                "<html><head><title>LLM Search Engine</title></head><body>"
                "<h1>LLM Search Engine</h1>"
                f"<p class=\"error\">{html.escape(error)}</p>"
                "</body></html>",
                mimetype="text/html",
            )
        first_url = results[0].get("url", "#") if results else "#"
        safe_url = html.escape(first_url, quote=True)
        return Response(
            "<html><head>"
            f"<meta http-equiv=\"refresh\" content=\"0; url={safe_url}\">"
            "</head><body>"
            f"<p>Redirecting to {safe_url}...</p>"
            "</body></html>",
            mimetype="text/html",
        )

    # Default "LLM Search": stream the results page incrementally.
    return Response(stream_search_results(query, page), mimetype="text/html")


if __name__ == '__main__':
    # NOTE(review): debug=True on 0.0.0.0 exposes the Werkzeug debugger to the
    # network — acceptable for local development only; disable in production.
    app.run(debug=True, host='0.0.0.0', port=int(os.environ.get("PORT", 5000)))