"""LLM-backed search engine.

A small Flask app that asks an OpenAI-compatible chat model to fabricate
search results for a query and renders them as a paginated HTML page,
optionally streaming the page to the client.  Requires OPENAI_API_KEY and
OPENAI_BASE_URL in the environment.
"""

from flask import Flask, request, render_template_string, Response
from openai import OpenAI
import os
import json
from urllib.parse import quote
import html

app = Flask(__name__)

# Initialize OpenAI client with API key and base URL from environment variables.
client = OpenAI(
    api_key=os.environ["OPENAI_API_KEY"],
    base_url=os.environ["OPENAI_BASE_URL"],
)

# Pagination constants.
RESULTS_PER_PAGE = 10
TOTAL_RESULTS = 30  # Generate 30 results to allow pagination


def fetch_search_results(query, stream=False):
    """Fetch search results from the LLM, with optional streaming.

    Args:
        query: The user's search string.
        stream: When True, return the raw streaming response object
            instead of a parsed result list.

    Returns:
        A ``(results, error)`` pair.  On success ``results`` is a list of
        dicts with ``title``/``snippet``/``url`` keys (or the raw stream
        when ``stream=True``) and ``error`` is ``None``; on failure
        ``results`` is ``None`` and ``error`` is a human-readable message.
    """
    if not query.strip():
        return None, "Please enter a search query."

    prompt = f"""
You are a search engine that provides informative and relevant results. For the given query '{query}', generate {TOTAL_RESULTS} search results. Each result should include:
- 'title': A concise, descriptive title of the result.
- 'snippet': A short summary (2-3 sentences) of the content.
- 'url': A plausible, clickable URL where the information might be found (e.g., a real or hypothetical website).
Format the response as a JSON array of objects, where each object has 'title', 'snippet', and 'url' fields. Ensure the results are diverse, relevant to the query, and the URLs are realistic (e.g., https://example.com/page).
"""
    try:
        response = client.chat.completions.create(
            model="gemini-2.0-flash-lite",  # Updated model name
            messages=[
                {"role": "system", "content": "You are a helpful search engine."},
                {"role": "user", "content": prompt},
            ],
            response_format={"type": "json_object"},
            stream=stream,  # Enable streaming if requested
        )
        if stream:
            return response, None  # Return raw streaming response

        content = response.choices[0].message.content
        results = json.loads(content)
        # Handle different possible JSON structures: either a bare array or
        # an object wrapping the array under a "results" key.
        if isinstance(results, dict) and "results" in results:
            results = results["results"]
        elif not isinstance(results, list):
            return None, "Error: Unexpected JSON structure."
        return results, None
    except Exception as e:
        # Map the most common HTTP failures to actionable messages.
        error_msg = str(e)
        if "404" in error_msg:
            return None, f"Error 404: Model or endpoint not found. Check OPENAI_BASE_URL ({os.environ['OPENAI_BASE_URL']}) and model name."
        elif "401" in error_msg:
            return None, "Error 401: Invalid API key. Check OPENAI_API_KEY."
        else:
            return None, f"Error: {error_msg}"


def stream_search_results(query, page):
    """Stream search results incrementally as chunks of an HTML page.

    NOTE(review): the original HTML templates were lost when this source
    was mangled; the markup below is a minimal reconstruction that keeps
    the visible structure (error page, header, results, pagination).
    Restore the original styling from version control if available.
    """
    stream_response, error = fetch_search_results(query, stream=True)
    if error:
        # Escape the error text: it can echo the raw query (untrusted input).
        yield f"<!DOCTYPE html><html><body><p class='error'>{html.escape(error)}</p></body></html>"
        return

    # Generate header immediately so the client sees progress while the
    # model is still producing tokens.
    safe_query = html.escape(query)
    yield (
        f"<!DOCTYPE html><html><head><title>{safe_query} - LLM Search</title></head>"
        f"<body><h1>Results for '{safe_query}'</h1>"
    )

    # Accumulate the streamed deltas into the complete JSON payload; the
    # model's JSON cannot be parsed until the stream finishes.
    buffer = ""
    for chunk in stream_response:
        delta = chunk.choices[0].delta.content
        if delta:
            buffer += delta

    try:
        results = json.loads(buffer)
        if isinstance(results, dict) and "results" in results:
            results = results["results"]
        if not isinstance(results, list):
            raise ValueError("unexpected JSON structure")
    except (json.JSONDecodeError, ValueError):
        yield "<p class='error'>Error: could not parse model output.</p></body></html>"
        return

    # Emit only the slice belonging to the requested page, escaping every
    # model-produced field before it reaches the browser.
    start = (page - 1) * RESULTS_PER_PAGE
    for result in results[start:start + RESULTS_PER_PAGE]:
        title = html.escape(result.get("title", ""))
        snippet = html.escape(result.get("snippet", ""))
        url = html.escape(result.get("url", "#"), quote=True)
        yield (
            f"<div class='result'><a href='{url}'>{title}</a>"
            f"<p>{snippet}</p><cite>{url}</cite></div>"
        )

    # Pagination footer: one link per page, current page shown in bold.
    total_pages = (TOTAL_RESULTS + RESULTS_PER_PAGE - 1) // RESULTS_PER_PAGE
    links = " ".join(
        f"<b>{p}</b>" if p == page
        else f"<a href='/?q={quote(query)}&page={p}'>{p}</a>"
        for p in range(1, total_pages + 1)
    )
    yield f"<div class='pagination'>{links}</div></body></html>"


@app.route("/", methods=["GET"])
def search():
    """Render the search form, or results for ``?q=...``.

    NOTE(review): the original decorator/def line and most of this handler
    were lost in the source mangling.  The surviving fragments show three
    behaviors, reconstructed here: an escaped error page, an "I'm Feeling
    Lucky" redirect to the first result's URL, and a streamed results page
    for a normal "LLM Search".
    """
    query = request.args.get("q", "")
    # Clamp the page number: untrusted query input must not yield a
    # negative slice start.
    page = max(1, request.args.get("page", 1, type=int) or 1)
    lucky = request.args.get("lucky")

    if not query:
        # Plain search form with both submit modes.
        return render_template_string(
            "<!DOCTYPE html><html><body><form action='/' method='get'>"
            "<input name='q' placeholder='Search...' autofocus>"
            "<button type='submit'>LLM Search</button>"
            "<button type='submit' name='lucky' value='1'>I'm Feeling Lucky</button>"
            "</form></body></html>"
        )

    if lucky:
        # "I'm Feeling Lucky": fetch synchronously and bounce to the first hit.
        results, error = fetch_search_results(query)
        if error:
            return render_template_string(
                f"<!DOCTYPE html><html><body><p class='error'>{html.escape(error)}</p></body></html>"
            )
        first_url = results[0].get("url", "#") if results else "#"
        safe_url = html.escape(first_url, quote=True)
        # Meta-refresh redirect matching the fragment visible in the original.
        return Response(
            f"<!DOCTYPE html><html><head>"
            f"<meta http-equiv='refresh' content='1;url={safe_url}'></head>"
            f"<body>Redirecting to {html.escape(first_url)}...</body></html>",
            mimetype="text/html",
        )

    # Stream results for "LLM Search"
    return Response(stream_search_results(query, page), mimetype="text/html")


if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=int(os.environ.get("PORT", 5000)))