# helpers/ai_client.py
import requests
import os
from typing import Optional, Dict, Any


class AIClient:
    """Minimal HTTP client for forwarding prompts to an external LLM API.

    Configuration is read from the environment at construction time:
    ``LLM_API_URL`` (endpoint URL) and ``X_API_KEY`` (default API key).
    Either may be ``None`` if the variable is unset.
    """

    def __init__(self) -> None:
        # Load environment variables; both may be None if not configured.
        self.llm_api_url: Optional[str] = os.getenv("LLM_API_URL")
        self.api_key: Optional[str] = os.getenv("X_API_KEY")

    def send_prompt(
        self,
        prompt: str,
        system_message: str = "",
        model_id: str = "openai/gpt-4o-mini",
        conversation_id: str = "string",
        user_id: str = "string",
        api_key: Optional[str] = None,
        timeout: float = 30.0,
    ) -> Dict[str, Any]:
        """
        Sends a prompt to the LLM API and returns the parsed JSON response.

        Args:
            prompt (str): The user's input prompt.
            system_message (str): Optional system message for the LLM.
            model_id (str): The model ID to use (default: "openai/gpt-4o-mini").
            conversation_id (str): Unique ID for the conversation.
            user_id (str): Unique ID for the user.
            api_key (Optional[str]): API key for authentication; falls back to
                the ``X_API_KEY`` environment value when not provided.
            timeout (float): Seconds to wait for the HTTP request before
                aborting (default: 30.0). Without a timeout, ``requests``
                would block indefinitely on a stalled connection.

        Returns:
            Dict[str, Any]: The JSON response from the LLM API.

        Raises:
            Exception: If the API responds with a non-200 status code.
            requests.RequestException: On connection errors or timeout.
        """
        # Fall back to the environment-configured key when none is supplied.
        if api_key is None:
            api_key = self.api_key

        payload = {
            "prompt": prompt,
            "system_message": system_message,
            "model_id": model_id,
            "conversation_id": conversation_id,
            "user_id": user_id
        }
        headers = {
            "accept": "application/json",
            "X-API-Key": api_key,
            "Content-Type": "application/json"
        }

        # Call the external API; timeout prevents an indefinite hang.
        response = requests.post(
            self.llm_api_url, json=payload, headers=headers, timeout=timeout
        )
        if response.status_code != 200:
            raise Exception(f"Error from LLM API: {response.status_code} - {response.text}")
        return response.json()