# helpers/ai_client.py
import requests
import os
from typing import Optional, Dict, Any
import logging

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class AIClient:
    def __init__(self):
        # Load environment variables
        self.llm_api_url = os.getenv("LLM_API_URL")
        self.api_key = os.getenv("X_API_KEY")

    def send_prompt(
        self,
        prompt: str,
        system_message: str = "",
        model_id: str = "openai/gpt-4o-mini",
        conversation_id: str = "string",
        user_id: str = "string",
        api_key: Optional[str] = None,
    ) -> Dict[str, Any]:
        """
        Sends a prompt to the LLM API and returns the response.

        Args:
            prompt (str): The user's input prompt.
            system_message (str): Optional system message for the LLM.
            model_id (str): The model ID to use (default: "openai/gpt-4o-mini").
            conversation_id (str): Unique ID for the conversation.
            user_id (str): Unique ID for the user.
            api_key (str): API key for authentication.

        Returns:
            Dict[str, Any]: The JSON response from the LLM API.

        Raises:
            Exception: If the API request fails or the response is invalid.
        """
        if api_key is None:
            api_key = self.api_key

        payload = {
            "prompt": prompt,
            "system_message": system_message,
            "model_id": model_id,
            "conversation_id": conversation_id,
            "user_id": user_id,
        }
        headers = {
            "accept": "application/json",
            "X-API-Key": api_key,
            "Content-Type": "application/json",
        }
        try:
            # Use requests to call the external API
            response = requests.post(self.llm_api_url, json=payload, headers=headers)
            logger.info(f"API Response Status Code: {response.status_code}")
            logger.info(f"API Response Content: {response.text}")

            # Fail on non-200 status codes before attempting to parse the body
            if response.status_code != 200:
                raise Exception(f"Error from LLM API: {response.status_code} - {response.text}")
            return response.json()
        except requests.exceptions.JSONDecodeError as e:
            logger.error(f"Failed to decode JSON response: {e}")
            raise Exception(f"Invalid response from LLM API: {response.text}")
        except Exception as e:
            logger.error(f"Error in send_prompt: {e}")
            raise
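

# Example usage: a minimal sketch, not part of the original client. It assumes the
# LLM_API_URL and X_API_KEY environment variables point to a reachable endpoint and
# hold a valid key; the prompt and system message below are illustrative only.
if __name__ == "__main__":
    client = AIClient()
    result = client.send_prompt(
        prompt="Summarize the benefits of unit testing in one sentence.",
        system_message="You are a concise technical assistant.",
    )
    # The exact response schema depends on the backend, so just print the raw JSON.
    print(result)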