pvanand's picture
Create routers/llm_chat.py
ed88192 verified
raw
history blame
1.67 kB
# routers/llm_chat.py
from fastapi import APIRouter, HTTPException, Header
import requests
import os
from pydantic import BaseModel
# Router for LLM chat endpoints; every route below is mounted under /api/v1.
router = APIRouter(
    prefix="/api/v1",  # Prefix for all routes in this router
    tags=["LLM Chat"],  # Tag for OpenAPI documentation
)
# Load environment variables (fallback values are used when a variable is unset).
# Upstream LLM agent service this router proxies requests to.
LLM_API_URL = os.getenv("LLM_API_URL", "https://pvanand-audio-chat.hf.space/llm-agent")
# NOTE(review): hardcoded API-key fallback ("44d5c") is a security smell —
# consider requiring API_KEY to be set in the environment and failing fast if missing.
API_KEY = os.getenv("API_KEY", "44d5c")
# Pydantic model for request validation
class LLMChatRequest(BaseModel):
    """Request body accepted by the /llm-chat endpoint.

    Fields are forwarded verbatim as the JSON payload to the upstream
    LLM agent service (see llm_chat below).
    """

    # The user prompt to send to the LLM (required).
    prompt: str
    # Optional system message; defaults to empty.
    system_message: str = ""
    # Upstream model identifier.
    model_id: str = "openai/gpt-4o-mini"
    # NOTE(review): the "string" defaults look like Swagger-UI placeholder
    # artifacts; presumably callers supply real ids — confirm against clients.
    conversation_id: str = "string"
    user_id: str = "string"
@router.post("/llm-chat", summary="Send a prompt to the LLM", description="This endpoint sends a prompt to the LLM and returns the response.")
def llm_chat(
    request: LLMChatRequest,
    x_api_key: str = Header(None, description="API Key for authentication")
):
    """Proxy a chat request to the upstream LLM agent service.

    Declared as a plain ``def`` (not ``async def``) on purpose: ``requests.post``
    is a blocking call, and inside an ``async def`` it would stall the event
    loop for the whole upstream round-trip. FastAPI runs sync endpoints in a
    threadpool, so the external HTTP interface is unchanged.

    Args:
        request: Validated chat payload (see LLMChatRequest).
        x_api_key: Caller's API key from the ``X-API-Key`` header.

    Returns:
        The upstream service's JSON response, passed through unmodified.

    Raises:
        HTTPException: 403 if the API key does not match ``API_KEY``;
            502 if the upstream service is unreachable or times out;
            otherwise the upstream's own non-200 status code.
    """
    if x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API Key")

    payload = {
        "prompt": request.prompt,
        "system_message": request.system_message,
        "model_id": request.model_id,
        "conversation_id": request.conversation_id,
        "user_id": request.user_id,
    }
    headers = {
        "accept": "application/json",
        "X-API-Key": x_api_key,
        "Content-Type": "application/json",
    }

    # Use requests to call the external API. A timeout is essential here:
    # without one a hung upstream would pin a worker thread forever.
    try:
        response = requests.post(LLM_API_URL, json=payload, headers=headers, timeout=60)
    except requests.RequestException as exc:
        # Network failure / timeout: surface as 502 instead of an opaque 500.
        raise HTTPException(status_code=502, detail=f"LLM API unreachable: {exc}") from exc

    if response.status_code != 200:
        raise HTTPException(status_code=response.status_code, detail="Error from LLM API")
    return response.json()