# main.py
from fastapi import FastAPI, HTTPException, Header
from fastapi.middleware.cors import CORSMiddleware
import requests
import os
from pydantic import BaseModel

app = FastAPI()

# Enable CORS for all origins
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],     # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],     # Allows all methods
    allow_headers=["*"],     # Allows all headers
)

# Load environment variables
LLM_API_URL = os.getenv("LLM_API_URL", "https://pvanand-audio-chat.hf.space/llm-agent")
API_KEY = os.getenv("X_API_KEY")

# Pydantic model for request validation
class LLMChatRequest(BaseModel):
    prompt: str
    system_message: str = ""
    model_id: str = "openai/gpt-4o-mini"
    conversation_id: str = "string"
    user_id: str = "string"

# POST endpoint that proxies chat requests (route path inferred from the handler name)
@app.post("/llm-chat")
async def llm_chat(
    request: LLMChatRequest,
    x_api_key: str = Header(None)
):
    # Reject requests whose key does not match the configured X_API_KEY
    if x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API Key")

    payload = {
        "prompt": request.prompt,
        "system_message": request.system_message,
        "model_id": request.model_id,
        "conversation_id": request.conversation_id,
        "user_id": request.user_id
    }
    headers = {
        "accept": "application/json",
        "X-API-Key": x_api_key,
        "Content-Type": "application/json"
    }

    # Use requests to call the external API
    response = requests.post(LLM_API_URL, json=payload, headers=headers)

    if response.status_code != 200:
        raise HTTPException(status_code=response.status_code, detail="Error from LLM API")

    return response.json()
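
# --- Example usage: a minimal client sketch, not called anywhere by the app ---
# Assumptions (not part of the original file): the service runs locally on
# port 8000, the route path is the "/llm-chat" path used above, and
# "your-api-key" stands in for the value of the X_API_KEY environment variable.
def _example_client_call():
    example_payload = {
        "prompt": "Hello!",
        "system_message": "You are a helpful assistant.",
        "model_id": "openai/gpt-4o-mini",
        "conversation_id": "demo-conversation",
        "user_id": "demo-user",
    }
    resp = requests.post(
        "http://localhost:8000/llm-chat",
        json=example_payload,
        headers={"X-API-Key": "your-api-key", "accept": "application/json"},
    )
    resp.raise_for_status()  # surfaces the 403 / upstream errors raised by the endpoint
    print(resp.json())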

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)