pvanand commited on
Commit
ade0ea8
·
verified ·
1 Parent(s): e0a400b

Create main.py

Browse files
Files changed (1) hide show
  1. main.py +57 -7
main.py CHANGED
@@ -1,12 +1,62 @@
1
- # Dockerfile
2
- FROM python:3.9-slim
 
 
 
 
3
 
4
- WORKDIR /app
5
 
6
- COPY requirements.txt .
 
 
 
 
 
 
 
7
 
8
- RUN pip install --no-cache-dir -r requirements.txt
 
 
9
 
10
- COPY . .
 
 
 
 
 
 
11
 
12
- CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# main.py — thin authenticated proxy in front of an external LLM agent API.
import os

import requests
from fastapi import FastAPI, HTTPException, Header
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

app = FastAPI()

# Permit cross-origin requests from anywhere (public proxy endpoint).
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],      # every origin
    allow_credentials=True,
    allow_methods=["*"],      # every HTTP method
    allow_headers=["*"],      # every request header
)

# Runtime configuration, overridable through the environment.
LLM_API_URL = os.getenv("LLM_API_URL", "https://pvanand-audio-chat.hf.space/llm-agent")
API_KEY = os.getenv("X_API_KEY")  # shared secret expected in the X-API-Key header
22
 
23
class LLMChatRequest(BaseModel):
    """Validated body for POST /llm-chat.

    Mirrors the payload forwarded to the upstream LLM agent; all fields
    except `prompt` carry the upstream defaults.
    """

    prompt: str
    system_message: str = ""
    model_id: str = "openai/gpt-4o-mini"
    conversation_id: str = "string"
    user_id: str = "string"
30
 
31
@app.post("/llm-chat")
async def llm_chat(
    request: LLMChatRequest,
    x_api_key: str = Header(None)
):
    """Forward a chat request to the configured upstream LLM agent.

    The caller must supply an ``X-API-Key`` header matching the server's
    ``X_API_KEY`` environment variable; the upstream JSON response is
    returned verbatim.

    Raises:
        HTTPException 403: missing/invalid API key, or server key unset.
        HTTPException 502: upstream unreachable or timed out.
        HTTPException <code>: upstream responded with a non-200 status.
    """
    # Refuse service when the server-side key is unconfigured: with the
    # plain `x_api_key != API_KEY` check, an unset X_API_KEY env var makes
    # API_KEY None, and a request with no header (None) would pass auth.
    if not API_KEY or x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Invalid API Key")

    payload = {
        "prompt": request.prompt,
        "system_message": request.system_message,
        "model_id": request.model_id,
        "conversation_id": request.conversation_id,
        "user_id": request.user_id,
    }

    headers = {
        "accept": "application/json",
        "X-API-Key": x_api_key,
        "Content-Type": "application/json",
    }

    # Bounded timeout so a stalled upstream cannot hang this worker
    # indefinitely; connection/timeout errors become a 502 instead of an
    # unhandled 500.
    try:
        response = requests.post(LLM_API_URL, json=payload, headers=headers, timeout=30)
    except requests.RequestException as exc:
        raise HTTPException(status_code=502, detail="Error from LLM API") from exc

    if response.status_code != 200:
        raise HTTPException(status_code=response.status_code, detail="Error from LLM API")

    return response.json()
59
+
60
if __name__ == "__main__":
    # Development entry point: serve the app directly on all interfaces.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)