pvanand committed on
Commit
d82c1af
·
verified ·
1 Parent(s): ed88192

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +13 -45
main.py CHANGED
@@ -1,11 +1,17 @@
1
  # main.py
2
- from fastapi import FastAPI, HTTPException, Header
3
  from fastapi.middleware.cors import CORSMiddleware
4
- import requests
5
- import os
6
- from pydantic import BaseModel
7
 
8
- app = FastAPI()
 
 
 
 
 
 
 
9
 
10
  # Enable CORS for all origins
11
  app.add_middleware(
@@ -16,46 +22,8 @@ app.add_middleware(
16
  allow_headers=["*"], # Allows all headers
17
  )
18
 
19
- # Load environment variables
20
- LLM_API_URL = os.getenv("LLM_API_URL", "https://pvanand-audio-chat.hf.space/llm-agent")
21
- API_KEY = os.getenv("X_API_KEY")
22
-
23
- # Pydantic model for request validation
24
- class LLMChatRequest(BaseModel):
25
- prompt: str
26
- system_message: str = ""
27
- model_id: str = "openai/gpt-4o-mini"
28
- conversation_id: str = "string"
29
- user_id: str = "string"
30
-
31
- @app.post("/llm-chat")
32
- async def llm_chat(
33
- request: LLMChatRequest,
34
- x_api_key: str = Header(None)
35
- ):
36
- if x_api_key != API_KEY:
37
- raise HTTPException(status_code=403, detail="Invalid API Key")
38
-
39
- payload = {
40
- "prompt": request.prompt,
41
- "system_message": request.system_message,
42
- "model_id": request.model_id,
43
- "conversation_id": request.conversation_id,
44
- "user_id": request.user_id
45
- }
46
-
47
- headers = {
48
- "accept": "application/json",
49
- "X-API-Key": x_api_key,
50
- "Content-Type": "application/json"
51
- }
52
-
53
- # Use requests to call the external API
54
- response = requests.post(LLM_API_URL, json=payload, headers=headers)
55
- if response.status_code != 200:
56
- raise HTTPException(status_code=response.status_code, detail="Error from LLM API")
57
-
58
- return response.json()
59
 
60
  if __name__ == "__main__":
61
  import uvicorn
 
1
  # main.py
2
+ from fastapi import FastAPI
3
  from fastapi.middleware.cors import CORSMiddleware
4
+ from dotenv import load_dotenv
5
+ from routers.llm_chat import router as llm_chat_router
 
6
 
7
+ # Load environment variables from .env file
8
+ load_dotenv()
9
+
10
+ app = FastAPI(
11
+ title="LLM Chat API",
12
+ description="A FastAPI application to interact with an external LLM API.",
13
+ version="1.0.0",
14
+ )
15
 
16
  # Enable CORS for all origins
17
  app.add_middleware(
 
22
  allow_headers=["*"], # Allows all headers
23
  )
24
 
25
+ # Include the router
26
+ app.include_router(llm_chat_router)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
 
28
  if __name__ == "__main__":
29
  import uvicorn