pvanand committed on
Commit
e73c0a7
·
verified ·
1 Parent(s): 483491a

Update routers/llm_chat.py

Browse files
Files changed (1) hide show
  1. routers/llm_chat.py +17 -28
routers/llm_chat.py CHANGED
@@ -1,17 +1,15 @@
1
  # routers/llm_chat.py
2
  from fastapi import APIRouter, HTTPException, Header
3
- import requests
4
- import os
5
  from pydantic import BaseModel
 
6
 
7
  router = APIRouter(
8
  prefix="/api/v1", # Prefix for all routes in this router
9
  tags=["LLM Chat"], # Tag for OpenAPI documentation
10
  )
11
 
12
- # Load environment variables
13
- LLM_API_URL = os.getenv("LLM_API_URL")
14
- API_KEY = os.getenv("X_API_KEY")
15
 
16
  # Pydantic model for request validation
17
  class LLMChatRequest(BaseModel):
@@ -26,26 +24,17 @@ async def llm_chat(
26
  request: LLMChatRequest,
27
  x_api_key: str = Header(None, description="API Key for authentication")
28
  ):
29
- if x_api_key != API_KEY:
30
- raise HTTPException(status_code=403, detail="Invalid API Key")
31
-
32
- payload = {
33
- "prompt": request.prompt,
34
- "system_message": request.system_message,
35
- "model_id": request.model_id,
36
- "conversation_id": request.conversation_id,
37
- "user_id": request.user_id
38
- }
39
-
40
- headers = {
41
- "accept": "application/json",
42
- "X-API-Key": x_api_key,
43
- "Content-Type": "application/json"
44
- }
45
-
46
- # Use requests to call the external API
47
- response = requests.post(LLM_API_URL, json=payload, headers=headers)
48
- if response.status_code != 200:
49
- raise HTTPException(status_code=response.status_code, detail="Error from LLM API")
50
-
51
- return response.json()
 
1
  # routers/llm_chat.py
2
  from fastapi import APIRouter, HTTPException, Header
 
 
3
  from pydantic import BaseModel
4
+ from helpers.ai_client import AIClient
5
 
6
  router = APIRouter(
7
  prefix="/api/v1", # Prefix for all routes in this router
8
  tags=["LLM Chat"], # Tag for OpenAPI documentation
9
  )
10
 
11
+ # Initialize the AI client
12
+ ai_client = AIClient()
 
13
 
14
  # Pydantic model for request validation
15
  class LLMChatRequest(BaseModel):
 
24
  request: LLMChatRequest,
25
  x_api_key: str = Header(None, description="API Key for authentication")
26
  ):
27
+ try:
28
+ # Use the AI client to send the prompt
29
+ response = ai_client.send_prompt(
30
+ prompt=request.prompt,
31
+ system_message=request.system_message,
32
+ model_id=request.model_id,
33
+ conversation_id=request.conversation_id,
34
+ user_id=request.user_id,
35
+ api_key=x_api_key
36
+ )
37
+ return response
38
+ except Exception as e:
39
+ logger.error(f"Error in llm_chat: {e}")
40
+ raise HTTPException(status_code=500, detail=str(e))