Somnath3570 committed on
Commit ca656ac · verified · 1 Parent(s): 1d5e089

Create backend.py

Files changed (1)
  1. backend.py +47 -0
backend.py ADDED
@@ -0,0 +1,47 @@
+ from fastapi import FastAPI, HTTPException
+ from pydantic import BaseModel
+ from typing import List
+ from ai_agent import get_response_from_ai_agent
+
+ class RequestState(BaseModel):
+     model_name: str
+     model_provider: str
+     system_prompt: str
+     messages: List[str]
+     allow_search: bool
+
+ ALLOWED_MODEL_NAMES = ["llama-3.3-70b-versatile", "mixtral-8x7b-32768"]
+
+ app = FastAPI(title="LangGraph AI Agent")
+
+ @app.post("/chat")
+ async def chat_endpoint(request: RequestState):
+     """
+     API Endpoint to interact with the Chatbot using LangGraph and search tools.
+     """
+     try:
+         if request.model_provider != "Groq":
+             raise HTTPException(status_code=400, detail="Only Groq provider is supported")
+
+         if request.model_name not in ALLOWED_MODEL_NAMES:
+             raise HTTPException(status_code=400, detail="Invalid model name. Please select a valid Groq model")
+
+         if not request.messages:
+             raise HTTPException(status_code=400, detail="No message provided")
+
+         response = get_response_from_ai_agent(
+             llm_id=request.model_name,
+             query=request.messages[0],
+             allow_search=request.allow_search,
+             system_prompt=request.system_prompt or "You are a helpful AI assistant."
+         )
+
+         return {"response": response}
+
+     except Exception as e:
+         print(f"Debug - Error in chat_endpoint: {str(e)}")
+         raise HTTPException(status_code=500, detail=str(e))
+
+ if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run(app, host="127.0.0.1", port=9999)
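
For reference, a minimal client sketch for exercising the new /chat endpoint, assuming the server from this commit is running locally on port 9999; the payload values below are illustrative examples and are not part of the commit:

import requests

payload = {
    "model_name": "llama-3.3-70b-versatile",
    "model_provider": "Groq",
    "system_prompt": "You are a helpful AI assistant.",
    "messages": ["What is LangGraph?"],
    "allow_search": False,
}

# POST the RequestState fields as JSON; the endpoint returns {"response": ...}
resp = requests.post("http://127.0.0.1:9999/chat", json=payload)
resp.raise_for_status()
print(resp.json()["response"])

Note that the broad except Exception in chat_endpoint also catches the HTTPExceptions raised during validation, so the 400 responses are re-raised as 500s; adding an except HTTPException: raise clause before the generic handler would preserve the intended status codes.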