from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import List
from ai_agent import get_response_from_ai_agent

class RequestState(BaseModel):
    """Request body schema for the /chat endpoint."""

    model_name: str  # Groq model identifier; checked against ALLOWED_MODEL_NAMES
    model_provider: str  # only the literal "Groq" is accepted by the endpoint
    system_prompt: str  # system prompt for the agent; a default is substituted if empty
    messages: List[str]  # user messages; the endpoint forwards only the first element
    allow_search: bool  # whether the agent may use its search tools

# Groq model identifiers accepted by the /chat endpoint (validated in chat_endpoint).
ALLOWED_MODEL_NAMES = ["llama-3.3-70b-versatile", "mixtral-8x7b-32768"]

app = FastAPI(title="LangGraph AI Agent")

@app.post("/chat")
async def chat_endpoint(request: RequestState):
    """
    API Endpoint to interact with the Chatbot using LangGraph and search tools.

    Validates the request (provider must be "Groq", model must be in
    ALLOWED_MODEL_NAMES, messages must be non-empty), then forwards the first
    message to the AI agent and returns its response.

    Raises:
        HTTPException(400): invalid provider/model name or empty messages.
        HTTPException(500): unexpected failure inside the agent call.
    """
    # BUG FIX: validation used to sit inside the try block below, so the
    # generic `except Exception` caught the 400 HTTPExceptions and re-raised
    # them as 500s. Validate first so client errors propagate with status 400.
    if request.model_provider != "Groq":
        raise HTTPException(status_code=400, detail="Only Groq provider is supported")

    if request.model_name not in ALLOWED_MODEL_NAMES:
        raise HTTPException(status_code=400, detail="Invalid model name. Please select a valid Groq model")

    if not request.messages:
        raise HTTPException(status_code=400, detail="No message provided")

    try:
        # Only the first message is forwarded; fall back to a generic system
        # prompt when the caller supplies an empty one.
        response = get_response_from_ai_agent(
            llm_id=request.model_name,
            query=request.messages[0],
            allow_search=request.allow_search,
            system_prompt=request.system_prompt or "You are a helpful AI assistant."
        )
        return {"response": response}
    except Exception as e:
        # Agent-side failures are surfaced to the client as a 500 with detail.
        print(f"Debug - Error in chat_endpoint: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

if __name__ == "__main__":
    # Development entry point: serve the app locally with uvicorn.
    import uvicorn

    host, port = "127.0.0.1", 9999
    uvicorn.run(app, host=host, port=port)