"""FastAPI backend exposing the LangGraph AI agent as a chat API."""
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import List

from ai_agent import get_response_from_ai_agent
class RequestState(BaseModel):
    """Schema of a chat request sent by the frontend.

    Validated automatically by FastAPI/pydantic before the endpoint runs.
    """

    model_name: str        # one of ALLOWED_MODEL_NAMES
    model_provider: str    # currently only "Groq" is accepted
    system_prompt: str     # system instruction for the agent (may be empty)
    messages: List[str]    # conversation messages as plain strings
    allow_search: bool     # whether the agent may use its search tool
# Groq model ids the endpoint will accept; anything else is rejected with a 400.
ALLOWED_MODEL_NAMES = ["llama-3.3-70b-versatile", "mixtral-8x7b-32768"]
# Application instance; routes are registered against this object.
app = FastAPI(title="LangGraph AI Agent")
@app.post("/chat")
async def chat_endpoint(request: RequestState):
    """API endpoint to interact with the chatbot using LangGraph and search tools.

    Args:
        request: Validated chat request (model, provider, prompt, messages).

    Returns:
        dict: ``{"response": <agent reply>}``.

    Raises:
        HTTPException: 400 on invalid provider/model/empty messages,
            500 on unexpected agent failures.
    """
    # Validate outside the broad try/except so 400s are not rewrapped as 500s.
    if request.model_provider != "Groq":
        raise HTTPException(status_code=400, detail="Only Groq provider is supported")
    if request.model_name not in ALLOWED_MODEL_NAMES:
        raise HTTPException(status_code=400, detail="Invalid model name. Please select a valid Groq model")
    if not request.messages:
        raise HTTPException(status_code=400, detail="No message provided")
    try:
        response = get_response_from_ai_agent(
            llm_id=request.model_name,
            # NOTE(review): uses the FIRST message; if the frontend sends full
            # history, messages[-1] may be intended — confirm against caller.
            query=request.messages[0],
            allow_search=request.allow_search,
            system_prompt=request.system_prompt or "You are a helpful AI assistant.",
        )
        return {"response": response}
    except HTTPException:
        # Preserve deliberate HTTP errors raised by the agent layer, if any.
        raise
    except Exception as e:
        print(f"Debug - Error in chat_endpoint: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
if __name__ == "__main__":
    # Local development entry point: serve the app on localhost:9999.
    import uvicorn

    bind_host, bind_port = "127.0.0.1", 9999
    uvicorn.run(app, host=bind_host, port=bind_port)