vis / chatbot.py
Guhanselvam's picture
Update chatbot.py
1a1b05e verified
raw
history blame
881 Bytes
# app.py
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from chatbot import Chatbot
# Application wiring: one FastAPI app plus a single shared Chatbot client.
app = FastAPI()
# Module-level Chatbot instance used by the /chat/ endpoint below.
# NOTE(review): `Chatbot` is imported from the local `chatbot` module; its
# constructor signature (api_url=...) is assumed from this call site — confirm.
# The URL must match the host/port where api.py serves the LLaMA endpoint.
chatbot = Chatbot(api_url="http://localhost:8001/llama/") # Adjust this if you change the port of api.py
class ChatRequest(BaseModel):
    """Request body accepted by the ``/chat/`` endpoint.

    NOTE(review): the handler currently never reads ``message`` — it calls
    ``chatbot.auto_generate_response()`` with no arguments. Confirm whether
    the field should be forwarded to the chatbot.
    """

    message: str
@app.post("/chat/")
async def chat(request: ChatRequest):
    """Return an automatically generated chatbot response.

    Delegates to the module-level ``chatbot`` instance's
    ``auto_generate_response()``; any exception raised by it is surfaced
    to the client as an HTTP 500 with the exception text as the detail.

    NOTE(review): ``request.message`` is accepted but never used here —
    verify whether it should be passed to the chatbot.
    """
    try:
        reply = chatbot.auto_generate_response()
    except Exception as exc:
        # API boundary: translate any backend failure into an HTTP 500.
        raise HTTPException(status_code=500, detail=str(exc))
    return {"response": reply}
# To run the backend, use 'uvicorn app:app --reload' in the terminal.