File size: 881 Bytes
1a1b05e
 
 
 
a4420b8
1a1b05e
 
a4420b8
1a1b05e
 
a4420b8
1a1b05e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
# app.py
#
# FastAPI front-end service: exposes POST /chat/ and delegates response
# generation to a Chatbot instance that talks to a separate LLaMA API
# process (served by api.py on port 8001).
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from chatbot import Chatbot  # project-local wrapper around the LLaMA HTTP API

# Initialize FastAPI
app = FastAPI()

# Initialize the chatbot connected to the LLaMA API.
# NOTE(review): the URL is hard-coded; presumably api.py listens on
# localhost:8001 — keep in sync if that port changes.
chatbot = Chatbot(api_url="http://localhost:8001/llama/")  # Adjust this if you change the port of api.py

class ChatRequest(BaseModel):
    """Request body schema for POST /chat/."""

    # The user's message text. NOTE(review): the /chat/ handler currently
    # ignores this value and auto-generates a response instead — confirm
    # whether it should be forwarded to the chatbot.
    message: str

@app.post("/chat/")
async def chat(request: ChatRequest):
    """
    Generate a chatbot response.

    This endpoint could be called from another service if needed.
    Automatically generates a response.

    NOTE(review): `request.message` is validated but never used —
    `auto_generate_response()` takes no arguments here. Confirm whether
    the incoming message should be forwarded to the chatbot.

    Returns:
        dict: ``{"response": <generated text>}``.

    Raises:
        HTTPException: 500 with the underlying error message if the
            chatbot call fails.
    """
    try:
        # Automatically generate a response based on a default message or logic
        bot_response = chatbot.auto_generate_response()
    except Exception as e:
        # Chain the original exception (PEP 3134) so server logs show
        # the root cause behind the 500.
        raise HTTPException(status_code=500, detail=str(e)) from e
    # Success path kept outside the try: dict construction cannot raise.
    return {"response": bot_response}

# To run the backend, use 'uvicorn app:app --reload' in the terminal.