# FastAPI chat service backed by the llamaLLM `get_response` helper.
from llamaLLM import get_response
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel # data validation
app = FastAPI()  # application instance that the route decorators below attach to
@app.get("/")
async def read_main():
    """Root endpoint: simple liveness/greeting message for the API."""
    greeting = {"msg": "Hello from Llama this side !!!!"}
    return greeting
class Message(BaseModel):
    # Request-body schema for POST /api/predict: one user chat message.
    message: str
# System prompt that frames the model's persona for every conversation.
system_instruction = "you are a good chat model who has to act as a friend to the user."
# Module-level chat history as role/content dicts, seeded with the system prompt.
# NOTE(review): this is shared by ALL requests — every client continues the same
# single conversation; confirm whether per-session state is intended.
convers = [{"role": "system", "content": system_instruction}]
@app.post("/api/predict")
async def predict(message: Message):
    """Handle one chat turn.

    Forwards the user's message (plus the accumulated history) to the LLM via
    ``get_response`` and returns the model's reply as ``{"response": ...}``.
    Sending "exit" or "quit" ends the conversation and resets the history.
    """
    # History lives at module level; declare the rebinding up front.
    global convers

    user_input = message.message
    # .strip() so padded input like "exit " still ends the session.
    if user_input.strip().lower() in ("exit", "quit"):
        # Bug fix: previously the history was never cleared, so the "ended"
        # conversation silently continued on the next request. Re-seed it.
        convers = [{"role": "system", "content": system_instruction}]
        return {"response": "Exiting the chatbot. Goodbye!"}

    # NOTE(review): `convers` is shared across all clients — every request
    # appends to one global conversation. Consider per-session state.
    response, convers = get_response(user_input, convers)
    return {"response": response}