# pankaj9075rawat's picture
# Upload folder using huggingface_hub
# 6d64b51 verified
# raw
# history blame contribute delete
# 820 Bytes
from llamaLLM import get_response
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel # data validation
app = FastAPI()
@app.get("/")
async def read_main():
    """Health-check / landing endpoint returning a static greeting."""
    greeting = {"msg": "Hello from Llama this side !!!!"}
    return greeting
class Message(BaseModel):
    """Request body for POST /api/predict: one chat message from the user."""

    message: str  # raw user text; "exit"/"quit" terminates the chat
# System prompt that frames the assistant's persona for the conversation.
system_instruction = "you are a good chat model who has to act as a friend to the user."
# Module-level chat history seeded with the system prompt. NOTE(review): this
# single global is shared by ALL clients of the API and grows without bound
# as messages accumulate — acceptable for a demo, not for multi-user use.
convers = [{"role": "system", "content": system_instruction}]
@app.post("/api/predict")
async def predict(message: Message):
    """Chat endpoint: feed the user's message to the model and return its reply.

    The running history (`convers`) is threaded through `get_response`, which
    is expected to return ``(reply_text, updated_history)``.

    NOTE(review): `convers` is one module-level history shared by ALL clients
    and it grows without bound — fine for a demo, not for production.
    """
    print(message)  # debug: incoming payload
    user_input = message.message
    # Strip whitespace so inputs like "exit " or " QUIT" also end the chat
    # (the original exact-match check silently missed them).
    if user_input.strip().lower() in ("exit", "quit"):
        return {"response": "Exiting the chatbot. Goodbye!"}
    global convers
    print(len(convers))  # debug: current history length before this turn
    response, convers = get_response(user_input, convers)
    return {"response": response}