hypeconqueror1's picture
Update main.py
aad567b verified
raw
history blame
1.29 kB
from fastapi import FastAPI, File, UploadFile, Form
import os
from langchain_community.document_loaders import PyMuPDFLoader
from LoadLLM import Loadllm
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import ConversationalRetrievalChain
# Directory where the per-request FAISS vector index is persisted to disk.
DB_FAISS_PATH = 'vectorstore/db_faiss'
# FastAPI application instance; route handlers below register against it.
app = FastAPI()
@app.get('/')
async def home():
    """Liveness probe: confirms the API server is reachable."""
    status_message = "API Server Running"
    return status_message
@app.post('/PromptBuddy')
async def PromptLLM(file: UploadFile = File(...), query: str = Form(...)):
    """Answer *query* against the uploaded PDF.

    Saves the upload to disk, indexes it into a FAISS vector store using
    sentence-transformer embeddings, and runs a conversational retrieval
    chain over it with the locally loaded LLM.

    Parameters:
        file: the PDF document to index (multipart upload).
        query: the question to answer from the document.

    Returns:
        The chain's answer string.
    """
    # file.filename is client-controlled: strip any directory components
    # so a name like "../../etc/x" cannot escape the uploads directory.
    safe_name = os.path.basename(file.filename)
    # Ensure the target directory exists; without this the first request
    # on a fresh deployment fails with FileNotFoundError.
    os.makedirs("uploads", exist_ok=True)
    upload_path = os.path.join("uploads", safe_name)
    with open(upload_path, "wb") as f:
        f.write(file.file.read())
    loader = PyMuPDFLoader(file_path=upload_path)
    data = loader.load()
    # Create embeddings using Sentence Transformers
    embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')
    # Create a FAISS vector store and save embeddings
    db = FAISS.from_documents(data, embeddings)
    db.save_local(DB_FAISS_PATH)
    # Load the language model
    llm = Loadllm.load_llm()
    # Create a conversational chain
    chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=db.as_retriever())
    # chat_history must be a list of (question, answer) pairs; the chain
    # iterates over it, so an empty string is the wrong "no history" value.
    result = chain({"question": query, "chat_history": []})
    return result["answer"]