import os
from uuid import uuid4

import streamlit as st

from llm import load_llm, response_generator
from vector_store import load_vector_store, process_pdf

# GGUF checkpoint pulled from the Hugging Face Hub.
repo_id = "MaziyarPanahi/Qwen2.5-7B-Instruct-GGUF"
filename = "Qwen2.5-7B-Instruct.Q4_K_S.gguf"

# Cache the loaded model across Streamlit reruns; without this the weights
# would be reloaded on every interaction (assumes load_llm does not already
# cache internally).
llm = st.cache_resource(load_llm)(repo_id, filename)
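
# For context: `load_llm` is defined in llm.py (not shown here). A minimal
# sketch of what it is assumed to do, using llama-cpp-python's Hub loader;
# the actual implementation in llm.py may differ:
#
#     from llama_cpp import Llama
#
#     def load_llm(repo_id: str, filename: str) -> Llama:
#         # Downloads the GGUF file on first call, then loads it into memory.
#         return Llama.from_pretrained(repo_id=repo_id, filename=filename)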

st.title("PDF QA")

# Initialise the chat history once per session.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Streamlit reruns the whole script on each interaction, so replay the
# conversation so far. Both roles are rendered with st.markdown to match
# how a fresh answer is displayed below.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ask a question about your PDFs"):
    # Record and display the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Generate the answer. The retriever is handed to response_generator,
    # which performs the relevant-chunk lookup itself, so no separate
    # retrieval call is needed here.
    with st.chat_message("assistant"):
        vector_store = load_vector_store()
        retriever = vector_store.as_retriever()
        response = response_generator(llm, st.session_state.messages, prompt, retriever)
        st.markdown(response["answer"])

    st.session_state.messages.append(
        {"role": "assistant", "content": response["answer"]}
    )
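
# For context: `response_generator` comes from llm.py (not shown here). The
# call site implies it returns a dict with an "answer" key. A minimal sketch
# of the assumed retrieval-augmented flow; the real implementation may differ:
#
#     def response_generator(llm, history, prompt, retriever):
#         docs = retriever.invoke(prompt)  # fetch the most relevant chunks
#         context = "\n\n".join(doc.page_content for doc in docs)
#         completion = llm.create_chat_completion(
#             messages=[
#                 {"role": "system", "content": f"Answer using this context:\n{context}"},
#                 *history,  # history already ends with the latest user prompt
#             ]
#         )
#         return {"answer": completion["choices"][0]["message"]["content"]}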

with st.sidebar:
    st.title("PDFs")
    st.write("Upload your PDFs here")
    uploaded_files = st.file_uploader(
        "Choose a PDF file", accept_multiple_files=True, type="pdf"
    )
    # With accept_multiple_files=True the uploader returns a (possibly empty)
    # list, never None, so truthiness is the right check.
    if uploaded_files:
        vector_store = load_vector_store()
        os.makedirs("./temp", exist_ok=True)  # ensure the scratch directory exists
        # The script reruns on every interaction; remember which uploads were
        # already ingested so the same PDF is not re-embedded each time.
        if "processed_files" not in st.session_state:
            st.session_state.processed_files = set()
        processed = st.session_state.processed_files
        for uploaded_file in uploaded_files:
            if uploaded_file.name in processed:
                continue
            # Write the upload to disk so the PDF loader can read it from a path.
            temp_file = f"./temp/{uploaded_file.name}-{uuid4()}.pdf"
            with open(temp_file, "wb") as file:
                file.write(uploaded_file.getvalue())

            st.write("filename:", uploaded_file.name)
            process_pdf(temp_file, vector_store)
            processed.add(uploaded_file.name)
        st.success("PDFs uploaded successfully. ✅")
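
# For context: `load_vector_store` and `process_pdf` live in vector_store.py
# (not shown here). A minimal sketch of the assumed ingestion path, using
# LangChain components purely as an illustration; the actual module may be
# built differently:
#
#     from langchain_chroma import Chroma
#     from langchain_community.document_loaders import PyPDFLoader
#     from langchain_huggingface import HuggingFaceEmbeddings
#     from langchain_text_splitters import RecursiveCharacterTextSplitter
#
#     def load_vector_store():
#         embeddings = HuggingFaceEmbeddings(
#             model_name="sentence-transformers/all-MiniLM-L6-v2"
#         )
#         return Chroma(persist_directory="./chroma", embedding_function=embeddings)
#
#     def process_pdf(path, vector_store):
#         pages = PyPDFLoader(path).load()
#         chunks = RecursiveCharacterTextSplitter(
#             chunk_size=1000, chunk_overlap=100
#         ).split_documents(pages)
#         vector_store.add_documents(chunks)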