# app.py
import streamlit as st

# Local imports
from embedding import load_embeddings
from vectorstore import load_or_build_vectorstore
from chain_setup import build_conversational_chain

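# Streamlit reruns this script top to bottom on every interaction. Without
# caching, the embeddings, vector store, and chain would be rebuilt on each
# message, which is slow and, assuming the chain carries its own conversation
# memory (implied by the {"question": ...}-only call below), would also reset
# the chat history every turn. setup_chain is a helper sketched in here for
# that fix; it simply wraps the three setup calls from the original Steps 1-3.
@st.cache_resource
def setup_chain(pdf_path: str, index_folder: str):
    # Step 1: Load embeddings
    embeddings = load_embeddings()
    # Step 2: Build or load the vector store
    vectorstore = load_or_build_vectorstore(pdf_path, index_folder, embeddings)
    # Step 3: Build the conversational retrieval chain
    return build_conversational_chain(vectorstore)
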
def main():
    st.title("💬 Conversational Chat - Data Management & Personal Data Protection")

    # Paths and constants
    local_file = "PoliciesEn001.pdf"
    index_folder = "faiss_index"
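    # "PoliciesEn001.pdf" is expected to sit next to app.py; "faiss_index" is
    # presumably where load_or_build_vectorstore persists and reloads the index.
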
    # Steps 1-3: embeddings, vector store, and chain, built once via the cached
    # helper above so reruns reuse the same chain (and its conversation memory)
    qa_chain = setup_chain(local_file, index_folder)

    # Step 4: Session state for the UI chat
    if "messages" not in st.session_state:
        st.session_state["messages"] = [
            {"role": "assistant", "content": "👋 Hello! Ask me anything about Data Management & Personal Data Protection!"}
        ]
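    # st.session_state survives reruns, so the transcript persists across turns;
    # re-rendering the full history below is the standard Streamlit chat pattern.
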
    # Display existing messages
    for msg in st.session_state["messages"]:
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])

    # Step 5: Chat input
    user_input = st.chat_input("Type your question...")

    # Step 6: Process user input
    if user_input:
        # a) Display the user message
        st.session_state["messages"].append({"role": "user", "content": user_input})
        with st.chat_message("user"):
            st.markdown(user_input)

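        # Assumption: the chain holds its own chat memory, so only "question"
        # is passed here. On LangChain >= 0.1, calling a chain like a function
        # is deprecated; qa_chain.invoke({"question": user_input}) is the newer form.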
        # b) Run the chain
        response_dict = qa_chain({"question": user_input})
        answer = response_dict["answer"]

        # c) Display the assistant response
        st.session_state["messages"].append({"role": "assistant", "content": answer})
        with st.chat_message("assistant"):
            st.markdown(answer)

if __name__ == "__main__":
    main()
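
# To run locally (assumed dependencies: streamlit, langchain, a FAISS build,
# plus the local embedding/vectorstore/chain_setup modules):
#   streamlit run app.py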