|
|
|
|
|
import streamlit as st |
|
import os |
|
|
|
|
|
from embedding import load_embeddings |
|
from vectorstore import load_or_build_vectorstore |
|
from chain_setup import build_conversational_chain |
|
|
|
def main():
    """Run the Streamlit conversational QA app over the data-protection policy PDF.

    Builds (once per session) the embedding model, FAISS vector store, and
    conversational retrieval chain, then renders a chat UI that keeps its
    transcript in ``st.session_state["messages"]``.
    """
    # NOTE(review): the original title/greeting contained mojibake ("π¬", "π")
    # from an encoding round-trip; restored to the intended emoji.
    st.title("💬 Conversational Chat - Data Management & Personal Data Protection")

    local_file = "PoliciesEn001.pdf"   # source document for the knowledge base
    index_folder = "faiss_index"       # on-disk cache for the FAISS index

    # Streamlit re-executes this entire script on every user interaction.
    # Building embeddings / vector store / chain is expensive, so do it once
    # per session and cache the resulting chain in session state.
    if "qa_chain" not in st.session_state:
        embeddings = load_embeddings()
        vectorstore = load_or_build_vectorstore(local_file, index_folder, embeddings)
        st.session_state["qa_chain"] = build_conversational_chain(vectorstore)
    qa_chain = st.session_state["qa_chain"]

    # Seed the transcript with a greeting on the first run of the session.
    if "messages" not in st.session_state:
        st.session_state["messages"] = [
            {"role": "assistant", "content": "👋 Hello! Ask me anything about Data Management & Personal Data Protection!"}
        ]

    # Re-render the full conversation history on every rerun.
    for msg in st.session_state["messages"]:
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])

    user_input = st.chat_input("Type your question...")

    if user_input:
        # Record and display the user's turn.
        st.session_state["messages"].append({"role": "user", "content": user_input})
        with st.chat_message("user"):
            st.markdown(user_input)

        # The conversational chain returns a dict; "answer" holds the reply.
        response_dict = qa_chain({"question": user_input})
        answer = response_dict["answer"]

        # Record and display the assistant's turn.
        st.session_state["messages"].append({"role": "assistant", "content": answer})
        with st.chat_message("assistant"):
            st.markdown(answer)
|
|
|
# Script entry point: run the Streamlit app when executed directly
# (e.g. via `streamlit run`), not when imported as a module.
if __name__ == "__main__":

    main()