# Streamlit app: Ship Document Retriever (runs as a Hugging Face Space).
# Dependencies: FAISS for vector similarity search, Together embeddings via
# LangChain, Streamlit for the chat UI.
import os
import pickle

import faiss
import numpy as np
import streamlit as st
from langchain_together.embeddings import TogetherEmbeddings

# Expose the Together API key (stored in Streamlit secrets) to the SDK,
# which reads it from the environment.
os.environ["TOGETHER_API_KEY"] = st.secrets["together_api_key"]
def load_data():
    """Load the corpus texts and the prebuilt FAISS index from disk.

    Returns:
        tuple: ``(list_of_texts, index)`` where ``list_of_texts`` is the
        pickled list of document strings and ``index`` is a FAISS index
        whose row ids are aligned with ``list_of_texts``.
    """
    # NOTE(review): pickle.load is unsafe on untrusted input; this file is
    # assumed to ship with the app itself.
    with open("list_of_texts.pkl", "rb") as f:
        list_of_texts = pickle.load(f)
    index = faiss.read_index("faiss.index")
    return list_of_texts, index
def response(sentence, embeddings, list_of_texts, index, k=5):
    """Return the corpus text nearest to *sentence* in embedding space.

    Args:
        sentence: Query string to embed.
        embeddings: Object exposing ``embed_query(str) -> list[float]``.
        list_of_texts: Corpus texts, aligned with the rows of *index*.
        index: FAISS-style index exposing ``search(vectors, k)``.
        k: Number of neighbours to retrieve (generalized from the original
           hard-coded 5; only the closest is returned).

    Returns:
        The single corpus text closest to the query embedding.
    """
    # FAISS expects a 2-D float32 array of query vectors.
    query = np.array([embeddings.embed_query(sentence)]).astype('float32')
    # search() returns (distances, ids), each shaped (n_queries, k), with
    # ids ordered by increasing distance — the first id is the best match.
    _distances, ids = index.search(query, k)
    return list_of_texts[ids[0][0]]
# Module-level setup: the embedding client, corpus, and index are created at
# import time (Streamlit re-executes the whole script on every interaction).
embeddings = TogetherEmbeddings(model="togethercomputer/m2-bert-80M-8k-retrieval")
list_of_texts, index = load_data()

# Fixed user-facing typo: "Retreiver" -> "Retriever".
st.title("Ship Document Retriever")
# Initialize chat history on first run; st.session_state persists across
# Streamlit's script reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored conversation so it survives each rerun.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
if prompt := st.chat_input("What is up?"):
    # Show and record the user's message.
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Retrieve the nearest document and present it as the assistant's reply.
    get_response = response(prompt, embeddings, list_of_texts, index)
    with st.chat_message("assistant"):
        st.markdown(get_response)
    # BUG FIX: the original appended the `response` function object itself to
    # the history; store the retrieved text (`get_response`) instead.
    st.session_state.messages.append({"role": "assistant", "content": get_response})