# Streamlit app: ship-document retrieval chat using a FAISS index
# and Together embeddings. (Web-scrape page chrome removed.)
import faiss
from langchain_together.embeddings import TogetherEmbeddings
import numpy as np
import pickle
import os
import streamlit as st
# Expose the Together API key (kept in Streamlit secrets) via the environment
# variable that langchain-together's client reads.
os.environ["TOGETHER_API_KEY"] = st.secrets["together_api_key"]
@st.cache_data
def load_data():
    """Load the corpus texts and the prebuilt FAISS index from disk.

    Cached by Streamlit (`st.cache_data`) so the files are read only once
    per session rather than on every script rerun.

    Returns:
        tuple: (list_of_texts, index) — the corpus texts and the FAISS
        index whose vectors are aligned with those texts.
    """
    # NOTE(review): pickle.load is safe only for trusted local artifacts;
    # never point this at user-supplied files.
    with open("list_of_texts.pkl", "rb") as f:
        list_of_texts = pickle.load(f)
    index = faiss.read_index("faiss.index")
    return list_of_texts, index
def response(sentence, embeddings, list_of_texts, index, k=5):
    """Return the corpus text whose embedding is nearest to *sentence*.

    Args:
        sentence: query string to embed.
        embeddings: object exposing ``embed_query(str) -> list[float]``.
        list_of_texts: corpus texts, positionally aligned with the index.
        index: FAISS-style index exposing ``search(vectors, k)``.
        k: number of neighbours to retrieve (default 5; only the single
           closest text is returned). Previously a hard-coded constant.

    Returns:
        The text closest to the query.
    """
    query = embeddings.embed_query(sentence)
    # FAISS expects a 2-D float32 batch, hence the wrapping list.
    query = np.array([query]).astype('float32')
    _distances, indices = index.search(query, k)
    # indices[0] holds neighbour ids for the single query vector,
    # sorted nearest-first — the first id is the best match.
    return list_of_texts[indices[0][0]]
# --- App setup --------------------------------------------------------------
embeddings = TogetherEmbeddings(model="togethercomputer/m2-bert-80M-8k-retrieval")
list_of_texts, index = load_data()

st.title("Ship Document Retriever")  # fixed typo: "Retreiver"

# Initialize chat history once per session.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay chat history on every rerun (Streamlit re-executes the whole script).
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("What is up?"):
    # Display and record the user's message.
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Retrieve the nearest document and show it as the assistant's reply.
    answer = response(prompt, embeddings, list_of_texts, index)
    with st.chat_message("assistant"):
        st.markdown(answer)
    # BUG FIX: the original appended the `response` FUNCTION OBJECT here
    # instead of the retrieved text, corrupting the chat history.
    st.session_state.messages.append({"role": "assistant", "content": answer})