Spaces:
Sleeping
Sleeping
File size: 1,720 Bytes
3ca0d47 3389487 3ca0d47 18e24a9 3ca0d47 dd801e4 3ca0d47 9666943 3ca0d47 acf3c0a 3ca0d47 8f571d8 3ca0d47 8f571d8 3ca0d47 3389487 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 |
import streamlit as st
from streamlit_chat import message
import openai
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import DeepLake
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
# Build the retrieval-QA chain once and reuse it across Streamlit reruns.
# Without caching, every user interaction would reconnect to DeepLake and
# re-instantiate the embeddings + LLM objects.
@st.cache_resource
def _load_qa_chain():
    """Return a RetrievalQA chain over the Flowret DeepLake dataset.

    Requires OPENAI_API_KEY in the environment (used by both the
    embeddings and the chat model).
    """
    embeddings = OpenAIEmbeddings()
    db = DeepLake(
        dataset_path="hub://shailfinaspirant/flowret-algorithm",
        read_only=True,
        embedding_function=embeddings,
    )
    retriever = db.as_retriever()
    # Cosine distance with maximal-marginal-relevance re-ranking:
    # fetch 100 candidates, return the 10 most relevant/diverse chunks.
    retriever.search_kwargs['distance_metric'] = 'cos'
    retriever.search_kwargs['fetch_k'] = 100
    retriever.search_kwargs['maximal_marginal_relevance'] = True
    retriever.search_kwargs['k'] = 10
    model = ChatOpenAI(model='gpt-3.5-turbo')  # switch to 'gpt-4' with money
    return RetrievalQA.from_llm(model, retriever=retriever)

qa = _load_qa_chain()
# NOTE(review): removed `qa.run("What is the repository's name?")` — its
# result was discarded, yet it issued a billable OpenAI call on every rerun.
st.title("Chat with GitHub Repository --> Flowret")  # plain string: f-string had no placeholders

# Seed the conversation state on first page load so the UI has an
# opening exchange to render.
if "generated" not in st.session_state:
    st.session_state["generated"] = ["i am ready to help you with Flowret repo"]
if "past" not in st.session_state:
    st.session_state["past"] = ["hello"]

# A field input to receive user queries
user_input = st.text_input("", key="input")

# Search the database and record both sides of the exchange.
# past/generated are seeded together and appended together, so they
# always stay the same length.
if user_input:
    output = qa.run(user_input)
    st.session_state.past.append(user_input)
    st.session_state.generated.append(output)

# Render the conversation; zip pairs each question with its answer,
# and the widget keys (f"{i}_user" / str(i)) match the originals exactly.
if st.session_state["generated"]:
    pairs = zip(st.session_state["past"], st.session_state["generated"])
    for i, (question, answer) in enumerate(pairs):
        message(question, is_user=True, key=f"{i}_user")
        message(answer, key=str(i))
|