from langchain.embeddings.openai import OpenAIEmbeddings
# from langchain.vectorstores import Chroma
from langchain.vectorstores import FAISS
from langchain.text_splitter import CharacterTextSplitter
from langchain.llms import OpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from langchain.document_loaders import DirectoryLoader
import os

import streamlit as st
from streamlit_chat import message

st.title("Welcome to AutoBot")

# Session state keeps the conversation across Streamlit reruns.
if 'responses' not in st.session_state:
    st.session_state['responses'] = ["How can I assist you?"]

if 'requests' not in st.session_state:
    st.session_state['requests'] = []

# Read the API key from the environment; never hard-code secrets in source.
openai_api_key = os.getenv("OPENAI_API_KEY")

embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)

# Load the pre-built FAISS index of diagnostics documents from disk.
new_db = FAISS.load_local("faiss_index_diagnostics_RCV", embeddings)

llm = OpenAI(openai_api_key=openai_api_key, temperature=0.0)

# Buffer memory holds the chat history that the retrieval chain conditions on.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

retriever = new_db.as_retriever()
chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    chain_type="stuff",
    memory=memory,
    retriever=retriever,
    verbose=False,
)

# Container for chat history
response_container = st.container()
# Container for the text box
textcontainer = st.container()

with textcontainer:
    query = st.text_input(label="Please enter your prompt here: ", placeholder="Ask me")
    if query:
        with st.spinner("Generating..."):
            response = chain.run(query)
        st.session_state.requests.append(query)
        st.session_state.responses.append(response)

with response_container:
    if st.session_state['responses']:
        for i in range(len(st.session_state['responses'])):
            message(st.session_state['responses'][i], key=str(i))
            if i < len(st.session_state['requests']):
                message(st.session_state['requests'][i], is_user=True, key=str(i) + '_user')

# with st.expander('Message history'):
#     st.info(memory.buffer)
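
# A minimal sketch of how the "faiss_index_diagnostics_RCV" index could be (re)built,
# using the DirectoryLoader and CharacterTextSplitter imported above. This is an
# assumption, not the original build script: the "docs/" source directory and the
# chunking parameters are hypothetical. The function is defined but never called,
# so it does not change the app's behaviour.
def build_faiss_index(source_dir="docs/", index_path="faiss_index_diagnostics_RCV"):
    """Load raw documents, split them into chunks, embed them, and save a FAISS index."""
    documents = DirectoryLoader(source_dir).load()
    splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(documents)
    index = FAISS.from_documents(chunks, embeddings)
    index.save_local(index_path)
    return index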