"""Streamlit app: Q&A over the Black@Stanford exhibit oral-history interviews.

Embeds the user's question with OpenAI, retrieves matching interview
passages from a Pinecone index, and answers via the chain built by
``makechain.get_chain``. Configuration comes from environment variables.
"""

import os

import pinecone
import streamlit as st
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores.pinecone import Pinecone

from makechain import get_chain

# Configuration is injected via environment variables; any that are unset come
# back as None and will surface as errors during client initialization below.
PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX_NAME")
PINECONE_ENVIRONMENT = os.environ.get("PINECONE_ENVIRONMENT")
PINECONE_API_KEY = os.environ.get("PINECONE_API_KEY")
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")

# --- Page chrome ------------------------------------------------------------
st.title("Ask the Black@Stanford Exhibit")
st.sidebar.header("You can ask questions of interviews with Black Stanford students and faculty from the University "
                  "Archives")
st.sidebar.info(
    '''This is a web application that allows you to interact with the Stanford Archives. Enter a **Question** in the **text box** and **press enter** to receive a **response** from our ChatBot. '''
)

# --- Vector store -----------------------------------------------------------
pinecone.init(
    api_key=PINECONE_API_KEY,  # find at app.pinecone.io
    environment=PINECONE_ENVIRONMENT  # next to api key in console
)
index = pinecone.Index(index_name=PINECONE_INDEX_NAME)
embed = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)

# Metadata field on each Pinecone record that holds the raw passage text.
text_field = "text"

vectorStore = Pinecone(
    index, embed.embed_query, text_field
)

# --- QA chain ---------------------------------------------------------------
qa_chain = get_chain(vectorStore)


def main():
    """Read a question from the text box, run the QA chain, and render the
    answer followed by each source document's content and URL."""
    user_query = st.text_input("Enter your question here")

    # ":q" acts as a quit/no-op sentinel; empty input means no question yet.
    if user_query != ":q" and user_query != "":
        # Normalize whitespace before handing the question to the chain.
        query = user_query.strip().replace('\n', ' ')
        response = qa_chain(
            {
                'query': query,
            }
        )
        st.write(f"{response['result']}")

        st.write("Sources: ")
        documents = response['source_documents']
        for document in documents:
            page_content = document.page_content
            source_url = document.metadata['source']
            st.write("Page Content")
            st.write(page_content)
            st.write("Source URL:")
            st.write(source_url)
            st.markdown("""---""")


try:
    main()
except Exception as e:
    # Top-level boundary: show the failure in the UI rather than crashing
    # the Streamlit session with a raw traceback.
    st.write("An error occurred while running the application: ", e)