# Hugging Face Space chatbot app (page header captured at scrape time reported: "Runtime error").
# Standard library.
import os

# Third-party.
import gradio as gr
import openai
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.memory import ConversationTokenBufferMemory
from langchain.vectorstores.pgvector import PGVector
# --- Configuration from environment ---------------------------------------
# os.environ[...] (no default) fails fast at startup if a variable is missing.
openai.api_key = os.environ['OPENAI_API_KEY']
hf_api_key = os.environ['HF_API_KEY']

# PostgreSQL / pgvector connection settings.
host = os.environ['host']
port = os.environ['port']
database_name = os.environ['database_name']
user = os.environ['user']
passwd = os.environ['passwd']
COLLECTION_NAME = os.environ['COLLECTION_NAME']

# Chat model / memory settings.
# NOTE(review): env values are strings; cast before any numeric use.
memory_token_limit = os.environ['memory_token_limit']
llm_name = os.environ['llm_name']
bot_name = os.environ['bot_name']
# Embedding function used to vectorize queries against the pgvector store.
embedding = OpenAIEmbeddings()

# Build the libpq/SQLAlchemy-style connection string for pgvector.
CONNECTION_STRING = PGVector.connection_string_from_db_params(
    driver=os.environ.get("PGVECTOR_DRIVER", "psycopg2"),
    host=host,  # FIX: original was missing the comma here -> SyntaxError
    port=int(port),
    database=database_name,
    user=user,
    password=passwd,
)
# Vector store backed by PostgreSQL/pgvector; connects using the settings above.
vectordb = PGVector(
    embedding_function=embedding,
    collection_name=COLLECTION_NAME,
    connection_string=CONNECTION_STRING,
)

# Chat LLM; the model name comes from the environment
# (e.g. "gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4-32k").
# temperature=0 for maximally deterministic answers.
llm = ChatOpenAI(model_name=llm_name, temperature=0)
# Retriever over the pgvector store (default search settings).
retriever = vectordb.as_retriever()

# Token-bounded conversation memory. max_token_limit must be an int, but
# environment variables are strings -- the original passed the raw str.
memory = ConversationTokenBufferMemory(
    llm=llm,
    max_token_limit=int(memory_token_limit),
    memory_key="chat_history",
    return_messages=True,
)

# Retrieval-augmented conversational chain combining LLM, retriever and memory.
qa = ConversationalRetrievalChain.from_llm(
    llm,
    retriever=retriever,
    memory=memory,
    verbose=False,
)
# --- Gradio UI -------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown(f"# {bot_name}")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Type your message (Shift + Enter to submit)", lines=6)
    submit = gr.Button("Submit")
    clear = gr.Button("Clear")

    def respond(message, chat_history):
        """Run the QA chain on *message*, append (question, answer) to the
        visible chat history, and clear the input textbox."""
        result = qa({"question": message})
        chat_history.append((message, result["answer"]))
        return "", chat_history

    # Pressing Enter in the textbox and clicking Submit both trigger respond().
    msg.submit(respond, [msg, chatbot], [msg, chatbot], queue=False)
    submit.click(respond, [msg, chatbot], [msg, chatbot], queue=False)
    # Clear only wipes the chatbot display; the chain's `memory` is NOT reset.
    clear.click(lambda: None, None, chatbot, queue=False)
# Close any Gradio apps left over from a previous run, then serve this one.
gr.close_all()
demo.queue()              # enable request queuing for concurrent users
demo.launch(share=False)  # local/Space-only; no public share link