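# Gradio chat app that answers questions over documents stored in a PGVector
# collection, using LangChain's ConversationalRetrievalChain with OpenAI chat
# and embedding models.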
import os
import openai
import gradio as gr
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores.pgvector import PGVector
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationTokenBufferMemory
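# All configuration (API keys, database credentials, model and bot settings)
# is read from environment variables.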
openai.api_key = os.environ['OPENAI_API_KEY']
hf_api_key = os.environ['HF_API_KEY']
host = os.environ['host']
port = os.environ['port']
database_name = os.environ['database_name']
user = os.environ['user']
passwd = os.environ['passwd']
COLLECTION_NAME = os.environ['COLLECTION_NAME']
memory_token_limit = os.environ['memory_token_limit']
llm_name = os.environ['llm_name']
bot_name = os.environ['bot_name']
bot_desc = os.environ['bot_desc']
embedding = OpenAIEmbeddings()
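# Build the Postgres connection string from the individual database parameters.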
CONNECTION_STRING = PGVector.connection_string_from_db_params(
    driver=os.environ.get("PGVECTOR_DRIVER", "psycopg2"),
    host=host,
    port=int(port),
    database=database_name,
    user=user,
    password=passwd,
)
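# Vector store over the pgvector collection; the documents are assumed to have
# been embedded and loaded into this collection separately.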
vectordb = PGVector(
    embedding_function=embedding,
    collection_name=COLLECTION_NAME,
    connection_string=CONNECTION_STRING,
)
# llm_name = "gpt-3.5-turbo"
# llm_name = "gpt-3.5-turbo-16k"
# llm_name = "gpt-4-32k"
llm = ChatOpenAI(model_name=llm_name, temperature=0)
retriever = vectordb.as_retriever()
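# Conversation memory that keeps the most recent exchanges within the
# configured token budget.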
memory = ConversationTokenBufferMemory(
    llm=llm,
    max_token_limit=int(memory_token_limit),
    memory_key="chat_history",
    return_messages=True
)
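# Conversational retrieval chain: rephrases follow-up questions using the chat
# history, retrieves relevant documents from the vector store, and generates
# an answer.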
qa = ConversationalRetrievalChain.from_llm(
    llm,
    retriever=retriever,
    memory=memory,
    verbose=False
)
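# Gradio UI: chat window, message box, and submit/clear controls.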
with gr.Blocks() as demo:
    gr.Markdown(f"# {bot_name}\n\n{bot_desc}")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Type your message (Shift + Enter to submit)", lines=6)
    submit = gr.Button("Submit")
    clear = gr.Button("Clear")
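    # Run the QA chain on the user's message, append the (question, answer)
    # pair to the visible chat history, and clear the textbox.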
    def respond(message, chat_history):
        result = qa({"question": message})
        chat_history.append((message, result["answer"]))
        return "", chat_history
    msg.submit(respond, [msg, chatbot], [msg, chatbot], queue=False)
    submit.click(respond, [msg, chatbot], [msg, chatbot], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)
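# Close any previously running Gradio apps in this process, enable request
# queuing, and launch without a public share link.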
gr.close_all()
demo.queue()
demo.launch(share=False)
# gr.close_all()
# demo.close()
# demo.clear()