import os

import gradio as gr
import pinecone
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Pinecone
from langchain.chains.question_answering import load_qa_chain
os.environ["TOKENIZERS_PARALLELISM"] = "false"
# optionally pick up a key from the environment (the variable name here is an assumption)
OPENAI_KEY = os.getenv("OPENAI_API_KEY", "")
OPENAI_TEMP = 0
PINECONE_KEY = os.environ["PINECONE_KEY"]
PINECONE_ENV = "asia-northeast1-gcp"
PINECONE_INDEX = "3gpp"
# return top-k text chunk from vector store
VECTOR_SEARCH_TOP_K = 10
# LLM input history length
LLM_HISTORY_LEN = 3
BUTTON_MIN_WIDTH = 150
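# status / warning messages shown in the UI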
MODEL_STATUS = "Waiting for an API key to initialize."
MODEL_LOADED = "Model Loaded"
MODEL_WARNING = "Please paste your OpenAI API Key from openai.com to initialize this application!"
webui_title = """
# 3GPP OpenAI Chatbot for Hackathon Demo
"""
init_message = """Welcome to the 3GPP Chatbot.
This demo is built with OpenAI, LangChain and Pinecone.
Please enter your question and click 'Submit'.
"""
def init_model(openai_key):
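    """Set up embeddings, the Pinecone vector store and the QA chain.

    Returns (api_key, "") on success or ("", "") on failure; the second
    value clears the chatbot output.
    """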
try:
os.environ["OPENAI_API_KEY"] = openai_key
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
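        # connect to the Pinecone service hosting the 3GPP index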
pinecone.init(api_key = PINECONE_KEY,
environment = PINECONE_ENV)
        # gpt-3.5-turbo-0301 is a chat model, so use the ChatOpenAI wrapper
        llm = ChatOpenAI(temperature=OPENAI_TEMP,
                         model_name="gpt-3.5-turbo-0301")
global db
db = Pinecone.from_existing_index(index_name = PINECONE_INDEX,
embedding = embeddings)
global chain
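        # "stuff" chain: all retrieved chunks are packed into a single prompt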
chain = load_qa_chain(llm, chain_type="stuff")
global MODEL_STATUS
MODEL_STATUS = MODEL_LOADED
return openai_key, ""
except Exception as e:
print(e)
return "",""
def get_chat_history(inputs) -> str:
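    """Flatten (human, ai) chat pairs into a plain-text transcript."""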
res = []
for human, ai in inputs:
res.append(f"Human: {human}\nAI: {ai}")
return "\n".join(res)
css = """.bigbox {
min-height:200px;
}"""
with gr.Blocks(css=css) as demo:
gr.Markdown(webui_title)
gr.Markdown(init_message)
if OPENAI_KEY and OPENAI_KEY.startswith("sk-") and len(OPENAI_KEY) > 50:
api_textbox_ph = "API Founded in Environment Variable: sk-..." + OPENAI_KEY[-4:]
api_textbox_edit = False
init_model(OPENAI_KEY)
else:
api_textbox_ph = "Paste Your OpenAI API Key (sk-...) and Hit ENTER"
api_textbox_edit = True
api_textbox = gr.Textbox(placeholder = api_textbox_ph,
interactive = api_textbox_edit,
show_label=False, lines=1, type='password')
with gr.Tab("Chatbot"):
with gr.Row():
with gr.Column(scale=10):
chatbot = gr.Chatbot(elem_classes="bigbox")
'''
with gr.Column(scale=1, min_width=BUTTON_MIN_WIDTH):
temp = gr.Slider(0,
2,
value=OPENAI_TEMP,
step=0.1,
label="temperature",
interactive=True)
init = gr.Button("Init")
'''
with gr.Row():
with gr.Column(scale=10):
query = gr.Textbox(label="Question:",
lines=2)
                ref = gr.Textbox(label="Reference (optional):")
with gr.Column(scale=1, min_width=BUTTON_MIN_WIDTH):
clear = gr.Button("Clear")
submit = gr.Button("Submit",variant="primary")
with gr.Tab("Details"):
top_k = gr.Slider(1,
20,
value=VECTOR_SEARCH_TOP_K,
step=1,
label="Vector similarity top_k",
interactive=True)
detail_panel = gr.Chatbot(label="Related Docs")
def user(user_message, history):
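    # append the new user turn with an empty bot slot and clear the input box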
return "", history+[[user_message, None]]
def bot(box_message, ref_message, top_k):
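    """Answer the latest question using chunks retrieved from Pinecone."""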
if MODEL_STATUS != MODEL_LOADED:
box_message[-1][1] = MODEL_WARNING
return box_message, "", ""
    # in each chat pair, index 0 is the user question and index 1 is the bot response
question = box_message[-1][0]
history = box_message[:-1]
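    # with no explicit reference text, the question itself is used as the retrieval query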
if not ref_message:
ref_message = question
details = f"Q: {question}"
else:
details = f"Q: {question}\nR: {ref_message}"
    # retrieve the top_k most similar chunks from the Pinecone index
    docsearch = db.as_retriever(search_kwargs={'k': int(top_k)})
docs = docsearch.get_relevant_documents(ref_message)
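    # run the QA chain over the retrieved documents; chat_history is passed
    # through but only used if the underlying prompt template expects it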
all_output = chain({"input_documents": docs,
"question": question,
"chat_history": get_chat_history(history)})
bot_message = all_output['output_text']
    # render each retrieved document as a collapsible HTML <details> block
    source = "".join([f"""<details> <summary>{doc.metadata["source"]}</summary>
{doc.page_content}
</details>""" for doc in docs])
box_message[-1][1] = bot_message
return box_message, "", [[details, source]]
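    # on Submit: first append the user turn, then generate the bot reply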
submit.click(user, [query, chatbot], [query, chatbot], queue=False).then(
bot, [chatbot, ref, top_k], [chatbot, ref, detail_panel]
)
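    # pressing ENTER in the key textbox (re)initializes the model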
api_textbox.submit(init_model, api_textbox, [api_textbox, chatbot])
clear.click(lambda: (None,None,None), None, [query, ref, chatbot], queue=False)
if __name__ == "__main__":
demo.launch(share=False, inbrowser=True)