
cache_examples="lazy" to allow flagging: ensures `.setup(...)` is executed before the `.flag(...)` call
98e4883
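Background for the change: when examples are cached eagerly, `gr.ChatInterface` runs `answer_question` on each example while the app is being built, so the `hf_writer.flag(...)` call inside it would fire before `hf_writer.setup(...)` has been reached in the `with` block below. `cache_examples="lazy"` defers those example runs until a user first selects an example, by which point the dataset writer is already set up.
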
import urllib.request

import gradio as gr
from huggingface_hub import get_token

from chatbot import get_retrieval_qa
from flagging import myHuggingFaceDatasetSaver as HuggingFaceDatasetSaver
# from gradio.flagging import HuggingFaceDatasetSaver
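# NOTE: flagging.py is not part of this change. A minimal sketch of what
# myHuggingFaceDatasetSaver might look like, assuming it only subclasses Gradio's
# built-in saver (the signature below matches gradio.flagging.FlaggingCallback.flag;
# the body is an illustrative assumption, not code from this repository):
#
#     from gradio.flagging import HuggingFaceDatasetSaver
#
#     class myHuggingFaceDatasetSaver(HuggingFaceDatasetSaver):
#         def flag(self, flag_data, flag_option="", username=None):
#             # e.g. adjust the row (timestamps, extra columns) before saving
#             return super().flag(flag_data, flag_option, username)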

# get the html data and save it to a file
def download_html(_url: str, _filename: str):
    html = urllib.request.urlopen(_url).read()
    with open(_filename, "wb") as f:
        f.write(html)


url = "https://sea.ai/faq"
filename = "FAQ_SEA.AI.html"
download_html(url, filename)

# load the retrieval QA model
qa = get_retrieval_qa(filename)

# dataset callback
dataset_name = "SEA-AI/seadog-chat-history"
hf_writer = HuggingFaceDatasetSaver(get_token(), dataset_name)


def answer_question(message, history, system):
    # combine the user message and the system prompt (history is not used yet)
    query = " ".join([message, system])
    retrieval_qa = qa.invoke(query)
    result = retrieval_qa["result"]  # "query" and "source_documents" are also available
    result = result.replace('"', "").strip()  # clean up the result
    # save the query and result to the dataset
    print("flagging...")
    hf_writer.flag(flag_data=[query, result])
    return result


title = "✨ SEA Dog"
description = """
<p align="center">
DISCLAIMERS
<br>
I can't remember conversations yet, be patient with me.
<br>
Your queries will be saved to
<a href='https://huggingface.co/datasets/SEA-AI/seadog-chat-history'>this dataset</a>
for analytics purposes.
</p>
"""

css = """
h1 {
    text-align: center;
    display: block;
}
"""

theme = gr.themes.Default(primary_hue=gr.themes.colors.indigo)

chatbot = gr.Chatbot(
    value=[
        [
            None,
            "I have memorized the entire SEA.AI FAQ page. Ask me anything about it! 🧠",
        ],
    ],
    show_label=False,
    show_copy_button=True,
    likeable=True,
)


def on_like(evt: gr.LikeData):
    print(f"{evt.index=}, {evt.value=}, {evt.liked=}")


with gr.ChatInterface(
    answer_question,
    chatbot=chatbot,
    title=title,
    description=description,
    additional_inputs=[gr.Textbox("", label="SYSTEM")],
    examples=[
        ["Can SEA.AI see at night?", "You are a helpful assistant."],
        ["Can SEA.AI see at night?", "Reply with sailor slang."],
    ],
    cache_examples="lazy",  # cache each example on first use, after hf_writer.setup() has run
    submit_btn=None,
    retry_btn=None,
    undo_btn=None,
    clear_btn=None,
    css=css,
    theme=theme,
) as demo:
    # on page load, download the html and save it to a file
    demo.load(lambda: download_html(url, filename))

    # This needs to be called prior to the first call to callback.flag()
    hf_writer.setup([demo.textbox, demo.chatbot], "flagged")
    print("flagging setup done!")

    # like callback
    chatbot.like(on_like, None, None)


if __name__ == "__main__":
    demo.launch()
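
Running the app assumes a Hugging Face token is already available to `huggingface_hub.get_token()` (e.g. from `huggingface-cli login` or the `HF_TOKEN` environment variable), since it is used to write the flagged queries and answers to the `SEA-AI/seadog-chat-history` dataset.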