import urllib.request
import gradio as gr
from huggingface_hub import get_token
from chatbot import get_retrieval_qa
from flagging import myHuggingFaceDatasetSaver
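
# NOTE: `chatbot` and `flagging` are local modules in this repo. It is assumed
# here (their code is not shown) that `get_retrieval_qa(filename)` builds a
# retrieval-QA chain over the downloaded HTML and that `myHuggingFaceDatasetSaver`
# extends Gradio's HuggingFaceDatasetSaver flagging callback to log rows to a
# Hugging Face dataset.
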
# get the html data and save it to a file
def download_html(_url: str, _filename: str):
    html = urllib.request.urlopen(_url).read()
    with open(_filename, "wb") as f:
        f.write(html)

url = "https://sea.ai/faq"
filename = "FAQ_SEA.AI.html"
download_html(url, filename)
# load the retrieval QA model
qa = get_retrieval_qa(filename)
# dataset callback
dataset_name = "SEA-AI/seadog-chat-history"
hf_writer = myHuggingFaceDatasetSaver(get_token(), dataset_name)

def answer_question(message, history, system):
    # combine the user message and the system prompt into a single query
    # (the chat history is not used yet)
    query = " ".join([message, system])
    retrieval_qa = qa.invoke(query)
    result = retrieval_qa["result"]
    result = result.replace('"', "").strip()  # clean up the result
    # query = retrieval_qa["query"]
    # source_documents = retrieval_qa["source_documents"]
    # save the query and result to the dataset
    hf_writer.flag([query, result])
    return result

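# Hypothetical direct call, mirroring how gr.ChatInterface invokes the handler:
#   answer_question("Can SEA.AI see at night?", [], "You are a helpful assistant.")
# would return the cleaned answer string and log the (query, result) pair via hf_writer.
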
title = "✨ SEA Dog"
description = """
<p align="center">
DISCLAIMERS
<br>
I can't remember conversations yet, be patient with me.
<br>
Your queries will be saved to
<a href='https://huggingface.co/datasets/SEA-AI/seadog-chat-history'>this dataset</a>
for analytics purposes.
</p>
"""
css = """
h1 {
    text-align: center;
    display: block;
}
"""
theme = gr.themes.Default(primary_hue=gr.themes.colors.indigo)
chatbot = gr.Chatbot(
    value=[
        [
            None,
            "I have memorized the entire SEA.AI FAQ page. Ask me anything about it! 🧠",
        ],
    ],
    show_label=False,
    show_copy_button=True,
    likeable=True,
)
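
# NOTE: the chatbot opens with a greeting message (the user turn is None), and
# `likeable=True` enables the thumbs up/down events handled by `on_like` below.
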
def on_like(evt: gr.LikeData):
    print(f"{evt.index=}, {evt.value=}, {evt.liked=}")

with gr.ChatInterface(
    answer_question,
    chatbot=chatbot,
    title=title,
    description=description,
    additional_inputs=[gr.Textbox("", label="SYSTEM")],
    examples=[
        ["Can SEA.AI see at night?", "You are a helpful assistant."],
        ["Can SEA.AI see at night?", "Reply with sailor slang."],
    ],
    submit_btn=None,
    retry_btn=None,
    undo_btn=None,
    clear_btn=None,
    css=css,
    theme=theme,
) as demo:
    # on page load, re-download the HTML and save it to a file
    demo.load(lambda: download_html(url, filename))

    # This needs to be called prior to the first call to hf_writer.flag()
    hf_writer.setup([demo.textbox, demo.chatbot], "flagged")

    # like callback
    chatbot.like(on_like, None, None)

if __name__ == "__main__":
    demo.launch()