Commit
·
8b14269
1
Parent(s):
fc0b160
update dependencies, cache_examples=False in gr.ChatInterface
Browse files- app.py +7 -6
- flagging.py +8 -3
app.py
CHANGED
@@ -3,7 +3,8 @@ import gradio as gr
|
|
3 |
from huggingface_hub import get_token
|
4 |
|
5 |
from chatbot import get_retrieval_qa
|
6 |
-
from flagging import myHuggingFaceDatasetSaver
|
|
|
7 |
|
8 |
|
9 |
# get the html data and save it to a file
|
@@ -22,20 +23,18 @@ qa = get_retrieval_qa(filename)
|
|
22 |
|
23 |
# dataset callback
|
24 |
dataset_name = "SEA-AI/seadog-chat-history"
|
25 |
-
hf_writer =
|
26 |
|
27 |
|
28 |
def answer_question(message, history, system):
|
29 |
# concatenate the history, message and system
|
30 |
query = " ".join([message, system])
|
31 |
retrieval_qa = qa.invoke(query)
|
32 |
-
result = retrieval_qa["result"]
|
33 |
result = result.replace('"', "").strip() # clean up the result
|
34 |
-
# query = retrieval_qa["query"]
|
35 |
-
# source_documents = retrieval_qa["source_documents"]
|
36 |
|
37 |
# save the query and result to the dataset
|
38 |
-
hf_writer.flag([query, result])
|
39 |
return result
|
40 |
|
41 |
|
@@ -68,6 +67,7 @@ chatbot = gr.Chatbot(
|
|
68 |
"I have memorized the entire SEA.AI FAQ page. Ask me anything about it! 🧠",
|
69 |
],
|
70 |
],
|
|
|
71 |
show_label=False,
|
72 |
show_copy_button=True,
|
73 |
likeable=True,
|
@@ -88,6 +88,7 @@ with gr.ChatInterface(
|
|
88 |
["Can SEA.AI see at night?", "You are a helpful assistant."],
|
89 |
["Can SEA.AI see at night?", "Reply with sailor slang."],
|
90 |
],
|
|
|
91 |
submit_btn=None,
|
92 |
retry_btn=None,
|
93 |
undo_btn=None,
|
|
|
3 |
from huggingface_hub import get_token
|
4 |
|
5 |
from chatbot import get_retrieval_qa
|
6 |
+
from flagging import myHuggingFaceDatasetSaver as HuggingFaceDatasetSaver
|
7 |
+
#from gradio.flagging import HuggingFaceDatasetSaver
|
8 |
|
9 |
|
10 |
# get the html data and save it to a file
|
|
|
23 |
|
24 |
# dataset callback
|
25 |
dataset_name = "SEA-AI/seadog-chat-history"
|
26 |
+
hf_writer = HuggingFaceDatasetSaver(get_token(), dataset_name)
|
27 |
|
28 |
|
29 |
def answer_question(message, history, system):
    """Answer a user message with the retrieval-QA chain and log the exchange.

    Note: `history` is required by the gr.ChatInterface callback signature
    but is not used here.
    """
    # Fold the system prompt into the user message to form one query.
    combined_query = " ".join([message, system])
    qa_output = qa.invoke(combined_query)
    # qa_output also exposes "query" and "source_documents".
    answer = qa_output["result"]
    answer = answer.replace('"', "").strip()  # clean up the result

    # Persist the query/answer pair to the HF dataset via the flagging callback.
    hf_writer.flag(flag_data=[combined_query, answer])
    return answer
|
39 |
|
40 |
|
|
|
67 |
"I have memorized the entire SEA.AI FAQ page. Ask me anything about it! 🧠",
|
68 |
],
|
69 |
],
|
70 |
+
label="SEA Dog",
|
71 |
show_label=False,
|
72 |
show_copy_button=True,
|
73 |
likeable=True,
|
|
|
88 |
["Can SEA.AI see at night?", "You are a helpful assistant."],
|
89 |
["Can SEA.AI see at night?", "Reply with sailor slang."],
|
90 |
],
|
91 |
+
cache_examples=False,
|
92 |
submit_btn=None,
|
93 |
retry_btn=None,
|
94 |
undo_btn=None,
|
flagging.py
CHANGED
@@ -3,6 +3,7 @@ from pathlib import Path
|
|
3 |
from typing import Any
|
4 |
import gradio as gr
|
5 |
from gradio.flagging import HuggingFaceDatasetSaver, client_utils
|
|
|
6 |
import huggingface_hub
|
7 |
|
8 |
|
@@ -40,13 +41,17 @@ class myHuggingFaceDatasetSaver(HuggingFaceDatasetSaver):
|
|
40 |
if isinstance(component, gr.Chatbot):
|
41 |
deserialized = sample # dirty fix
|
42 |
else:
|
43 |
-
deserialized =
|
|
|
|
|
44 |
|
45 |
# Add deserialized object to row
|
46 |
features[label] = {"dtype": "string", "_type": "Value"}
|
47 |
try:
|
48 |
-
|
49 |
-
|
|
|
|
|
50 |
except (AssertionError, TypeError, ValueError, OSError):
|
51 |
deserialized = "" if deserialized is None else str(deserialized)
|
52 |
row.append(deserialized)
|
|
|
3 |
from typing import Any
|
4 |
import gradio as gr
|
5 |
from gradio.flagging import HuggingFaceDatasetSaver, client_utils
|
6 |
+
from gradio import utils
|
7 |
import huggingface_hub
|
8 |
|
9 |
|
|
|
41 |
if isinstance(component, gr.Chatbot):
|
42 |
deserialized = sample # dirty fix
|
43 |
else:
|
44 |
+
deserialized = utils.simplify_file_data_in_str(
|
45 |
+
component.flag(sample, save_dir)
|
46 |
+
)
|
47 |
|
48 |
# Add deserialized object to row
|
49 |
features[label] = {"dtype": "string", "_type": "Value"}
|
50 |
try:
|
51 |
+
deserialized_path = Path(deserialized)
|
52 |
+
if not deserialized_path.exists():
|
53 |
+
raise FileNotFoundError(f"File {deserialized} not found")
|
54 |
+
row.append(str(deserialized_path.relative_to(self.dataset_dir)))
|
55 |
except (AssertionError, TypeError, ValueError, OSError):
|
56 |
deserialized = "" if deserialized is None else str(deserialized)
|
57 |
row.append(deserialized)
|