import os

import gradio as gr

from bagoodex_client import BagoodexClient
from r_types import ChatMessage
from prompts import (
    SYSTEM_PROMPT_FOLLOWUP,
    SYSTEM_PROMPT_MAP,
    SYSTEM_PROMPT_BASE,
    SYSTEM_PROMPT_KNOWLEDGE_BASE
)
from helpers import (
    embed_video,
    format_links,
    embed_google_map,
    format_knowledge,
    format_followup_questions
)
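# The local modules above are not shown in this file. Judging purely from how
# they are called below, BagoodexClient is assumed to expose roughly this
# interface (a sketch of the expected shapes, not the actual implementation):
#
#   complete_chat(message)            -> (followup_id, answer_text)
#   base_qna(messages, system_prompt) -> model output shaped by the given prompt
#   get_local_map(followup_id)        -> dict with a 'link' field
#   get_knowledge(followup_id)        -> knowledge-graph payload
#   get_images(followup_id)           -> list of dicts with an 'original' URL
#   get_videos(followup_id)           -> video search results
#   get_links(followup_id)            -> link search results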
client = BagoodexClient()

# ----------------------------
# Chat & Follow-up Functions
# ----------------------------
def chat_function(message, history, followup_state, chat_history_state):
    """
    Process a new user message.
    Appends the message and response to the conversation,
    and retrieves follow-up questions.
    """
    # complete_chat returns a new followup id and the answer text
    followup_id_new, answer = client.complete_chat(message)
    # Update conversation history (if history is None, start from an empty list)
    if history is None:
        history = []
    updated_history = history + [
        ChatMessage({"role": "user", "content": message}),
        ChatMessage({"role": "assistant", "content": answer}),
    ]
    # Retrieve follow-up questions using the updated conversation
    followup_questions_raw = client.base_qna(
        messages=updated_history, system_prompt=SYSTEM_PROMPT_FOLLOWUP
    )
    # Format them using the helper
    followup_md = format_followup_questions(followup_questions_raw)
    return answer, followup_id_new, updated_history, followup_md
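# The return tuple above maps, in order, to the ChatInterface reply plus the
# additional_outputs wired up in the UI below: followup_state,
# chat_history_state, and followup_md_state.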
def handle_followup_click(question, followup_state, chat_history_state):
    """
    When a follow-up question is clicked, send it as a new message.
    """
    if not question:
        return chat_history_state, followup_state, ""
    # Process the follow-up question via complete_chat
    followup_id_new, answer = client.complete_chat(question)
    updated_history = chat_history_state + [
        ChatMessage({"role": "user", "content": question}),
        ChatMessage({"role": "assistant", "content": answer}),
    ]
    # Get new follow-up questions
    followup_questions_raw = client.base_qna(
        messages=updated_history, system_prompt=SYSTEM_PROMPT_FOLLOWUP
    )
    followup_md = format_followup_questions(followup_questions_raw)
    return updated_history, followup_id_new, followup_md
def handle_local_map_click(followup_state, chat_history_state):
    """
    On local map click, try to get a local map.
    If nothing usable comes back, fall back to a base_qna call
    with SYSTEM_PROMPT_MAP.
    """
    if not followup_state:
        return chat_history_state
    try:
        result = client.get_local_map(followup_state)
        if result and result.get('link'):
            # Use the helper to produce an embedded map iframe
            html = embed_google_map(result['link'])
        else:
            # Fall back: use the base_qna call with SYSTEM_PROMPT_MAP
            result = client.base_qna(
                messages=chat_history_state, system_prompt=SYSTEM_PROMPT_MAP
            )
            # Assume the result contains a 'link' field
            html = embed_google_map(result.get('link', ''))
        new_message = ChatMessage({"role": "assistant", "content": html})
        return chat_history_state + [new_message]
    except Exception:
        return chat_history_state
def handle_knowledge_click(followup_state, chat_history_state):
    """
    On knowledge base click, fetch and format knowledge content.
    """
    if not followup_state:
        return chat_history_state
    try:
        print('trying to get knowledge')
        result = client.get_knowledge(followup_state)
        knowledge_md = format_knowledge(result)
        if not knowledge_md:
            # format_knowledge returned nothing usable
            print('falling back to base_qna')
            # Fall back: use the base_qna call with SYSTEM_PROMPT_KNOWLEDGE_BASE
            result = client.base_qna(
                messages=chat_history_state, system_prompt=SYSTEM_PROMPT_KNOWLEDGE_BASE
            )
            knowledge_md = format_knowledge(result)
        new_message = ChatMessage({"role": "assistant", "content": knowledge_md})
        return chat_history_state + [new_message]
    except Exception:
        return chat_history_state
# ----------------------------
# Advanced Search Functions
# ----------------------------
def perform_image_search(followup_state):
    if not followup_state:
        return []
    result = client.get_images(followup_state)
    # For images we simply return a list of original URLs
    return [item.get("original", "") for item in result]
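# gr.Gallery accepts a plain list of image URLs (or local paths), so the list
# returned above can be passed to the gallery output as-is.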
def perform_video_search(followup_state):
    if not followup_state:
        return "<p>No followup ID available.</p>"
    result = client.get_videos(followup_state)
    # Use the helper to produce the embed iframes (supports multiple videos)
    return embed_video(result)

def perform_links_search(followup_state):
    if not followup_state:
        return gr.Markdown("No followup ID available.")
    result = client.get_links(followup_state)
    return format_links(result)
# ----------------------------
# UI Build
# ----------------------------
css = """
#chatbot {
    height: 100%;
}
h1, h2, h3, h4, h5, h6 {
    text-align: center;
    display: block;
}
"""
# Default queries to try:
#   how to make a slingshot?
#   who created light (e.g., electricity), Tesla or Edison, in short?
with gr.Blocks(css=css) as demo:
    gr.HTML("""
    <div style="text-align: center; font-size: 32px; font-weight: bold; margin-bottom: 20px;">
        Like Perplexity, but more precise.
    </div>
    <div style="text-align: center; display: flex; justify-content: center; align-items: center; margin-top: 1em; margin-bottom: .5em;">
        <span>Built by</span>
        <a href="https://yaps.gg" target="_blank" style="display: flex; align-items: center; margin-right: .75em;">
            <img src="https://huggingface.co/spaces/AimlAPI/Bagoodex-Web-Search/resolve/main/assets/new-profile-photo%202.jpeg" width="40" style="margin-left: .75em; margin-right: .75em; background-color: white; border-radius: 10px; padding: 1px; margin-right: 1px;"/>
            <span style="margin-left: .5em;">@abdibrokhim</span>
        </a>
    </div>
    <div style="text-align: center; display: flex; justify-content: center; align-items: center; margin-top: 1em; margin-bottom: .5em;">
        <a href="https://aimlapi.com/models/bagoodex-search-v1-api/?utm_source=bagoodex&utm_medium=huggingface&utm_campaign=tutorials">
            <img src="https://huggingface.co/spaces/AimlAPI/Bagoodex-Web-Search/resolve/main/assets/powered%20by%20aimlapi.svg" width="160" style="margin-left: .75em; margin-right: .75em; background-color: white; border-radius: 10px; padding: 5px;"/>
        </a>
    </div>
    """)
    # State variables to hold the followup ID, the conversation history,
    # and the follow-up questions text
    followup_state = gr.State(None)
    chat_history_state = gr.State([])   # conversation history as a list of messages
    followup_md_state = gr.State("")    # follow-up questions as Markdown text
    with gr.Row():
        with gr.Column(scale=3):
            with gr.Row():
                btn_local_map = gr.Button("Local Map Search (coming soon...)", variant="secondary", size="sm", interactive=False)
                btn_knowledge = gr.Button("Knowledge Base (coming soon...)", variant="secondary", size="sm", interactive=False)

            # The ChatInterface uses additional outputs for both followup_state
            # and the conversation history, plus the follow-up questions Markdown.
            chat = gr.ChatInterface(
                fn=chat_function,
                type="messages",
                additional_inputs=[followup_state, chat_history_state],
                additional_outputs=[followup_state, chat_history_state, followup_md_state],
            )
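            # chat.chatbot is the underlying gr.Chatbot component inside the
            # ChatInterface; the button callbacks below append their results
            # directly to it.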
            # Button callbacks to append local map and knowledge base results to the chat
            btn_local_map.click(
                fn=handle_local_map_click,
                inputs=[followup_state, chat_history_state],
                outputs=chat.chatbot
            )
            btn_knowledge.click(
                fn=handle_knowledge_click,
                inputs=[followup_state, chat_history_state],
                outputs=chat.chatbot
            )

            # Radio-based follow-up questions
            followup_radio = gr.Radio(
                choices=[],
                label="Follow-up Questions (select one and click 'Send Follow-up')"
            )
            btn_send_followup = gr.Button("Send Follow-up")

            # When the user clicks "Send Follow-up", the selected question is passed
            # to handle_followup_click
            btn_send_followup.click(
                fn=handle_followup_click,
                inputs=[followup_radio, followup_state, chat_history_state],
                outputs=[chat.chatbot, followup_state, followup_md_state]
            )
            # Update the radio choices when followup_md_state changes
            def update_followup_radio(md_text):
                """
                Parse Markdown lines to extract questions starting with '- '.
                """
                lines = md_text.splitlines()
                questions = []
                for line in lines:
                    if line.startswith("- "):
                        questions.append(line[2:])
                return gr.update(choices=questions, value=None)
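            # format_followup_questions (from helpers) is assumed to emit a plain
            # Markdown bullet list, e.g.:
            #   - How does a slingshot store energy?
            #   - What materials work best for the bands?
            # The parser above relies only on the leading "- " of each line.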
            followup_md_state.change(
                fn=update_followup_radio,
                inputs=[followup_md_state],
                outputs=[followup_radio]
            )
        with gr.Column(scale=1):
            gr.Markdown("### Advanced Search Options")
            with gr.Column(variant="panel"):
                btn_images = gr.Button("Search Images")
                btn_videos = gr.Button("Search Videos")
                btn_links = gr.Button("Search Links")
            gallery_output = gr.Gallery(label="Image Results", columns=2)
            video_output = gr.HTML(label="Video Results")  # HTML for embedded video iframes
            links_output = gr.Markdown(label="Links Results")

            btn_images.click(
                fn=perform_image_search,
                inputs=[followup_state],
                outputs=[gallery_output]
            )
            btn_videos.click(
                fn=perform_video_search,
                inputs=[followup_state],
                outputs=[video_output]
            )
            btn_links.click(
                fn=perform_links_search,
                inputs=[followup_state],
                outputs=[links_output]
            )
demo.launch()
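# When running locally (outside Hugging Face Spaces), demo.launch(share=True)
# can be used instead to expose a temporary public URL for testing.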