|
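"""Gradio chat demo that streams answers from the LLM helpers in gradio_app.backend.query_llm."""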
import logging
from time import perf_counter

import gradio as gr

# The star import supplies the LLM helpers used below (get_message_constructor, get_llm_generator).
from gradio_app.backend.query_llm import *

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
|
def add_text(history, text):
    # Append the new user message with an empty bot reply and lock the textbox while generating.
    history = [] if history is None else history
    history = history + [(text, "")]
    return history, gr.Textbox(value="", interactive=False)
|
|
def bot(history):
    # Gradio passes the chat history as a list of [user, bot] pairs; stream the answer into the last one.
    history[-1][1] = ""
    query = history[-1][0]

    if not query:
        raise gr.Error("Empty string was submitted")

    llm = 'gpt-4-turbo-preview'
    messages = get_message_constructor(llm)('', history)

    llm_gen = get_llm_generator(llm)
    logger.info('Generating answer...')
    t = perf_counter()
    for part in llm_gen(messages):
        history[-1][1] += part
        yield history
    t = perf_counter() - t
    logger.info(f'Finished generating answer in {round(t, 2)} seconds.')
|
|
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot(
                [],
                elem_id="chatbot",
                avatar_images=('https://aui.atlassian.com/aui/8.8/docs/images/avatar-person.svg',
                               'https://huggingface.co/datasets/huggingface/brand-assets/resolve/main/hf-logo.svg'),
                bubble_full_width=False,
                show_copy_button=True,
                show_share_button=True,
                height=800,
            )
        with gr.Column():
            input_textbox = gr.Textbox(
                interactive=True,
                show_label=False,
                placeholder="Enter text and press enter",
                container=False,
                autofocus=True,
                lines=35,
                max_lines=100,
            )
            txt_btn = gr.Button(value="Send", scale=1)
|
    # Pressing Enter: add the user message, stream the bot reply, then re-enable the textbox.
    txt_msg = input_textbox.submit(
        add_text, [chatbot, input_textbox], [chatbot, input_textbox], queue=False).then(
        bot, [chatbot], [chatbot]
    )
    txt_msg.then(lambda: gr.Textbox(interactive=True), None, [input_textbox], queue=False)

    # Clicking "Send" runs the same chain as pressing Enter.
    txt_msg = txt_btn.click(
        add_text, [chatbot, input_textbox], [chatbot, input_textbox], queue=False).then(
        bot, [chatbot], [chatbot]
    )
    txt_msg.then(lambda: gr.Textbox(interactive=True), None, [input_textbox], queue=False)

demo.queue()
demo.launch(debug=True)
|
|