Update app.py
app.py CHANGED

@@ -110,9 +110,9 @@ def get_answer(query, history, temperature, top_p, max_token_count):
 Additionally, only if there is sufficient remaining token capacity, provide 2 or 3 related questions that the user might want to ask next based on the topic.
 If possible, use the following structure for these suggested questions:
 Here are some related questions you might be interested in:
-<button style="background-color: #4CAF50; color: white; padding: 10px; margin: 5px; border: none; border-radius: 5px; cursor: pointer; font-size: 16px;">First related question</button>
-<button style="background-color: #4CAF50; color: white; padding: 10px; margin: 5px; border: none; border-radius: 5px; cursor: pointer; font-size: 16px;">Second related question</button>
-<button style="background-color: #4CAF50; color: white; padding: 10px; margin: 5px; border: none; border-radius: 5px; cursor: pointer; font-size: 16px;">Third related question</button>
+<button onclick="document.getElementById('input_txt').value = this.innerHTML; document.getElementById('btn_submit').click();" style="background-color: #4CAF50; color: white; padding: 10px; margin: 5px; border: none; border-radius: 5px; cursor: pointer; font-size: 16px;">First related question</button>
+<button onclick="document.getElementById('input_txt').value = this.innerHTML; document.getElementById('btn_submit').click();" style="background-color: #4CAF50; color: white; padding: 10px; margin: 5px; border: none; border-radius: 5px; cursor: pointer; font-size: 16px;">Second related question</button>
+<button onclick="document.getElementById('input_txt').value = this.innerHTML; document.getElementById('btn_submit').click();" style="background-color: #4CAF50; color: white; padding: 10px; margin: 5px; border: none; border-radius: 5px; cursor: pointer; font-size: 16px;">Third related question</button>
 
 Ensure these suggested questions are brief, relevant, and encourage further exploration on the topic.
 Assistant:
@@ -186,7 +186,7 @@ def main():
     with gr.Blocks() as demo:
         chatbot = gr.Chatbot([[None, initial_message]], height=550)
 
-        msg = gr.Textbox(label="Question")
+        msg = gr.Textbox(label="Question", elem_id="input_txt")
 
         with gr.Accordion(label="Advanced options", open=False):
             temperature = gr.Slider(label="Temperature", minimum=0.1, maximum=1, value=0.6, step=0.1)
@@ -194,7 +194,7 @@ def main():
             max_token_count = gr.Slider(label="Max token count", minimum=1, maximum=1024, value=400, step=10)
         prompt_and_time = gr.Textbox(label="Prompt and Time", interactive=False)
 
-        btn = gr.Button("Submit")
+        btn = gr.Button("Submit", elem_id="btn_submit")
         clear = gr.Button("Clear console")
 
         btn.click(respond, inputs=[msg, chatbot, temperature, top_p, max_token_count], outputs=[msg, chatbot, prompt_and_time])
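For context on what the elem_id additions are for: the model returns the suggested-question buttons as raw HTML inside the chatbot message, and their onclick handlers look up the question box and the submit button by DOM id. Below is a minimal, hypothetical sketch of that wiring, not the full app.py; apart from input_txt and btn_submit, the names are made up for illustration. Two caveats, depending on the Gradio version: elem_id may land on a wrapping element rather than the inner textarea or button, and recent Gradio releases sanitize inline event handlers out of chatbot HTML, so treat this as an illustration of the idea rather than a guaranteed-working snippet.

# hypothetical_demo.py: stripped-down sketch of the elem_id wiring in this commit
import gradio as gr

# onclick string copied from the prompt template in the diff above
ONCLICK = (
    "document.getElementById('input_txt').value = this.innerHTML; "
    "document.getElementById('btn_submit').click();"
)

def respond(message, history):
    # Echo the question and append one clickable suggested follow-up,
    # rendered as HTML inside the chatbot message.
    suggestion = (
        f'<button onclick="{ONCLICK}" '
        'style="background-color: #4CAF50; color: white; padding: 10px;">'
        "Tell me more about this topic</button>"
    )
    history = history + [[message, f"Echo: {message}<br>{suggestion}"]]
    return "", history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(height=550)
    # elem_id gives these components stable DOM ids for the onclick above to target
    msg = gr.Textbox(label="Question", elem_id="input_txt")
    btn = gr.Button("Submit", elem_id="btn_submit")
    btn.click(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()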