Spaces:
ray committed
Commit 61ec090 · Parent(s): 89acc00
fix layout issue on hf space
app.py
CHANGED
@@ -192,14 +192,7 @@ with gr.Blocks() as demo:
 
     gr.Markdown("# Awesum Care demo")
 
-    gr.Markdown("instructions:\n"
-                "\nUsing model gpt-4-preview-1106, the most advanced model now in the market.\n"
-                "\n(Note that it can be much slower than gpt-3.5, openai's api can be unstable sometimes.)\n"
-                "\nThree Tabs:\n"
-                "1. Relevant context: retreiving relevant documents and send to ChatGPT.\n"
-                "2. Give tools to chatgpt to retrieve context: the most advanced, slowest (>30s to use the tools, before answering).\n"
-                "3. Vanilla ChatGPT: self-explanatory.\n"
-                )
+
 
     # with gr.Row():
     #     model_selector = gr.Radio(
@@ -228,6 +221,14 @@ with gr.Blocks() as demo:
         awesum_chatbot_simple.stream_chat,
         examples=awesum_chatbot.CHAT_EXAMPLES)
 
+    gr.Markdown("instructions:\n"
+                "\nUsing model gpt-4-preview-1106, the most advanced model now in the market.\n"
+                "\n(Note that it can be much slower than gpt-3.5, openai's api can be unstable sometimes.)\n"
+                "\nThree Tabs:\n"
+                "1. Relevant context: retreiving relevant documents and send to ChatGPT.\n"
+                "2. Give tools to chatgpt to retrieve context: the most advanced, slowest (>30s to use the tools, before answering).\n"
+                "3. Vanilla ChatGPT: self-explanatory.\n"
+                )
     # @model_selector.change(inputs=[model_selector, chatbot], outputs=[context_interface, function_call_interface, vanilla_interface])
     # def switch_model(model_name, my_chatbot):
     #     print(model_name)
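In short, the commit moves the "instructions" gr.Markdown block from just under the page title to below the chat tabs. Below is a minimal sketch of the resulting layout, assuming the Space builds its tabs with gr.Tab inside gr.Blocks; the tab names and placeholder contents are illustrative, not taken from app.py.

import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown("# Awesum Care demo")

    # The three chat tabs render first (names are placeholders; the real
    # app wires awesum_chatbot*.stream_chat into these tabs).
    with gr.Tab("Relevant context"):
        gr.Markdown("chat interface here")
    with gr.Tab("Tools for ChatGPT"):
        gr.Markdown("chat interface here")
    with gr.Tab("Vanilla ChatGPT"):
        gr.Markdown("chat interface here")

    # After this commit, the instructions Markdown is emitted here,
    # below the tabs, instead of above them.
    gr.Markdown("instructions:\n"
                "\nThree Tabs: ...\n")

demo.launch()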