Update app.py
app.py CHANGED
@@ -68,11 +68,11 @@ with gr.Blocks() as demo:
     with gr.Column():
         gr.Markdown(f"""
             ## This demo is an unquantized GPU chatbot of [OpenOrcaxOpenChat-Preview2-13B](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B)
-            Brought to you by your friends at Alignment Lab AI,
+            Brought to you by your friends at Alignment Lab AI, OpenChat, and Open Access AI Collective!
             """)
     with gr.Tab("Chatbot"):
         gr.Markdown("# π OpenOrca x OpenChat - Preview2 - 13B Playground Space! π")
-        chatbot = gr.Chatbot().style(height=
+        chatbot = gr.Chatbot().style(height=500)
         with gr.Row():
             message = gr.Textbox(
                 label="What do you want to chat about?",
@@ -85,7 +85,7 @@ with gr.Blocks() as demo:
         stop = gr.Button(value="Stop", variant="secondary").style(full_width=False)
     with gr.Row():
         with gr.Column():
-            max_tokens = gr.Slider(20, 1000, label="Max Tokens", step=20, value=
+            max_tokens = gr.Slider(20, 1000, label="Max Tokens", step=20, value=500)
             temperature = gr.Slider(0.2, 2.0, label="Temperature", step=0.1, value=0.8)
             top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95)
             top_k = gr.Slider(0, 100, label="Top K", step=1, value=40)
@@ -105,4 +105,4 @@ with gr.Blocks() as demo:
     )
     stop.click(fn=None, inputs=None, outputs=None, cancels=[submit_click_event], queue=False)
 
-demo.queue(max_size=
+demo.queue(max_size=128, concurrency_count=48).launch(debug=True, server_name="0.0.0.0", server_port=7860)
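For context, the pattern this commit completes can be sketched as a minimal, self-contained Gradio 3.x app (the version implied by .style() and concurrency_count). The handler chat, the Send button, and the echo reply below are placeholders rather than the Space's actual generation code; the component settings and the queue()/launch() arguments mirror the values in the diff.

# Minimal sketch, assuming Gradio 3.x. "chat", "submit", and the echo reply are
# placeholders, not the Space's real model code; slider and queue values match the diff.
import gradio as gr

def chat(message, history, max_tokens, temperature, top_p, top_k):
    # Placeholder: echo the prompt instead of calling the 13B model.
    # temperature/top_p/top_k are accepted but unused here.
    history = history + [(message, f"(echo) {message}")]
    return "", history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot().style(height=500)  # fixed-height chat window
    message = gr.Textbox(label="What do you want to chat about?")
    with gr.Row():
        submit = gr.Button(value="Send", variant="primary")
        stop = gr.Button(value="Stop", variant="secondary").style(full_width=False)
    with gr.Row():
        with gr.Column():
            max_tokens = gr.Slider(20, 1000, label="Max Tokens", step=20, value=500)
            temperature = gr.Slider(0.2, 2.0, label="Temperature", step=0.1, value=0.8)
            top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95)
            top_k = gr.Slider(0, 100, label="Top K", step=1, value=40)

    # Keep a handle on the click event so Stop can cancel an in-flight generation.
    submit_click_event = submit.click(
        fn=chat,
        inputs=[message, chatbot, max_tokens, temperature, top_p, top_k],
        outputs=[message, chatbot],
    )
    stop.click(fn=None, inputs=None, outputs=None, cancels=[submit_click_event], queue=False)

# queue() bounds pending requests and parallel workers; launch() exposes the app
# on all interfaces at port 7860, as in the updated line.
demo.queue(max_size=128, concurrency_count=48).launch(debug=True, server_name="0.0.0.0", server_port=7860)

Note that cancels=[submit_click_event] only works for events routed through the queue, which is why the Stop button itself is set queue=False while the app as a whole enables demo.queue().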