MarziehFadaee committed: Update app.py
app.py CHANGED
@@ -3,6 +3,8 @@ import cohere
 import os
 import re
 import uuid
+import secrets
+
 
 
 cohere_api_key = os.getenv("COHERE_API_KEY")
@@ -13,8 +15,11 @@ def trigger_example(example):
     chat, updated_history = generate_response(example)
     return chat, updated_history
 
-def generate_response(user_message, cid, history=None):
-
+def generate_response(user_message, cid, token, history=None):
+
+    if not token:
+        raise gr.Error("Error loading.")
+
     if history is None:
         history = []
     if cid == "" or None:
@@ -78,7 +83,8 @@ custom_css = """
 
 with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
     cid = gr.State("")
-
+    token = gr.State(value=None)
+
     with gr.Row():
         with gr.Column(scale=1):
             gr.Image("logoplus.png", elem_id="logo-img", show_label=False, show_share_button=False, show_download_button=False)
@@ -108,9 +114,9 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
 
     history = gr.State([])
 
-    user_message.submit(fn=generate_response, inputs=[user_message, cid, history], outputs=[chatbot, history, cid], concurrency_limit=32)
-
-
+    user_message.submit(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)
+    submit_button.click(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)
+
     clear_button.click(fn=clear_chat, inputs=None, outputs=[chatbot, history, cid], concurrency_limit=32)
 
     user_message.submit(lambda x: gr.update(value=""), None, [user_message], queue=False)
@@ -127,6 +133,8 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
         examples_per_page=100
    )
 
+    demo.load(lambda: secrets.token_hex(16), None, token)
+
 if __name__ == "__main__":
     # demo.launch(debug=True)
     try:
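Taken together, the commit threads a per-session token through the app: demo.load seeds a gr.State with secrets.token_hex(16) when the page loads, and generate_response refuses to run without it. Below is a minimal, self-contained sketch of that pattern, assuming Gradio's Blocks API; the echo reply and the names respond and box are placeholders standing in for the Space's Cohere call and real layout.

import secrets

import gradio as gr

def respond(user_message, token, history):
    # Mirror the commit's guard: without a token seeded by demo.load,
    # the handler refuses to run (e.g. for bare requests that skip page load).
    if not token:
        raise gr.Error("Error loading.")
    history = history or []
    history.append((user_message, f"echo: {user_message}"))  # placeholder for the Cohere reply
    return history, history

with gr.Blocks() as demo:
    token = gr.State(value=None)   # empty until the page load event fires
    history = gr.State([])
    chatbot = gr.Chatbot()
    box = gr.Textbox(label="Message")

    box.submit(respond, inputs=[box, token, history], outputs=[chatbot, history])
    # Seed the per-session secret once the browser session starts.
    demo.load(lambda: secrets.token_hex(16), None, token)

if __name__ == "__main__":
    demo.launch()

Because the token lives in gr.State, each browser session gets its own value, which is what lets the gr.Error guard distinguish a page that actually loaded from a request that bypassed the UI.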