Update apps/agents/agents.py
apps/agents/agents.py  CHANGED  (+18 -27)

@@ -24,15 +24,14 @@ from typing import Any, Dict, List, Optional, Tuple, Union
 
 import gradio as gr
 import openai
-
-
+import openai.error
 import tenacity
 
 from apps.agents.text_utils import split_markdown_code
 from camel.agents import TaskSpecifyAgent
 from camel.messages import BaseMessage
 from camel.societies import RolePlaying
-from camel.
+from camel.typing import TaskType
 
 REPO_ROOT = os.path.realpath(
     os.path.join(os.path.dirname(os.path.abspath(__file__)), "../.."))
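
Note on the import hunk above (context, not part of the patch): openai.error is the exceptions module of the pre-1.0 openai Python SDK, which is why the handlers in the next hunks can refer to openai.error.RateLimitError; openai >= 1.0 exposes the same class as openai.RateLimitError instead. A minimal, hypothetical compatibility shim covering just those two layouts might look like this:

import openai

try:
    # openai < 1.0: exception classes live in the openai.error module
    from openai.error import RateLimitError
except ImportError:
    # openai >= 1.0: exception classes are exported at the top level
    from openai import RateLimitError
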
@@ -197,8 +196,8 @@ def role_playing_start(
             extend_task_specify_meta_dict=meta_dict,
             output_language=language,
         )
-    except (RateLimitError, tenacity.RetryError,
-            RuntimeError
+    except (openai.error.RateLimitError, tenacity.RetryError,
+            RuntimeError) as ex:
         print("OpenAI API exception 0 " + str(ex))
         return (state, str(ex), "", [], gr.update())
 
@@ -247,9 +246,9 @@ def role_playing_chat_init(state) -> \
 
     try:
         init_assistant_msg: BaseMessage
-        init_assistant_msg = session.init_chat()
-    except (RateLimitError, tenacity.RetryError,
-            RuntimeError
+        init_assistant_msg, _ = session.init_chat()
+    except (openai.error.RateLimitError, tenacity.RetryError,
+            RuntimeError) as ex:
         print("OpenAI API exception 1 " + str(ex))
         state.session = None
         return state, state.chat, gr.update()
@@ -290,8 +289,8 @@ def role_playing_chat_cont(state) -> \
     try:
         assistant_response, user_response = session.step(
             state.saved_assistant_msg)
-    except (RateLimitError, tenacity.RetryError,
-            RuntimeError
+    except (openai.error.RateLimitError, tenacity.RetryError,
+            RuntimeError) as ex:
         print("OpenAI API exception 2 " + str(ex))
         state.session = None
         return state, state.chat, gr.update(), gr.update()
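
Illustrative sketch (not code from this repo) of why the three handlers above catch tenacity.RetryError together with the rate-limit error: when an API call is wrapped in a tenacity retry policy, the underlying exception resurfaces wrapped in tenacity.RetryError once the retries are exhausted. The helper name call_model, the retry parameters, and the gpt-3.5-turbo model are placeholders; the sketch assumes openai < 1.0:

import openai
import openai.error
import tenacity


@tenacity.retry(stop=tenacity.stop_after_attempt(3),
                wait=tenacity.wait_exponential(min=1, max=10))
def call_model(prompt):
    # Stand-in for the chat completion request the agents make (openai < 1.0).
    resp = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
    )
    return resp["choices"][0]["message"]["content"]


try:
    call_model("Hello")
except (openai.error.RateLimitError, tenacity.RetryError, RuntimeError) as ex:
    # Mirrors the handlers above: after the retries give up, the original
    # rate-limit error arrives wrapped in tenacity.RetryError.
    print("OpenAI API exception " + str(ex))
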
@@ -400,16 +399,14 @@ def construct_ui(blocks, api_key: Optional[str] = None) -> None:
             assistant_dd = gr.Dropdown(default_society['assistant_roles'],
                 label="Example assistant roles",
                 value=default_society['assistant_role'],
-                interactive=True
-                allow_custom_value=True)
+                interactive=True)
             assistant_ta = gr.TextArea(label="Assistant role (EDIT ME)",
                 lines=1, interactive=True)
         with gr.Column(scale=2):
             user_dd = gr.Dropdown(default_society['user_roles'],
                 label="Example user roles",
                 value=default_society['user_role'],
-                interactive=True
-                allow_custom_value=True)
+                interactive=True)
             user_ta = gr.TextArea(label="User role (EDIT ME)", lines=1,
                 interactive=True)
         with gr.Column(scale=2):
@@ -419,7 +416,8 @@ def construct_ui(blocks, api_key: Optional[str] = None) -> None:
                 "Github repo: [https://github.com/lightaime/camel]"
                 "(https://github.com/lightaime/camel)"
                 '<div style="display:flex; justify-content:center;">'
-                '<img src="https://raw.githubusercontent.com/
+                '<img src="https://raw.githubusercontent.com/lightaime/camel/'
+                'master/misc/logo.png" alt="Logo" style="max-width:50%;">'
                 '</div>')
         with gr.Row():
             with gr.Column(scale=9):
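
The two dropdown hunks drop allow_custom_value, a keyword that only newer gr.Dropdown releases accept, so the change appears to target the older Gradio version pinned for this Space. A hypothetical helper (make_role_dropdown is an invented name, not in the commit) that would keep one code path working on either version:

import inspect

import gradio as gr


def make_role_dropdown(choices, label, value):
    # Pass allow_custom_value only when the installed gr.Dropdown accepts it.
    kwargs = dict(label=label, value=value, interactive=True)
    if "allow_custom_value" in inspect.signature(gr.Dropdown.__init__).parameters:
        kwargs["allow_custom_value"] = True
    return gr.Dropdown(choices, **kwargs)
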
@@ -460,14 +458,7 @@ def construct_ui(blocks, api_key: Optional[str] = None) -> None:
         " based on the original (simplistic) idea", lines=1, interactive=False)
     task_prompt_ta = gr.TextArea(label="Planned task prompt", lines=1,
         interactive=False, visible=False)
-
-    chatbot = gr.Chatbot(
-        [],
-        elem_id="chatbot",
-        height=750,
-        show_label=False,
-        type="messages" # Use new message format
-    )
+    chatbot = gr.Chatbot(label="Chat between autonomous agents")
     empty_state = State.empty()
     session_state: gr.State = gr.State(empty_state)
 
@@ -487,10 +478,10 @@ def construct_ui(blocks, api_key: Optional[str] = None) -> None:
                     chatbot, progress_sl],
              queue=False) \
         .then(role_playing_chat_init, session_state,
-              [session_state, chatbot, progress_sl], queue=False)
-
-
-
+              [session_state, chatbot, progress_sl], queue=False)
+
+    blocks.load(role_playing_chat_cont, session_state,
+                [session_state, chatbot, progress_sl, start_bn], every=0.5)
 
     clear_bn.click(stop_session, session_state,
                    [session_state, progress_sl, start_bn])
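
The final hunk restores the polling loop: blocks.load(..., every=0.5) re-runs role_playing_chat_cont every half second while a client is connected, which is what keeps the agent conversation advancing in the Chatbot. A minimal sketch of the same pattern, assuming Gradio 3.x, with poll and the Status textbox standing in for the app's real callback and components:

import time

import gradio as gr


def poll(state):
    # Stand-in for role_playing_chat_cont: advance the session one step and
    # return the updated state plus something to show in the UI.
    state = state or {"ticks": 0}
    state["ticks"] += 1
    return state, f"tick {state['ticks']} at {time.strftime('%H:%M:%S')}"


with gr.Blocks() as blocks:
    session_state = gr.State(None)
    status = gr.Textbox(label="Status")
    # Runs poll every 0.5 s while the page is open; requires the queue.
    blocks.load(poll, session_state, [session_state, status], every=0.5)

blocks.queue()
blocks.launch()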