eaglelandsonce committed
Commit d3ae5b3 · 1 Parent(s): 80b30f1

Update query_solver.py

Files changed (1)
  1. query_solver.py +21 -14
query_solver.py CHANGED
@@ -14,23 +14,30 @@ def run_query(math_problem, api_key):
 
     autogen.ChatCompletion.start_logging()
 
-    assistant = autogen.AssistantAgent(
-        name="assistant",
-        system_message="You are a helpful assistant.",
-        llm_config={
-            "request_timeout": 600,
-            "seed": 42,
-            "config_list": config_list,
-        }
+    # Create user proxy agent, coder, product manager
+    user_proxy = autogen.UserProxyAgent(
+        name="User_proxy",
+        system_message="A human admin who will give the idea and run the code provided by Coder.",
+        code_execution_config={"last_n_messages": 2, "work_dir": "groupchat"},
+        human_input_mode="ALWAYS",
     )
-
-    mathproxyagent = MathUserProxyAgent(
-        name="mathproxyagent",
-        human_input_mode="NEVER",
-        code_execution_config={"use_docker": False},
+    coder = autogen.AssistantAgent(
+        name="Coder",
+        llm_config=llm_config,
+    )
+    pm = autogen.AssistantAgent(
+        name="product_manager",
+        system_message="You will help break down the initial idea into a well scoped requirement for the coder; Do not involve in future conversations or error fixing",
+        llm_config=llm_config,
     )
 
-    return mathproxyagent.initiate_chat(assistant, problem=math_problem)
+    # Create groupchat
+    groupchat = autogen.GroupChat(
+        agents=[user_proxy, coder, pm], messages=[])
+    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
+
+
+    return user_proxy.initiate_chat(manager, message=math_problem)
 
 
 if __name__ == "__main__":
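
For reference, the updated run_query assembled after this hunk might read roughly as below. The added agents reference llm_config, while the removed code only shows config_list; whether llm_config is defined earlier in the file is not visible in this hunk, so the config_list/llm_config lines in this sketch are assumptions (including the placeholder model name), not part of the commit.

import autogen


def run_query(math_problem, api_key):
    # Assumption (not in the hunk): llm_config is built from the caller's API key;
    # the model name here is only a placeholder.
    config_list = [{"model": "gpt-4", "api_key": api_key}]
    llm_config = {"config_list": config_list, "seed": 42}

    autogen.ChatCompletion.start_logging()

    # Create user proxy agent, coder, product manager (as added in the commit)
    user_proxy = autogen.UserProxyAgent(
        name="User_proxy",
        system_message="A human admin who will give the idea and run the code provided by Coder.",
        code_execution_config={"last_n_messages": 2, "work_dir": "groupchat"},
        human_input_mode="ALWAYS",
    )
    coder = autogen.AssistantAgent(name="Coder", llm_config=llm_config)
    pm = autogen.AssistantAgent(
        name="product_manager",
        system_message="You will help break down the initial idea into a well scoped requirement for the coder; Do not involve in future conversations or error fixing",
        llm_config=llm_config,
    )

    # The group chat manager routes the conversation among the three agents
    groupchat = autogen.GroupChat(agents=[user_proxy, coder, pm], messages=[])
    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)

    return user_proxy.initiate_chat(manager, message=math_problem)

Compared with the removed two-agent MathUserProxyAgent flow, the user proxy now sends the problem to a GroupChatManager so the Coder and product_manager agents can collaborate on it.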