Spaces:
Runtime error
Runtime error
File size: 1,624 Bytes
e6b2623 57d4d70 e6b2623 23e1297 57d4d70 23e1297 3e3ed73 23e1297 d3ae5b3 1e79b3e 23e1297 d3ae5b3 23e1297 d3ae5b3 57d4d70 23e1297 e6b2623 57d4d70 e6b2623 57d4d70 e6b2623 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 |
import sys
import json
import autogen
from autogen import config_list_from_json
# Function to run the query
def run_query(programming_problem, api_key, model='gpt-3.5-turbo'):
    """Run a multi-agent AutoGen group chat to solve a programming problem.

    Sets up three agents — a non-interactive user proxy that executes code,
    a coder assistant, and a product-manager assistant that scopes the
    requirement — and drives them through a GroupChatManager.

    Args:
        programming_problem: Natural-language description of the task,
            sent as the opening chat message.
        api_key: OpenAI API key used by the LLM-backed agents.
        model: Chat model name for the LLM config (default 'gpt-3.5-turbo').

    Returns:
        Whatever ``user_proxy.initiate_chat`` returns for the installed
        autogen version (may be ``None`` on older releases).
    """
    config_list = [
        {
            'model': model,
            'api_key': api_key,
        },
    ]
    # NOTE(review): "seed" and "request_timeout" are the pre-0.2 pyautogen
    # llm_config keys (newer versions use "cache_seed" / per-client timeout)
    # — TODO confirm against the installed autogen version.
    llm_config = {"config_list": config_list, "seed": 42, "request_timeout": 120}
    # Record completions for later inspection; also a pre-0.2 API — verify.
    autogen.ChatCompletion.start_logging()

    # User proxy runs the coder's code automatically (no human in the loop);
    # only the last 2 messages are scanned for executable blocks.
    user_proxy = autogen.UserProxyAgent(
        name="User_proxy",
        system_message="A human admin who will give the idea and run the code provided by Coder.",
        code_execution_config={"last_n_messages": 2, "work_dir": "groupchat"},
        human_input_mode="NEVER",
    )
    coder = autogen.AssistantAgent(
        name="Coder",
        llm_config=llm_config,
    )
    pm = autogen.AssistantAgent(
        name="product_manager",
        system_message="You will help break down the initial idea into a well scoped requirement for the coder; Do not involve in future conversations or error fixing",
        llm_config=llm_config,
    )

    # Group chat wires the three agents together; the manager picks speakers.
    groupchat = autogen.GroupChat(
        agents=[user_proxy, coder, pm], messages=[])
    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
    return user_proxy.initiate_chat(manager, message=programming_problem)
if __name__ == "__main__":
    # Expect a JSON object on stdin with the problem statement and API key.
    payload = json.load(sys.stdin)
    chat_result = run_query(payload['programming_problem'], payload['api_key'])
    print(chat_result)