# NOTE: "Spaces: Runtime error" is Hugging Face Space status text captured by
# the page extraction — it is not part of the program.
import json
import sys

import autogen
from autogen import config_list_from_json
def run_query(programming_problem, api_key):
    """Solve a programming problem with an AutoGen multi-agent group chat.

    Builds a three-agent group chat (human-proxy admin, coder, product
    manager) backed by gpt-3.5-turbo and kicks it off with the given problem
    statement.

    Args:
        programming_problem: Natural-language description of the task,
            used as the opening chat message.
        api_key: OpenAI API key used by every LLM-backed agent.

    Returns:
        Whatever ``user_proxy.initiate_chat`` returns (a ChatResult in
        pyautogen >= 0.2; ``None`` in older releases).
    """
    config_list = [
        {
            'model': 'gpt-3.5-turbo',
            'api_key': api_key,
        },
    ]
    # pyautogen 0.2 renamed "seed" -> "cache_seed" and "request_timeout" ->
    # "timeout"; the old keys raise at agent construction on current releases.
    llm_config = {"config_list": config_list, "cache_seed": 42, "timeout": 120}

    # autogen.ChatCompletion (and its start_logging) was removed in
    # pyautogen 0.2 — calling it unconditionally is an AttributeError at
    # runtime. Keep legacy logging only when the installed version has it.
    if hasattr(autogen, "ChatCompletion"):
        autogen.ChatCompletion.start_logging()

    # Create user proxy agent, coder, product manager.
    user_proxy = autogen.UserProxyAgent(
        name="User_proxy",
        system_message="A human admin who will give the idea and run the code provided by Coder.",
        # use_docker=False: hosted containers (e.g. HF Spaces) have no Docker
        # daemon, and code execution fails without it.
        # NOTE(review): this executes model-generated code locally — confirm
        # that is acceptable for this deployment.
        code_execution_config={
            "last_n_messages": 2,
            "work_dir": "groupchat",
            "use_docker": False,
        },
        human_input_mode="NEVER",
    )
    coder = autogen.AssistantAgent(
        name="Coder",
        llm_config=llm_config,
    )
    pm = autogen.AssistantAgent(
        name="product_manager",
        system_message="You will help break down the initial idea into a well scoped requirement for the coder; Do not involve in future conversations or error fixing",
        llm_config=llm_config,
    )

    # Wire the agents into a group chat and drive it with a manager agent.
    groupchat = autogen.GroupChat(
        agents=[user_proxy, coder, pm], messages=[])
    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
    return user_proxy.initiate_chat(manager, message=programming_problem)
if __name__ == "__main__":
    # Expect a JSON object on stdin: {"programming_problem": ..., "api_key": ...}
    payload = json.loads(sys.stdin.read())
    chat_result = run_query(payload['programming_problem'], payload['api_key'])
    print(chat_result)