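# Multi-agent blog-writing workflow built on AutoGen: an Admin user proxy,
# a Planner, an Engineer, a code Executor, and a Writer collaborate in a
# constrained group chat to research a topic with Python and draft a blog post.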
import autogen
def run_multi_agent(llm, task):
    llm_config = {"model": llm}

    # Admin: user proxy that provides the task and relays feedback to the writer.
    user_proxy = autogen.ConversableAgent(
        name="Admin",
        system_message="Give the task, and send "
        "instructions to the writer to refine the blog post.",
        code_execution_config=False,
        llm_config=llm_config,
        human_input_mode="NEVER",
    )
    # Planner: breaks the task into steps that can be completed with Python code
    # and tracks progress after each step.
    planner = autogen.ConversableAgent(
        name="Planner",
        system_message="Given a task, please determine "
        "what information is needed to complete the task. "
        "Please note that the information will all be retrieved using "
        "Python code. Please only suggest information that can be "
        "retrieved using Python code. "
        "After each step is done by others, check the progress and "
        "instruct the remaining steps. If a step fails, try to "
        "work around it.",
        description="Given a task, determine what "
        "information is needed to complete the task. "
        "After each step is done by others, check the progress and "
        "instruct the remaining steps.",
        llm_config=llm_config,
    )
    # Engineer: writes code that implements the planner's steps.
    engineer = autogen.AssistantAgent(
        name="Engineer",
        llm_config=llm_config,
        description="Write code based on the plan "
        "provided by the planner.",
    )
    # Writer: drafts the blog post from the execution results and revises it
    # based on the admin's feedback.
    writer = autogen.ConversableAgent(
        name="Writer",
        llm_config=llm_config,
        system_message="Writer. "
        "Please write blogs in markdown format (with relevant titles) "
        "and put the content in a pseudo ```md``` code block. "
        "You take feedback from the admin and refine your blog.",
        description="After all the info is available, "
        "write blogs based on the code execution results and take "
        "feedback from the admin to refine the blog.",
    )
    # Executor: runs the engineer's code locally (no Docker) and reports the result.
    executor = autogen.ConversableAgent(
        name="Executor",
        description="Execute the code written by the "
        "engineer and report the result.",
        human_input_mode="NEVER",
        code_execution_config={
            "last_n_messages": 3,
            "work_dir": "coding",
            "use_docker": False,
        },
    )
    # Group chat with an explicit speaker-transition graph: only the listed
    # handoffs are allowed, so the Admin can hand off to any agent, while the
    # other agents report back to the Admin or to their direct collaborators.
    groupchat = autogen.GroupChat(
        agents=[user_proxy, engineer, writer, executor, planner],
        messages=[],
        max_round=50,
        allowed_or_disallowed_speaker_transitions={
            user_proxy: [engineer, writer, executor, planner],
            engineer: [user_proxy, executor],
            writer: [user_proxy, planner],
            executor: [user_proxy, engineer, planner],
            planner: [user_proxy, engineer, writer],
        },
        speaker_transitions_type="allowed",
    )
    # The manager orchestrates the group chat; the Admin kicks it off with the task.
    manager = autogen.GroupChatManager(
        groupchat=groupchat, llm_config=llm_config
    )

    groupchat_result = user_proxy.initiate_chat(
        manager,
        message=task,
    )

    return groupchat_result
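

# Usage sketch (not part of the original file): the model id and task below are
# placeholders, and an OPENAI_API_KEY is assumed to be set in the environment
# for AutoGen's default OpenAI client.
if __name__ == "__main__":
    result = run_multi_agent(
        "gpt-4-turbo",  # hypothetical model id; substitute your own
        "Write a blog post about recent trends in open-source LLMs.",  # example task
    )
    # initiate_chat returns a ChatResult; fall back to printing the raw object.
    print(result.summary if hasattr(result, "summary") else result)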