Fix groupchat none
app.py (CHANGED)
@@ -34,25 +34,42 @@ def get_description_text():
     """


-template.main.append(
+template.main.append(
+    pn.pane.Markdown(get_description_text(), sizing_mode="stretch_width")
+)

 txt_model = TextInput(
-    name="Model Name",
+    name="Model Name",
+    placeholder="Enter your model name here...",
+    value="gpt-35-turbo",
+    sizing_mode="stretch_width",
 )
 pwd_openai_key = PasswordInput(
-    name="OpenAI API Key",
+    name="OpenAI API Key",
+    placeholder="Enter your OpenAI API Key here...",
+    sizing_mode="stretch_width",
 )
 pwd_openai_url = PasswordInput(
-    name="OpenAI Base Url",
+    name="OpenAI Base Url",
+    placeholder="Enter your OpenAI Base Url here...",
+    sizing_mode="stretch_width",
 )
 pwd_aoai_key = PasswordInput(
-    name="Azure OpenAI API Key",
+    name="Azure OpenAI API Key",
+    placeholder="Enter your Azure OpenAI API Key here...",
+    sizing_mode="stretch_width",
 )
 pwd_aoai_url = PasswordInput(
-    name="Azure OpenAI Base Url",
+    name="Azure OpenAI Base Url",
+    placeholder="Enter your Azure OpenAI Base Url here...",
+    sizing_mode="stretch_width",
 )
 file_cfg = pn.widgets.FileInput(filename="OAI_CONFIG_LIST", sizing_mode="stretch_width")
-template.main.append(
+template.main.append(
+    pn.Row(
+        txt_model, pwd_openai_key, pwd_openai_url, pwd_aoai_key, pwd_aoai_url, file_cfg
+    )
+)


 def get_config(tmpfilename="OAI_CONFIG_LIST"):
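The hunk above only reflows the settings widgets, but it shows the Panel pattern the whole app is built on: module-level widgets appended to the template's main area. A minimal self-contained sketch of that pattern, assuming a BootstrapTemplate and a reduced widget set (the template class, title, and widget subset are illustrative, not taken from app.py):

import panel as pn
from panel.widgets import PasswordInput, TextInput

pn.extension()

# Illustrative template; app.py's actual template class and title may differ.
template = pn.template.BootstrapTemplate(title="AutoGen Playground (sketch)")

txt_model = TextInput(
    name="Model Name",
    placeholder="Enter your model name here...",
    value="gpt-35-turbo",
    sizing_mode="stretch_width",
)
pwd_openai_key = PasswordInput(
    name="OpenAI API Key",
    placeholder="Enter your OpenAI API Key here...",
    sizing_mode="stretch_width",
)
file_cfg = pn.widgets.FileInput(sizing_mode="stretch_width")

# Grouping the inputs in a Row keeps them on one horizontal strip of the page.
template.main.append(pn.Row(txt_model, pwd_openai_key, file_cfg))

# `panel serve app.py` renders whatever has been marked servable.
template.servable(title="AutoGen Playground (sketch)")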
@@ -106,8 +123,12 @@ def get_config(tmpfilename="OAI_CONFIG_LIST"):

 btn_add = Button(name="+", button_type="success")
 btn_remove = Button(name="-", button_type="danger")
-switch_code = Switch(
-
+switch_code = Switch(
+    name="Run Code", sizing_mode="fixed", width=50, height=30, align="end"
+)
+select_speaker_method = pn.widgets.Select(
+    name="", options=["round_robin", "auto", "random"], value="round_robin"
+)
 template.main.append(
     pn.Row(
         pn.pane.Markdown("## Add or Remove Agents: "),
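select_speaker_method is new here and mirrors the three speaker-selection strategies autogen's GroupChat understands. The diff only creates the widget; a hedged sketch of how its value could be passed through when the group chat is built (the wiring and the max_round value are assumptions, and speaker_selection_method requires a reasonably recent pyautogen):

import autogen
import panel as pn

pn.extension()

select_speaker_method = pn.widgets.Select(
    name="", options=["round_robin", "auto", "random"], value="round_robin"
)

def build_groupchat(agents):
    # "round_robin" cycles speakers in order, "auto" asks the LLM to pick
    # the next speaker, "random" chooses uniformly at random.
    return autogen.GroupChat(
        agents=agents,
        messages=[],
        max_round=12,
        speaker_selection_method=select_speaker_method.value,
    )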
@@ -176,7 +197,8 @@ class myGroupChatManager(autogen.GroupChatManager):
         content = autogen.OpenAIWrapper.instantiate(
             content,
             message["context"],
-            self.llm_config
+            self.llm_config
+            and self.llm_config.get("allow_format_str_template", False),
         )
         if "function_call" in message:
             function_call = dict(message["function_call"])
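The reflowed call above passes a third argument that toggles format-string templating on or off based on llm_config. OpenAIWrapper.instantiate fills a plain string template from a context dict when that flag is truthy; a small hedged sketch with a made-up template:

import autogen

msg_template = "Summarize the following topic in one sentence: {topic}"
context = {"topic": "AutoGen group chats"}

# With the third argument (allow_format_str_template) truthy, the plain string
# is treated as a format-string template and filled from the context dict.
filled = autogen.OpenAIWrapper.instantiate(msg_template, context, True)
print(filled)  # Summarize the following topic in one sentence: AutoGen group chats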
@@ -223,9 +245,16 @@ def init_groupchat(event, collection_name):
             else False
         )
         agent = initialize_agents(
-            llm_config,
+            llm_config,
+            agent_name,
+            system_msg,
+            agent_type,
+            retrieve_config,
+            code_execution_config,
+        )
+        agent.register_reply(
+            [autogen.Agent, None], reply_func=send_messages, config={"callback": None}
         )
-        agent.register_reply([autogen.Agent, None], reply_func=send_messages, config={"callback": None})
         agents.append(agent)
     if len(agents) >= 3:
         groupchat = autogen.GroupChat(
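register_reply hooks a custom reply function into each agent so the app can mirror messages to the chat UI (the send_messages callback defined elsewhere in app.py). A self-contained sketch of the callback contract autogen expects; the callback body and the agent name here are illustrative, not the app's:

import autogen

def send_messages(recipient, messages=None, sender=None, config=None):
    # autogen calls reply functions as (recipient, messages, sender, config)
    # and expects a (final, reply) tuple back. Returning final=False lets the
    # normal reply generation continue after this hook runs.
    if messages:
        who = sender.name if sender else "unknown"
        print(f"{who} -> {recipient.name}: {messages[-1].get('content')}")
    return False, None

agent = autogen.ConversableAgent(name="assistant", llm_config=False)
agent.register_reply(
    [autogen.Agent, None], reply_func=send_messages, config={"callback": None}
)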
@@ -238,11 +267,18 @@ def init_groupchat(event, collection_name):
         manager = myGroupChatManager(groupchat=groupchat, llm_config=llm_config)
     else:
         manager = None
+        groupchat = None
     return agents, manager, groupchat


 async def agents_chat(init_sender, manager, contents, agents):
-    recipient =
+    recipient = (
+        manager
+        if len(agents) > 2
+        else agents[1]
+        if agents[1] != init_sender
+        else agents[0]
+    )
     if isinstance(init_sender, (RetrieveUserProxyAgent, MathUserProxyAgent)):
         await init_sender.a_initiate_chat(recipient, problem=contents)
     else:
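This hunk contains the change the commit title refers to. Before it, groupchat was only bound inside the len(agents) >= 3 branch, so init_groupchat raised UnboundLocalError at the return for a one- or two-agent setup; binding groupchat = None in the else branch makes the third return value well defined, and agents_chat then picks a plain agent rather than the manager as the recipient. A stripped-down illustration of the failure mode and the fix (names and stand-in objects are simplified):

def init_groupchat_buggy(agents):
    if len(agents) >= 3:
        groupchat = object()  # stand-in for autogen.GroupChat(...)
        manager = object()    # stand-in for myGroupChatManager(...)
    else:
        manager = None
    # With fewer than three agents, `groupchat` was never assigned:
    return agents, manager, groupchat  # raises UnboundLocalError


def init_groupchat_fixed(agents):
    if len(agents) >= 3:
        groupchat = object()
        manager = object()
    else:
        manager = None
        groupchat = None  # the added line: always bound before the return
    return agents, manager, groupchat


init_groupchat_fixed(["user_proxy", "assistant"])  # (..., None, None)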
@@ -257,7 +293,10 @@ async def reply_chat(contents, user, instance):
     instance.collection_name = collection_name

     column_agents_list = [[a.value for a in agent[0]] for agent in column_agents]
-    if
+    if (
+        not hasattr(instance, "agent_list")
+        or instance.agents_list != column_agents_list
+    ):
         agents, manager, groupchat = init_groupchat(None, collection_name)
         instance.manager = manager
         instance.agents = agents
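The widened condition rebuilds the agents and group chat only when nothing has been cached on the chat instance yet or the agent rows have changed since the last message (note the diff checks hasattr(instance, "agent_list") while comparing instance.agents_list). A generic sketch of that cache-and-compare pattern with one consistent attribute name; build_agents and the other names are illustrative:

def get_or_rebuild_agents(instance, column_agents_list, build_agents):
    # Rebuild only when nothing is cached yet or the requested spec changed.
    if (
        not hasattr(instance, "agents_list")
        or instance.agents_list != column_agents_list
    ):
        instance.agents = build_agents(column_agents_list)
        instance.agents_list = column_agents_list
    return instance.agents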
@@ -328,10 +367,18 @@ btn_msg2.on_click(load_message)
 btn_msg3.on_click(load_message)


-btn_example1 = Button(
-
-
-
+btn_example1 = Button(
+    name="General 2 agents", button_type="primary", sizing_mode="stretch_width"
+)
+btn_example2 = Button(
+    name="RAG 2 agents", button_type="primary", sizing_mode="stretch_width"
+)
+btn_example3 = Button(
+    name="Software Dev 3 agents", button_type="primary", sizing_mode="stretch_width"
+)
+btn_example4 = Button(
+    name="Research 6 agents", button_type="primary", sizing_mode="stretch_width"
+)
 template.main.append(
     pn.Row(
         pn.pane.Markdown("## Agent Examples: ", sizing_mode="stretch_width"),
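The four preset buttons use Panel's usual on_click wiring: a single callback receives the click event, and event.obj identifies which button fired, which is how one load_example handler can serve all four presets. A minimal hedged sketch (the callback body is an assumption; the real load_example fills in the agent rows):

import panel as pn
from panel.widgets import Button

pn.extension()

btn_example1 = Button(
    name="General 2 agents", button_type="primary", sizing_mode="stretch_width"
)

def load_example(event):
    # event.obj is the Button instance that was clicked, so one handler can
    # branch on event.obj.name to load the matching preset.
    print(f"loading preset: {event.obj.name}")

btn_example1.on_click(load_example)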
@@ -499,7 +546,9 @@ btn_example2.on_click(load_example)
 btn_example3.on_click(load_example)
 btn_example4.on_click(load_example)

-code_editor = CodeEditor(
+code_editor = CodeEditor(
+    value="", sizing_mode="stretch_width", language="python", height=300
+)
 template.main.append(code_editor)

 template.servable(title=TITLE)