import asyncio
import os
import random
import time
from functools import partial

import autogen
import panel as pn
from langchain.document_loaders import PythonLoader, TextLoader, UnstructuredFileLoader
from unstructured.ingest.interfaces import PartitionConfig, ProcessorConfig, ReadConfig
from unstructured.ingest.runner import LocalRunner
from unstructured.partition.auto import partition

from autogen_utils import (
    MathUserProxyAgent,
    RetrieveUserProxyAgent,
    check_termination_and_human_reply,
    generate_code,
    get_retrieve_config,
    initialize_agents,
)
from configs import DEFAULT_TERMINATE_MESSAGE, Q1, Q2, Q3, TIMEOUT, TITLE
from custom_widgets import RowAgentWidget
from panel.chat import ChatInterface
from panel.widgets import Button, CodeEditor, PasswordInput, Switch, TextInput


def process_file_with_unstructured(file_path):
    """Load a file with the loader that matches its extension and return its raw text."""
    # Determine the file extension
    _, file_extension = os.path.splitext(file_path)
    file_extension = file_extension.lower()

    # Choose the appropriate loader based on the file extension
    if file_extension in ['.txt', '.md', '.html', '.rst']:
        loader = TextLoader(file_path)
    elif file_extension == '.py':
        loader = PythonLoader(file_path)
    else:
        # Default to UnstructuredFileLoader for other file types
        loader = UnstructuredFileLoader(file_path)
    docs = loader.load()

    # Join the page content of the loaded documents into a single string
    raw_text = "\n".join(doc.page_content for doc in docs)

    return raw_text

pn.extension("codeeditor")
template = pn.template.BootstrapTemplate(title=TITLE)

def get_description_text():
    return f"""
    # {TITLE}

    This is a [YI-6B-200K](https://huggingface.co/01-ai/Yi-6B-200K) AGI + Agent Factory built with [Panel](https://panel.holoviz.org/). Build agents that use Yi-200K and avoid context window overflows! Scroll down to see the code for creating and using the agents.

    ## Join us:
    🌟TeamTonic🌟 is always making cool demos! Join our active builders'🛠️ community on 👻Discord: [Discord](https://discord.gg/GWpVpekp) On 🤗Huggingface: [TeamTonic](https://huggingface.co/TeamTonic) & [MultiTransformer](https://huggingface.co/MultiTransformer) On 🌐Github: [Polytonic](https://github.com/tonic-ai) & contribute to 🌟 [PolyGPT](https://github.com/tonic-ai/polygpt-alpha)
    """
# client = Client("https://tonic1-tulu.hf.space/--replicas/x4pxd/")

# async def send_messages_with_tulu_api(recipient, messages, sender):
#     response_content = ""
#     if len(messages) > 0:
#         message_text = messages[-1]["content"]
#         result = await client.predict(message_text)
#         if "message" in result:
#             response_content = result["message"]
#     chatiface.send(response_content, user=sender.name, respond=False)



template.main.append(
    pn.pane.Markdown(get_description_text(), sizing_mode="stretch_width")
)

txt_model = TextInput(
    name="Model Name",
    placeholder="Enter your model name here...",
    value="gpt-35-turbo",
    sizing_mode="stretch_width",
)
pwd_openai_key = PasswordInput(
    name="OpenAI API Key",
    placeholder="Enter your OpenAI API Key here...",
    sizing_mode="stretch_width",
)
pwd_openai_url = PasswordInput(
    name="OpenAI Base Url",
    placeholder="Enter your OpenAI Base Url here...",
    sizing_mode="stretch_width",
)
pwd_aoai_key = PasswordInput(
    name="Azure OpenAI API Key",
    placeholder="Enter your Azure OpenAI API Key here...",
    sizing_mode="stretch_width",
)
pwd_aoai_url = PasswordInput(
    name="Azure OpenAI Base Url",
    placeholder="Enter your Azure OpenAI Base Url here...",
    sizing_mode="stretch_width",
)
RAG = pn.widgets.FileInput(filename="RAG", sizing_mode="stretch_width", multiple=True)
file_cfg = pn.widgets.FileInput(name='Configuration File', accept='.json', sizing_mode="stretch_width")


def check_yi_mode(event):  
    # Make openai_row invisible when local YI Mode is enabled and vice versa.  
    openai_row.visible = not event.new  
    yi_widgets.visible = event.new  

YI_MODE = pn.widgets.Checkbox(name='Local YI Mode', value=True)
YI_ENDPOINT = pn.widgets.TextInput(name='Yi Endpoint', placeholder="https://tonic-easyyi.hf.space/--replicas/dlwnc/")

# Add an observer to watch changes in YI_MODE's value (True/False)
YI_MODE.param.watch(check_yi_mode, 'value')


yi_widgets = pn.Row(
    pn.pane.Markdown("### Local YI Mode: "),
    YI_MODE,
    pn.pane.Markdown("### Yi Endpoint: "),
    YI_ENDPOINT,
    pn.pane.Markdown("### Add Files: "),
    RAG,
)
template.main.append(yi_widgets)
RAG = pn.widgets.FileInput(filename="OAI_CONFIG_LIST", sizing_mode="stretch_width")
openai_row = pn.Row(
    pn.pane.Markdown("### Local YI Mode: "),
    YI_MODE,
    txt_model,
    pwd_openai_key,
    pwd_openai_url,
    pwd_aoai_key,
    pwd_aoai_url,
    pn.pane.Markdown("### Add Files: "),
    RAG,
    pn.pane.Markdown("### Add Config: "),
    file_cfg,
)
template.main.append(pn.Column(openai_row))

def get_config(tmpfilename="OAI_CONFIG_LIST"):
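    """Assemble the llm_config dict passed to the agents.

    Config resolution: an uploaded config file (or a local OAI_CONFIG_LIST),
    overridden by the local Yi endpoint when YI Mode is enabled, falling back
    to the OpenAI/Azure credentials entered in the widgets, and finally a
    placeholder Azure entry.
    """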
    os.makedirs(".chromadb", exist_ok=True)
    if file_cfg.value:
        if os.path.exists(f".chromadb/{tmpfilename}"):
            os.remove(f".chromadb/{tmpfilename}")
        file_cfg.save(f".chromadb/{tmpfilename}")
        cfg_fpath = f".chromadb/{tmpfilename}"
    else:
        cfg_fpath = "OAI_CONFIG_LIST"  # for local testing
    config_list = autogen.config_list_from_json(
        cfg_fpath,
        file_location=".",
    )
    if YI_MODE.value:  # local Yi mode: point the OpenAI-compatible client at the Yi endpoint
        config_list = [
            {
                "model": "EasyYI",
                "api_key": "None",
                "base_url": YI_ENDPOINT.value,
            }
        ]
    if not config_list:
        os.environ["MODEL"] = txt_model.value
        os.environ["OPENAI_API_KEY"] = pwd_openai_key.value
        os.environ["OPENAI_API_BASE"] = pwd_openai_url.value
        os.environ["AZURE_OPENAI_API_KEY"] = pwd_aoai_key.value
        os.environ["AZURE_OPENAI_API_BASE"] = pwd_aoai_url.value

        config_list = autogen.config_list_from_models(
            model_list=[os.environ.get("MODEL", "gpt-35-turbo")],
        )
        for cfg in config_list:
            if cfg.get("api_type", "open_ai") == "open_ai":
                base_url = os.environ.get("OPENAI_API_BASE", "").strip()
                if base_url:
                    cfg["base_url"] = base_url
    if not config_list:
        config_list = [
            {
                "api_key": "",
                "base_url": "",
                "api_type": "azure",
                "api_version": "2023-07-01-preview",
                "model": "gpt-35-turbo",
            }
        ]

    llm_config = {
        "timeout": TIMEOUT,
        "cache_seed": 42,
        "config_list": config_list,
        "temperature": 0,
    }

    return llm_config


btn_add = Button(name="+", button_type="success")
btn_remove = Button(name="-", button_type="danger")
switch_code = Switch(
    name="Run Code", sizing_mode="fixed", width=50, height=30, align="end"
)
select_speaker_method = pn.widgets.Select(
    name="", options=["round_robin", "auto", "random"], value="round_robin"
)
template.main.append(
    pn.Row(
        pn.pane.Markdown("## Add or Remove Agents: "),
        btn_add,
        btn_remove,
        pn.pane.Markdown("### Run Code: "),
        switch_code,
        pn.pane.Markdown("### Speaker Selection Method: "),
        select_speaker_method,
    )
)

column_agents = pn.Column(
    RowAgentWidget(
        value=[
            "User_Proxy",
            "",
            "UserProxyAgent",
            "",
        ]
    ),
    sizing_mode="stretch_width",
)
column_agents.append(
    RowAgentWidget(
        value=[
            "Assistant_Agent",
            "",
            "AssistantAgent",
            "",
        ]
    ),
)

template.main.append(column_agents)


def add_agent(event):
    column_agents.append(RowAgentWidget(value=["", "", "AssistantAgent", ""]))


def remove_agent(event):
    column_agents.pop(-1)


btn_add.on_click(add_agent)
btn_remove.on_click(remove_agent)


async def send_messages(recipient, messages, sender, config):
    # print(f"{sender.name} -> {recipient.name}: {messages[-1]['content']}")
    chatiface.send(messages[-1]["content"], user=sender.name, respond=False)
    return False, None  # required to ensure the agent communication flow continues


class myGroupChatManager(autogen.GroupChatManager):
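    """A GroupChatManager that mirrors every non-silent received message into the Panel ChatInterface."""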
    def _send_messages(self, message, sender, config):
        message = self._message_to_dict(message)

        if message.get("role") == "function":
            content = message["content"]
        else:
            content = message.get("content")
            if content is not None:
                if "context" in message:
                    content = autogen.OpenAIWrapper.instantiate(
                        content,
                        message["context"],
                        self.llm_config
                        and self.llm_config.get("allow_format_str_template", False),
                    )
            if "function_call" in message:
                function_call = dict(message["function_call"])
                content = f"Suggested function Call: {function_call.get('name', '(No function name found)')}"
        chatiface.send(content, user=sender.name, respond=False)
        return False, None  # required to ensure the agent communication flow continues

    def _process_received_message(self, message, sender, silent):
        message = self._message_to_dict(message)
        # When the agent receives a message, the role of the message is "user". (If 'role' exists and is 'function', it will remain unchanged.)
        valid = self._append_oai_message(message, "user", sender)
        if not valid:
            raise ValueError(
                "Received message can't be converted into a valid ChatCompletion message. Either content or function_call must be provided."
            )
        if not silent:
            self._print_received_message(message, sender)
            self._send_messages(message, sender, None)


def init_groupchat(event, collection_name):
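    """Build agents from the widget rows; with three or more agents, also create a GroupChat and its manager (otherwise both are None)."""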
    llm_config = get_config(collection_name)
    agents = []
    for row_agent in column_agents:
        agent_name = row_agent[0][0].value
        system_msg = row_agent[0][1].value
        agent_type = row_agent[0][2].value
        docs_path = row_agent[1].value
        retrieve_config = (
            get_retrieve_config(
                docs_path,
                txt_model.value,
                collection_name=collection_name,
            )
            if agent_type == "RetrieveUserProxyAgent"
            else None
        )
        code_execution_config = (
            {
                "work_dir": "coding",
                "use_docker": False,  # set to True or image name like "python:3" to use docker
            }
            if switch_code.value
            else False
        )
        agent = initialize_agents(
            llm_config,
            agent_name,
            system_msg,
            agent_type,
            retrieve_config,
            code_execution_config,
        )
        agent.register_reply(
            [autogen.Agent, None], reply_func=send_messages, config={"callback": None}
        )
        agents.append(agent)
    if len(agents) >= 3:
        groupchat = autogen.GroupChat(
            agents=agents,
            messages=[],
            max_round=12,
            speaker_selection_method=select_speaker_method.value,
            allow_repeat_speaker=False,
        )
        manager = myGroupChatManager(groupchat=groupchat, llm_config=llm_config)
    else:
        manager = None
        groupchat = None
    return agents, manager, groupchat


async def agents_chat(init_sender, manager, contents, agents, RAG):
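    """Prepend text extracted from an uploaded file (if any) to the message, pick the recipient, and start the chat."""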
    # Check if a file is uploaded
    if RAG and RAG.value:
        # Save the file and process it
        file_path = "path/to/saved/file"  # Define the path to save the uploaded file
        RAG.save(file_path)
        raw_text = process_file_with_unstructured(file_path)

        # Prepend the extracted text to the contents
        contents = raw_text + ' ' + contents
    
    # Determine the recipient
    recipient = (
        manager
        if len(agents) > 2
        else agents[1]
        if agents[1] != init_sender
        else agents[0]
    )

    # Initiate chat
    if isinstance(init_sender, (RetrieveUserProxyAgent, MathUserProxyAgent)):
        await init_sender.a_initiate_chat(recipient, problem=contents)
    else:
        await init_sender.a_initiate_chat(recipient, message=contents)


async def reply_chat(contents, user, instance):
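    """ChatInterface callback: rebuild the agents when the widget rows change, patch their reply functions for human input, then run code generation and the agent chat."""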
    if hasattr(instance, "collection_name"):
        collection_name = instance.collection_name
    else:
        collection_name = f"{int(time.time())}_{random.randint(0, 100000)}"
        instance.collection_name = collection_name

    column_agents_list = [[a.value for a in agent[0]] for agent in column_agents]
    if (
        not hasattr(instance, "agents_list")
        or instance.agents_list != column_agents_list
    ):
        agents, manager, groupchat = init_groupchat(None, collection_name)
        instance.manager = manager
        instance.agents = agents
        instance.groupchat = groupchat
        instance.agents_list = column_agents_list
    else:
        agents = instance.agents
        manager = instance.manager
        groupchat = instance.groupchat

    if len(agents) <= 1:
        return "Please add more agents."

    init_sender = None
    for agent in agents:
        if "UserProxy" in str(type(agent)):
            init_sender = agent
            break
    for agent in agents:
        # Hack to get human input
        agent._reply_func_list.pop(1)
        agent.register_reply(
            [autogen.Agent, None],
            partial(check_termination_and_human_reply, instance=instance),
            1,
        )
    if manager is not None:
        for agent in agents:
            agent._reply_func_list.pop(0)

    if not init_sender:
        init_sender = agents[0]
    await generate_code(agents, manager, contents, code_editor, groupchat)
    await agents_chat(init_sender, manager, contents, agents, RAG)
    return "The task is done. Please start a new task."


chatiface = ChatInterface(
    callback=reply_chat,
    height=600,
)

template.main.append(chatiface)

btn_msg1 = Button(name=Q1, sizing_mode="stretch_width")
btn_msg2 = Button(name=Q2, sizing_mode="stretch_width")
btn_msg3 = Button(name=Q3, sizing_mode="stretch_width")
template.main.append(
    pn.Column(
        pn.pane.Markdown("## Message Examples: ", sizing_mode="stretch_width"),
        btn_msg1,
        btn_msg2,
        btn_msg3,
        sizing_mode="stretch_width",
    )
)


def load_message(event):
    if event.obj.name == Q1:
        chatiface.send(Q1)
    elif event.obj.name == Q2:
        chatiface.send(Q2)
    elif event.obj.name == Q3:
        chatiface.send(Q3)


btn_msg1.on_click(load_message)
btn_msg2.on_click(load_message)
btn_msg3.on_click(load_message)


btn_example1 = Button(
    name="General 2 agents", button_type="primary", sizing_mode="stretch_width"
)
btn_example2 = Button(
    name="RAG 2 agents", button_type="primary", sizing_mode="stretch_width"
)
btn_example3 = Button(
    name="Software Dev 3 agents", button_type="primary", sizing_mode="stretch_width"
)
btn_example4 = Button(
    name="Research 6 agents", button_type="primary", sizing_mode="stretch_width"
)
template.main.append(
    pn.Row(
        pn.pane.Markdown("## Agent Examples: ", sizing_mode="stretch_width"),
        btn_example1,
        btn_example2,
        btn_example3,
        btn_example4,
        sizing_mode="stretch_width",
    )
)


def clear_agents():
    while len(column_agents) > 0:
        column_agents.pop(-1)


def load_example(event):
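    """Load one of the preset agent line-ups into the agent rows."""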
    clear_agents()
    if event.obj.name == "RAG 2 agents":
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Boss_Assistant",
                    "Assistant who has extra content retrieval power for solving difficult problems.",
                    "RetrieveUserProxyAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Senior_Python_Engineer",
                    f"You are a senior python engineer. {DEFAULT_TERMINATE_MESSAGE}",
                    "RetrieveAssistantAgent",
                    "",
                ]
            ),
        )
    elif event.obj.name == "General 2 agents":
        column_agents.append(
            RowAgentWidget(
                value=[
                    "User_Proxy",
                    "",
                    "UserProxyAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Assistant_Agent",
                    "",
                    "AssistantAgent",
                    "",
                ]
            ),
        )
    elif event.obj.name == "Software Dev 3 agents":
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Boss",
                    f"The boss who ask questions and give tasks. {DEFAULT_TERMINATE_MESSAGE}",
                    "UserProxyAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Senior_Python_Engineer",
                    f"You are a senior python engineer. {DEFAULT_TERMINATE_MESSAGE}",
                    "AssistantAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Product_Manager",
                    f"You are a product manager. {DEFAULT_TERMINATE_MESSAGE}",
                    "AssistantAgent",
                    "",
                ]
            ),
        )
    elif event.obj.name == "Research 6 agents":
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Admin",
                    "A human admin. Interact with the planner to discuss the plan. Plan execution needs to be approved by this admin.",
                    "UserProxyAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Engineer",
                    """Engineer. You follow an approved plan. You write python/shell code to solve tasks. Wrap the code in a code block that specifies the script type. The user can't modify your code. So do not suggest incomplete code which requires others to modify. Don't use a code block if it's not intended to be executed by the executor.
Don't include multiple code blocks in one response. Do not ask others to copy and paste the result. Check the execution result returned by the executor.
If the result indicates there is an error, fix the error and output the code again. Suggest the full code instead of partial code or code changes. If the error can't be fixed or if the task is not solved even after the code is executed successfully, analyze the problem, revisit your assumption, collect additional info you need, and think of a different approach to try.
""",
                    "AssistantAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Scientist",
                    """Scientist. You follow an approved plan. You are able to categorize papers after seeing their abstracts printed. You don't write code.""",
                    "AssistantAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Planner",
                    """Planner. Suggest a plan. Revise the plan based on feedback from admin and critic, until admin approval.
The plan may involve an engineer who can write code and a scientist who doesn't write code.
Explain the plan first. Be clear which step is performed by an engineer, and which step is performed by a scientist.
""",
                    "AssistantAgent",
                    "",
                ]
            ),
        )
        column_agents.append(
            RowAgentWidget(
                value=[
                    "Critic",
                    "Critic. Double check plan, claims, code from other agents and provide feedback. Check whether the plan includes adding verifiable info such as source URL.",
                    "AssistantAgent",
                    "",
                ]
            ),
        )

        column_agents.append(
            RowAgentWidget(
                value=[
                    "Executor",
                    "Executor. Execute the code written by the engineer and report the result.",
                    "UserProxyAgent",
                    "",
                ]
            ),
        )


btn_example1.on_click(load_example)
btn_example2.on_click(load_example)
btn_example3.on_click(load_example)
btn_example4.on_click(load_example)

code_editor = CodeEditor(
    value="", sizing_mode="stretch_width", language="python", height=300
)
template.main.append(code_editor)

template.servable(title=TITLE)