awacke1 committed
Commit 4131bf7 • 1 Parent(s): 715ad8b

Create app.py

Files changed (1)
  1. app.py +320 -0
app.py ADDED
@@ -0,0 +1,320 @@
+ import gradio as gr
+ import os
+ from pathlib import Path
+ import shutil
+ import openai
+ import autogen
+ import chromadb
+ import multiprocessing as mp
+ from autogen.retrieve_utils import TEXT_FORMATS, get_file_from_url, is_url
+ from autogen.agentchat.contrib.retrieve_assistant_agent import RetrieveAssistantAgent
+ from autogen.agentchat.contrib.retrieve_user_proxy_agent import (
+     RetrieveUserProxyAgent,
+     PROMPT_CODE,
+ )
+
+ TIMEOUT = 60
+
+
+ def initialize_agents(config_list, docs_path=None):
+     if isinstance(config_list, gr.State):
+         _config_list = config_list.value
+     else:
+         _config_list = config_list
+     if docs_path is None:
+         docs_path = "https://raw.githubusercontent.com/microsoft/autogen/main/README.md"
+     autogen.ChatCompletion.start_logging()
+
+     assistant = RetrieveAssistantAgent(
+         name="assistant",
+         system_message="You are a helpful assistant.",
+     )
+
+     ragproxyagent = RetrieveUserProxyAgent(
+         name="ragproxyagent",
+         human_input_mode="NEVER",
+         max_consecutive_auto_reply=5,
+         retrieve_config={
+             "task": "code",
+             "docs_path": docs_path,
+             "chunk_token_size": 2000,
+             "model": _config_list[0]["model"],
+             "client": chromadb.PersistentClient(path="/tmp/chromadb"),
+             "embedding_model": "all-mpnet-base-v2",
+             "customized_prompt": PROMPT_CODE,
+         },
+     )
+
+     return assistant, ragproxyagent
+
+
+ def initiate_chat(config_list, problem, queue, n_results=3):
+     global assistant, ragproxyagent
+     if isinstance(config_list, gr.State):
+         _config_list = config_list.value
+     else:
+         _config_list = config_list
+     if len(_config_list[0].get("api_key", "")) < 2:
+         queue.put(
+             ["Hi, nice to meet you! Please enter your API keys in the text boxes below."]
+         )
+         return
+     else:
+         llm_config = (
+             {
+                 "request_timeout": TIMEOUT,
+                 # "seed": 42,
+                 "config_list": _config_list,
+                 "use_cache": False,
+             },
+         )
+         assistant.llm_config.update(llm_config[0])
+     assistant.reset()
+     try:
+         ragproxyagent.initiate_chat(
+             assistant, problem=problem, silent=False, n_results=n_results
+         )
+         messages = ragproxyagent.chat_messages
+         messages = [messages[k] for k in messages.keys()][0]
+         messages = [m["content"] for m in messages if m["role"] == "user"]
+         print("messages: ", messages)
+     except Exception as e:
+         messages = [str(e)]
+     queue.put(messages)
+
+
+ def chatbot_reply(input_text):
+     """Run initiate_chat in a separate process and relay its messages back through a queue."""
+     queue = mp.Queue()
+     process = mp.Process(
+         target=initiate_chat,
+         args=(config_list, input_text, queue),
+     )
+     process.start()
+     try:
+         # process.join(TIMEOUT+2)
+         messages = queue.get(timeout=TIMEOUT)
+     except Exception as e:
+         messages = [
+             str(e)
+             if len(str(e)) > 0
+             else "Invalid Request to OpenAI, please check your API keys."
+         ]
+     finally:
+         try:
+             process.terminate()
+         except:
+             pass
+     return messages
+
+
+ def get_description_text():
+     return """
+     # Microsoft AutoGen: Retrieve Chat Demo
+
+     This demo shows how to use the RetrieveUserProxyAgent and RetrieveAssistantAgent to build a chatbot.
+
+     #### [GitHub](https://github.com/microsoft/autogen) [Discord](https://discord.gg/pAbnFJrkgZ) [Blog](https://microsoft.github.io/autogen/blog/2023/10/18/RetrieveChat) [Paper](https://arxiv.org/abs/2308.08155)
+     """
+
+
+ global assistant, ragproxyagent
+
+ with gr.Blocks() as demo:
+     config_list, assistant, ragproxyagent = (
+         gr.State(
+             [
+                 {
+                     "api_key": "",
+                     "api_base": "",
+                     "api_type": "azure",
+                     "api_version": "2023-07-01-preview",
+                     "model": "gpt-35-turbo",
+                 }
+             ]
+         ),
+         None,
+         None,
+     )
+     assistant, ragproxyagent = initialize_agents(config_list)
+
+     gr.Markdown(get_description_text())
+     chatbot = gr.Chatbot(
+         [],
+         elem_id="chatbot",
+         bubble_full_width=False,
+         avatar_images=(None, (os.path.join(os.path.dirname(__file__), "autogen.png"))),
+         # height=600,
+     )
+
+     txt_input = gr.Textbox(
+         scale=4,
+         show_label=False,
+         placeholder="Enter text and press enter",
+         container=False,
+     )
+
+     with gr.Row():
+
+         def update_config(config_list):
+             global assistant, ragproxyagent
+             config_list = autogen.config_list_from_models(
+                 model_list=[os.environ.get("MODEL", "gpt-35-turbo")],
+             )
+             if not config_list:
+                 config_list = [
+                     {
+                         "api_key": "",
+                         "api_base": "",
+                         "api_type": "azure",
+                         "api_version": "2023-07-01-preview",
+                         "model": "gpt-35-turbo",
+                     }
+                 ]
+             llm_config = (
+                 {
+                     "request_timeout": TIMEOUT,
+                     # "seed": 42,
+                     "config_list": config_list,
+                 },
+             )
+             assistant.llm_config.update(llm_config[0])
+             ragproxyagent._model = config_list[0]["model"]
+             return config_list
+
+         def set_params(model, oai_key, aoai_key, aoai_base):
+             os.environ["MODEL"] = model
+             os.environ["OPENAI_API_KEY"] = oai_key
+             os.environ["AZURE_OPENAI_API_KEY"] = aoai_key
+             os.environ["AZURE_OPENAI_API_BASE"] = aoai_base
+             return model, oai_key, aoai_key, aoai_base
+
+         txt_model = gr.Dropdown(
+             label="Model",
+             choices=[
+                 "gpt-4",
+                 "gpt-35-turbo",
+                 "gpt-3.5-turbo",
+             ],
+             allow_custom_value=True,
+             value="gpt-35-turbo",
+             container=True,
+         )
+         txt_oai_key = gr.Textbox(
+             label="OpenAI API Key",
+             placeholder="Enter key and press enter",
+             max_lines=1,
+             show_label=True,
+             value=os.environ.get("OPENAI_API_KEY", ""),
+             container=True,
+             type="password",
+         )
+         txt_aoai_key = gr.Textbox(
+             label="Azure OpenAI API Key",
+             placeholder="Enter key and press enter",
+             max_lines=1,
+             show_label=True,
+             value=os.environ.get("AZURE_OPENAI_API_KEY", ""),
+             container=True,
+             type="password",
+         )
+         txt_aoai_base_url = gr.Textbox(
+             label="Azure OpenAI API Base",
+             placeholder="Enter base url and press enter",
+             max_lines=1,
+             show_label=True,
+             value=os.environ.get("AZURE_OPENAI_API_BASE", ""),
+             container=True,
+             type="password",
+         )
+
+     clear = gr.ClearButton([txt_input, chatbot])
+
+     with gr.Row():
+
+         def upload_file(file):
+             return update_context_url(file.name)
+
+         upload_button = gr.UploadButton(
+             "Click to upload a context file or enter a url in the right textbox",
+             file_types=[f".{i}" for i in TEXT_FORMATS],
+             file_count="single",
+         )
+
+         txt_context_url = gr.Textbox(
+             label="Enter the url to your context file and chat on the context",
+             info=f"File must be in the format of [{', '.join(TEXT_FORMATS)}]",
+             max_lines=1,
+             show_label=True,
+             value="https://raw.githubusercontent.com/microsoft/autogen/main/README.md",
+             container=True,
+         )
+
+     txt_prompt = gr.Textbox(
+         label="Enter your prompt for Retrieve Agent and press enter to replace the default prompt",
+         max_lines=40,
+         show_label=True,
+         value=PROMPT_CODE,
+         container=True,
+         show_copy_button=True,
+         layout={"height": 20},
+     )
+
+     def respond(message, chat_history, model, oai_key, aoai_key, aoai_base):
+         global config_list
+         set_params(model, oai_key, aoai_key, aoai_base)
+         config_list = update_config(config_list)
+         messages = chatbot_reply(message)
+         _msg = (
+             messages[-1]
+             if len(messages) > 0 and messages[-1] != "TERMINATE"
+             else messages[-2]
+             if len(messages) > 1
+             else "The context is not sufficient to answer the question. Please press `enter` in the context url textbox to make sure the context is activated for the chat."
+         )
+         chat_history.append((message, _msg))
+         return "", chat_history
+
+     def update_prompt(prompt):
+         ragproxyagent.customized_prompt = prompt
+         return prompt
+
+     def update_context_url(context_url):
+         global assistant, ragproxyagent
+
+         file_extension = Path(context_url).suffix
+         print("file_extension: ", file_extension)
+         if file_extension.lower() not in [f".{i}" for i in TEXT_FORMATS]:
+             return f"File must be in the format of {TEXT_FORMATS}"
+
+         if is_url(context_url):
+             try:
+                 file_path = get_file_from_url(
+                     context_url,
+                     save_path=os.path.join("/tmp", os.path.basename(context_url)),
+                 )
+             except Exception as e:
+                 return str(e)
+         else:
+             file_path = context_url
+             context_url = os.path.basename(context_url)
+
+         try:
+             shutil.rmtree("/tmp/chromadb/")
+         except:
+             pass
+         assistant, ragproxyagent = initialize_agents(config_list, docs_path=file_path)
+         return context_url
+
+     txt_input.submit(
+         respond,
+         [txt_input, chatbot, txt_model, txt_oai_key, txt_aoai_key, txt_aoai_base_url],
+         [txt_input, chatbot],
+     )
+     txt_prompt.submit(update_prompt, [txt_prompt], [txt_prompt])
+     txt_context_url.submit(update_context_url, [txt_context_url], [txt_context_url])
+     upload_button.upload(upload_file, upload_button, [txt_context_url])
+
+
+ if __name__ == "__main__":
+     demo.launch(share=True)
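
For readers who want to try the RetrieveChat pair outside the Gradio UI, here is a minimal sketch that distills the calls app.py already makes (RetrieveAssistantAgent, RetrieveUserProxyAgent, and initiate_chat with a problem string). It is not part of this commit: it assumes a pre-0.2 pyautogen with the retrievechat extra plus chromadb installed, loads credentials via autogen.config_list_from_json from an OAI_CONFIG_LIST file that this Space does not ship, and the example question is a placeholder.

```python
# Minimal sketch (assumptions noted above): the same RetrieveChat pair app.py builds,
# run once from a terminal instead of through Gradio callbacks.
import autogen
import chromadb
from autogen.agentchat.contrib.retrieve_assistant_agent import RetrieveAssistantAgent
from autogen.agentchat.contrib.retrieve_user_proxy_agent import RetrieveUserProxyAgent

# Hypothetical credentials file; app.py instead reads keys from the UI textboxes.
config_list = autogen.config_list_from_json("OAI_CONFIG_LIST")

assistant = RetrieveAssistantAgent(
    name="assistant",
    system_message="You are a helpful assistant.",
    llm_config={"request_timeout": 60, "config_list": config_list},
)
ragproxyagent = RetrieveUserProxyAgent(
    name="ragproxyagent",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=5,
    retrieve_config={
        "task": "code",
        "docs_path": "https://raw.githubusercontent.com/microsoft/autogen/main/README.md",
        "chunk_token_size": 2000,
        "client": chromadb.PersistentClient(path="/tmp/chromadb"),
        "embedding_model": "all-mpnet-base-v2",
    },
)

# Retrieve relevant chunks from docs_path, then chat until TERMINATE or the reply limit.
ragproxyagent.initiate_chat(
    assistant,
    problem="How do I install AutoGen?",  # placeholder question
    n_results=3,
)
```

In the Space itself the same wiring is driven by the Gradio callbacks above: respond re-reads the keys from the textboxes on every turn, and update_context_url rebuilds the chromadb index whenever the context file or URL changes.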