Update app.py
app.py
CHANGED
@@ -1,6 +1,7 @@
 import re
 import os
 import panel as pn
+from io import StringIO
 from panel.io.mime_render import exec_with_return
 from llama_index import (
     VectorStoreIndex,
@@ -13,20 +14,16 @@ from llama_index.chat_engine import ContextChatEngine
 from llama_index.embeddings import OpenAIEmbedding
 from llama_index.llms import OpenAI
 
-pn.extension("codeeditor", sizing_mode="stretch_width")
 
 SYSTEM_PROMPT = (
-    "You are a
-    "
-    "
-    "
-    "with
-    "
-    "
-    "
-    "in code fences (```python). You absolutely "
-    "must have `hvplot_obj` as the last line of code. FYI,"
-    "Data columns: ['sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'species']"
+    "You are a data visualization pro and expert in HoloViz hvplot + holoviews. "
+    "Your primary goal is to assist the user in editing based on user requests using best practices. "
+    "Simply provide code in code fences (```python). You must have `hvplot_obj` as the last line of code. "
+    "Note, data columns are ['sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'species'] and "
+    "hvplot is built on top of holoviews--anything you can do with holoviews, you can do "
+    "with hvplot. First try to use hvplot **kwargs instead of opts, e.g. `legend='top_right'` "
+    "instead of `opts(legend_position='top_right')`. If you need to use opts, you can use "
+    "concise version, e.g. `opts(xlabel='Petal Length')` vs `opts(hv.Opts(xlabel='Petal Length'))`"
 )
 
 USER_CONTENT_FORMAT = """
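Note: the rewritten SYSTEM_PROMPT steers the model toward plain hvplot keyword arguments before falling back to .opts(). A rough sketch of the two styles it contrasts, on a hypothetical frame with the column names the prompt lists (assumes hvplot and pandas are installed; this is not code from the commit):

```python
import hvplot.pandas  # noqa: F401  (registers the .hvplot accessor on DataFrames)
import pandas as pd

# Hypothetical stand-in for the app's data, using the columns named in the prompt.
df = pd.DataFrame({
    "petal_length": [1.4, 4.7, 6.0],
    "petal_width": [0.2, 1.4, 2.5],
    "species": ["setosa", "versicolor", "virginica"],
})

# Preferred style per the prompt: hvplot **kwargs.
hvplot_obj = df.hvplot.scatter("petal_length", "petal_width", by="species", legend="top_right")

# Allowed fallback: the concise .opts() form.
hvplot_obj = df.hvplot.scatter("petal_length", "petal_width", by="species").opts(legend_position="top_right")

hvplot_obj  # last line, as the prompt requires
```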
@@ -48,6 +45,14 @@ hvplot_obj
 """.strip()
 
 
+def exception_handler(exc):
+    if retries.value == 0:
+        chat_interface.send(f"Can't figure this out: {exc}", respond=False)
+        return
+    chat_interface.send(f"Fix this error:\n```python\n{exc}\n```")
+    retries.value = retries.value - 1
+
+
 def init_llm(event):
     api_key = event.new
     if not api_key:
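Note: exception_handler is the self-correction loop. When the generated code fails, the error text is sent back into the chat so the LLM gets a chance to repair its own plot, and a hidden retries counter (added further down in this diff) caps how many repair rounds are allowed. A minimal, Panel-free sketch of the same bounded feedback loop; run_code and ask_llm_to_fix are hypothetical stand-ins for exec_with_return and the chat callback:

```python
def run_with_retries(code, run_code, ask_llm_to_fix, max_retries=2):
    """Execute `code`; on failure, ask the model for a fix at most `max_retries` times."""
    retries = max_retries
    while True:
        try:
            return run_code(code)
        except Exception as exc:
            if retries == 0:
                # mirrors chat_interface.send("Can't figure this out: ...", respond=False)
                raise RuntimeError(f"Can't figure this out: {exc}") from exc
            # mirrors chat_interface.send("Fix this error: ..."), which triggers another LLM turn
            code = ask_llm_to_fix(f"Fix this error:\n{exc}")
            retries -= 1
```

In app.py the counter lives in an invisible IntInput rather than a local variable because exception_handler, the chat callback, and update_plot all need to share it.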
@@ -110,13 +115,18 @@ def callback(content: str, user: str, instance: pn.chat.ChatInterface):
     if llm_code.splitlines()[-1].strip() != "hvplot_obj":
         llm_code += "\nhvplot_obj"
     code_editor.value = llm_code
+    retries.value = 2
 
 
 def update_plot(event):
-
-    hvplot_pane.object = exec_with_return(event.new)
-
-
+    with StringIO() as buf:
+        hvplot_pane.object = exec_with_return(event.new, stderr=buf)
+        buf.seek(0)
+        errors = buf.read()
+        if errors:
+            exception_handler(errors)
+
+pn.extension("codeeditor", sizing_mode="stretch_width", exception_handler=exception_handler)
 
 
 # instantiate widgets and panes
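Note: update_plot now runs the editor contents through exec_with_return with stderr pointed at an in-memory buffer, so execution errors become text that can be handed to exception_handler, and passing exception_handler to pn.extension routes uncaught callback exceptions through the same path. A standard-library-only sketch of the capture idea (not Panel's implementation):

```python
import traceback
from contextlib import redirect_stderr
from io import StringIO

def run_and_capture_stderr(code: str) -> str:
    """Run `code` in a throwaway namespace; return whatever was written to stderr."""
    with StringIO() as buf, redirect_stderr(buf):
        try:
            exec(code, {})
        except Exception:
            traceback.print_exc()  # the traceback lands in `buf` via redirect_stderr
        return buf.getvalue()

errors = run_and_capture_stderr("1 / 0")
print(bool(errors))  # True: the ZeroDivisionError traceback was captured
```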
@@ -147,6 +157,8 @@ code_editor = pn.widgets.CodeEditor(
     language="python",
     sizing_mode="stretch_both",
 )
+retries = pn.widgets.IntInput(value=2, visible=False)
+error = pn.widgets.StaticText(visible=False)
 
 # watch for code changes
 api_key_input.param.watch(init_llm, "value")
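Note: retries and error are kept invisible, so they act as small pieces of shared, watchable state rather than UI; only retries is read and written elsewhere in this diff, and error looks unused here. A rough sketch of the pattern (assumes Panel; names mirror app.py, but the watcher is illustrative and not part of the commit):

```python
import panel as pn

pn.extension()

# Invisible widget doubling as a shared counter, like `retries` in app.py.
retries = pn.widgets.IntInput(value=2, visible=False)

def on_change(event):
    print(f"retries: {event.old} -> {event.new}")

retries.param.watch(on_change, "value")  # illustrative: react when the value changes
retries.value -= 1                       # prints "retries: 2 -> 1"
```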
@@ -170,4 +182,5 @@ template = pn.template.FastListTemplate(
     header_background="#fd7000",
     title="Chat with Plot"
 )
+
 template.servable()