use gr chatbot (#5)
use gr chatbot (c23f3c29a6a5fa717023c1c6131e05bc78a1fe09)
Co-authored-by: AK <[email protected]>
app.py
CHANGED
@@ -11,8 +11,8 @@ MODELS = [
     "Meta-Llama-3.1-8B-Instruct"
 ]

-def create_client(api_key
-    if api_key
+def create_client(api_key=None):
+    if api_key:
         openai.api_key = api_key
         openai.api_base = "https://api.sambanova.ai/v1"  # Fixed Base URL
     else:
@@ -48,11 +48,11 @@ def respond(message, chat_history, model, system_prompt, thinking_budget, api_key):
         response = completion.choices[0].message['content']
         thinking_time = time.time() - start_time
         print("Response received from OpenAI API.")
+        return response, thinking_time
     except Exception as e:
         error_message = f"Error: {str(e)}"
         print(error_message)
+        return error_message, time.time() - start_time

 def parse_response(response):
     answer_match = re.search(r'<answer>(.*?)</answer>', response, re.DOTALL)
@@ -67,29 +67,23 @@ def parse_response(response):

 def process_chat(message, history, model, system_prompt, thinking_budget, api_key):
     print(f"Received message: {message}")
+    if not api_key:
+        print("API key missing")
+        return history + [("System", "Please provide your API Key before starting the chat.")]

     try:
         formatted_system_prompt = system_prompt.format(budget=thinking_budget)
     except KeyError as e:
         error_msg = f"System prompt missing placeholder: {str(e)}"
         print(error_msg)
-        return error_msg
+        return history + [("System", error_msg)]

-    thinking_time = 0
+    response, thinking_time = respond(message, history, model, formatted_system_prompt, thinking_budget, api_key)

-    full_response = response
-    thinking_time = elapsed_time
+    if response.startswith("Error:"):
+        return history + [("System", response)]

-    return full_response
-    answer, reflection, steps = parse_response(full_response)
+    answer, reflection, steps = parse_response(response)

     formatted_response = f"**Answer:** {answer}\n\n**Reflection:** {reflection}\n\n**Thinking Steps:**\n"
     for i, step in enumerate(steps, 1):
@@ -98,8 +92,7 @@ def process_chat(message, history, model, system_prompt, thinking_budget, api_key):
     formatted_response += f"\n**Thinking time:** {thinking_time:.2f} s"

     print(f"Appended response: {formatted_response}")
-    history
-    return formatted_response
+    return history + [(message, formatted_response)]

 # Define the default system prompt
 default_system_prompt = """
@@ -169,42 +162,42 @@ with gr.Blocks() as demo:
     system_prompt = gr.Textbox(
         label="System Prompt",
         value=default_system_prompt,
-        lines=
+        lines=15,
+        interactive=True
     )

+    with gr.Row():
+        msg = gr.Textbox(
+            label="Type your message here...",
+            placeholder="Enter your message..."
+        )
+        submit = gr.Button("Submit")
+        clear = gr.Button("Clear Chat")

-        label="
-        lines=20,
-        interactive=False
+    chatbot = gr.Chatbot(
+        label="Chat History"
     )

-    # Initialize chat history
-    chat_history = []
+    # Initialize chat history as a Gradio state
+    chat_history = gr.State([])

     def handle_submit(message, history, model, system_prompt, thinking_budget, api_key):
-        return
+        updated_history = process_chat(message, history, model, system_prompt, thinking_budget, api_key)
+        return updated_history, ""

     def handle_clear():
-        return ""
+        return [], ""

     submit.click(
         handle_submit,
-        inputs=[msg,
-        outputs=
+        inputs=[msg, chat_history, model, system_prompt, thinking_budget, api_key],
+        outputs=[chatbot, msg]
     )

     clear.click(
+        handle_clear,
         inputs=None,
-        outputs=
+        outputs=[chatbot, msg]
     )

 demo.launch()
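For context, the pattern this commit moves to — a `gr.Chatbot` fed tuple-based `(user, bot)` history, a `gr.State` holding that list, and click handlers that return the updated history plus an empty string to clear the textbox — looks roughly like the sketch below. This is a minimal, hypothetical stand-alone example, not the Space's actual code: `respond_stub` stands in for `process_chat`, and it assumes a Gradio version that still accepts the tuple-style Chatbot history used here.

```python
import gradio as gr

# Hypothetical stand-in for the Space's process_chat: it takes the new message plus
# the tuple-based history and returns that history with one (user, bot) pair appended.
def respond_stub(message, history):
    reply = f"**Answer:** you said: {message}"
    return history + [(message, reply)]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Chat History")   # renders a list of (user, bot) tuples
    chat_history = gr.State([])                  # per-session storage for that list
    msg = gr.Textbox(label="Type your message here...")
    submit = gr.Button("Submit")
    clear = gr.Button("Clear Chat")

    def handle_submit(message, history):
        updated = respond_stub(message, history)
        # Outputs: refresh the Chatbot, persist the list in State, clear the textbox.
        return updated, updated, ""

    submit.click(
        handle_submit,
        inputs=[msg, chat_history],
        outputs=[chatbot, chat_history, msg],
    )
    clear.click(
        lambda: ([], [], ""),
        inputs=None,
        outputs=[chatbot, chat_history, msg],
    )

demo.launch()
```

The `gr.State` value is per-session, so the sketch also routes the updated list back out to `chat_history`, letting later turns see earlier messages.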