Update app.py
app.py CHANGED
@@ -5,6 +5,8 @@ import uuid
 import concurrent.futures
 from requests.exceptions import ChunkedEncodingError
 import os
+from dotenv import load_dotenv
+load_dotenv()

 # Define the endpoints
 host = os.getenv("BACKEND_URL")
@@ -92,21 +94,23 @@ tarot_cards = [
     "Queen of Wands",
     "King of Wands",
 ]
+
 # Define the request payload structure
 class ChatRequest:
-    def __init__(self, session_id, messages, model_id, temperature, seer_name):
+    def __init__(self, session_id, messages, model_id, temperature, seer_name, seer_personality):
         self.session_id = session_id
         self.messages = messages
         self.model_id = model_id
         self.temperature = temperature
         self.seer_name = seer_name
+        self.seer_personality = seer_personality

 class ChatRequestWithMemory(ChatRequest):
-    def __init__(self, session_id, messages, model_id, temperature, seer_name, summary_threshold):
-        super().__init__(session_id, messages, model_id, temperature, seer_name)
+    def __init__(self, session_id, messages, model_id, temperature, seer_name, seer_personality, summary_threshold):
+        super().__init__(session_id, messages, model_id, temperature, seer_name, seer_personality)
         self.summary_threshold = summary_threshold

-def compare_chatbots(session_id, messages, model_id, temperature, seer_name, summary_threshold, tarot_card):
+def compare_chatbots(session_id, messages, model_id, temperature, seer_name, seer_personality, summary_threshold, tarot_card):
     # Convert messages list to a single string
     # Prepare the payloads
     print("tarot_card", tarot_card)
@@ -117,6 +121,7 @@ def compare_chatbots(session_id, messages, model_id, temperature, seer_name, sum
         "temperature": temperature,
         "tarot_card": tarot_card,
         "seer_name": seer_name,
+        "seer_personality": seer_personality,
     })
     payload_memory = json.dumps({
         "session_id": session_id + "_memory",
@@ -124,6 +129,7 @@ def compare_chatbots(session_id, messages, model_id, temperature, seer_name, sum
         "model_id": model_id,
         "temperature": temperature,
         "seer_name": seer_name,
+        "seer_personality": seer_personality,
         "tarot_card": tarot_card,
         "summary_threshold": summary_threshold,
     })
@@ -154,8 +160,8 @@ def compare_chatbots(session_id, messages, model_id, temperature, seer_name, sum
     return response_default_text, response_memory_text

 # Function to handle chat interaction
-def chat_interaction(session_id, message, model_id, temperature, seer_name, summary_threshold, chat_history_default, chat_history_memory, tarot_card):
-    response_default, response_memory = compare_chatbots(session_id, message, model_id, temperature, seer_name, summary_threshold, tarot_card)
+def chat_interaction(session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, chat_history_default, chat_history_memory, tarot_card):
+    response_default, response_memory = compare_chatbots(session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, tarot_card)

     chat_history_default.append((message, response_default))
     chat_history_memory.append((message, response_memory))
@@ -214,8 +220,9 @@ with gr.Blocks() as demo:
         model_id = gr.Dropdown(label="Model ID", choices=model_id_choices, value=model_id_choices[0])
         temperature = gr.Slider(0, 1, step=0.1, label="Temperature", value=0.5)
         seer_name = gr.Textbox(label="Seer Name", value="แม่หมอแพตตี้")
+        seer_personality = gr.Textbox(label="Seer Personality", value="You are a friend who is always ready to help.")
         tarot_card = gr.Dropdown(label="Tarot Card", value=[], choices=tarot_cards, multiselect=True)
-        summary_threshold = gr.Number(label="Summary Threshold", value=
+        summary_threshold = gr.Number(label="Summary Threshold", value=7)

     with gr.Accordion("View History of Memory Chatbot", open=False):
         session_id_memory = gr.Textbox(label="Session ID", value=f"{session_id_default}_memory")
@@ -223,18 +230,18 @@ with gr.Blocks() as demo:
         chat_history_json = gr.JSON(label="Chat History") # New JSON field

     submit_button.click(
-        lambda session_id, message, model_id, temperature, seer_name, summary_threshold, chatbot_default, chatbot_memory, tarot_card: chat_interaction(
-            session_id, message, model_id, temperature, seer_name, summary_threshold, chatbot_default, chatbot_memory, tarot_card
+        lambda session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, chatbot_default, chatbot_memory, tarot_card: chat_interaction(
+            session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, chatbot_default, chatbot_memory, tarot_card
         ),
-        inputs=[session_id, message, model_id, temperature, seer_name, summary_threshold, chatbot_default, chatbot_memory, tarot_card],
+        inputs=[session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, chatbot_default, chatbot_memory, tarot_card],
         outputs=[message, chatbot_default, chatbot_memory, tarot_card]
     )

     message.submit(
-        lambda session_id, message, model_id, temperature, seer_name, summary_threshold, chatbot_default, chatbot_memory, tarot_card: chat_interaction(
-            session_id, message, model_id, temperature, seer_name, summary_threshold, chatbot_default, chatbot_memory, tarot_card
+        lambda session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, chatbot_default, chatbot_memory, tarot_card: chat_interaction(
+            session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, chatbot_default, chatbot_memory, tarot_card
         ),
-        inputs=[session_id, message, model_id, temperature, seer_name, summary_threshold, chatbot_default, chatbot_memory, tarot_card],
+        inputs=[session_id, message, model_id, temperature, seer_name, seer_personality, summary_threshold, chatbot_default, chatbot_memory, tarot_card],
         outputs=[message, chatbot_default, chatbot_memory, tarot_card]
     )

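For reference, a minimal sketch of how the updated request classes accept the new seer_personality field. The class definitions are repeated from the diff above; the example argument values (session id, message, model id) are hypothetical and only illustrate the new constructor signatures.

# Sketch only (not part of the commit): the request classes as shown in the diff,
# plus one example instantiation. Argument values are hypothetical.
class ChatRequest:
    def __init__(self, session_id, messages, model_id, temperature, seer_name, seer_personality):
        self.session_id = session_id
        self.messages = messages
        self.model_id = model_id
        self.temperature = temperature
        self.seer_name = seer_name
        self.seer_personality = seer_personality

class ChatRequestWithMemory(ChatRequest):
    def __init__(self, session_id, messages, model_id, temperature, seer_name, seer_personality, summary_threshold):
        super().__init__(session_id, messages, model_id, temperature, seer_name, seer_personality)
        self.summary_threshold = summary_threshold

request_memory = ChatRequestWithMemory(
    session_id="demo-session",        # hypothetical session id
    messages="ดูดวงความรักให้หน่อย",     # hypothetical user message
    model_id="model-a",               # hypothetical model id
    temperature=0.5,
    seer_name="แม่หมอแพตตี้",
    seer_personality="You are a friend who is always ready to help.",
    summary_threshold=7,
)
print(request_memory.seer_personality)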