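# SupportFlow demo: a Streamlit customer-support chatbot for a Home Depot-style scenario.
# The assistant answers questions with an Azure OpenAI chat model, can call tools
# (order-status lookup, human handoff) via function calling, and records escalations
# to a human agent in a Firestore 'handoffs' collection.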
import streamlit as st
import os
from openai import AzureOpenAI
from functions import call_function
import firebase_admin
from firebase_admin import credentials, firestore
st.title("SupportFlow Demo")
# Example prompt: when will my order be delivered?, [email protected] W123123
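# Function (tool) schemas exposed to the model. Only order-status lookup and the
# human-agent handoff are active; two product-lookup tools are left commented out.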
functions = [
    {
        "name": "lookup_order_status",
        "description": "Retrieves the status, location, etc. of an order based on **both** the email address and order number.",
        "parameters": {
            "type": "object",
            "properties": {
                "email_address": {
                    "type": "string",
                    "description": "The email address associated with the order"
                },
                "order_number": {
                    "type": "integer",
                    "description": "The order number."
                },
            },
            "required": ["email_address", "order_number"]
        }
    },
    # {
    #     "name": "lookup_product",
    #     "description": "Returns a detailed list of products based on a product query.",
    #     "parameters": {
    #         "type": "object",
    #         "properties": {
    #             "query": {
    #                 "type": "string",
    #                 "description": "Product query to search for like drills, lights, or hammers"
    #             },
    #         },
    #         "required": ["query"]
    #     }
    # },
    # {
    #     "name": "get_product_listing",
    #     "description": "Returns information about the product based on the SKU.",
    #     "parameters": {
    #         "type": "object",
    #         "properties": {
    #             "sku": {
    #                 "type": "integer",
    #                 "description": "Product sku to search for like 123123"
    #             },
    #         },
    #         "required": ["sku"]
    #     }
    # },
    {
        "name": "refer_to_human_agent",
        "description": "Use this to refer the customer's question to a human agent. You should only call this "
                       "function if there is no way for you to answer their question.",
        "parameters": {
            "type": "object",
            "properties": {
                "conversation_summary": {
                    "type": "string",
                    "description": "A short summary of the current conversation so the human agent can quickly get up "
                                   "to speed. Make sure you include all relevant details."
                },
            },
            "required": ["conversation_summary"]
        }
    }
]
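# `call_function` is defined in functions.py (not included in this file). Judging by how
# its result is used below, it is expected to execute the requested tool and return a
# chat message dict along the lines of:
#   {"role": "function", "name": "<function name>", "content": "<result text>"}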
cred = credentials.Certificate("supportflow-4851d-firebase-adminsdk-cdrzu-bf620a4b52.json")
try:
    app = firebase_admin.initialize_app(cred)
except Exception:
    # Streamlit reruns this script on every interaction, so the app may already be initialized.
    pass
db = firestore.client()
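# Firestore acts as the handoff channel: when the model escalates, the conversation
# summary and message history are written to the 'handoffs' collection further below.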
client = AzureOpenAI(
    api_key=os.environ['OPENAI_API_KEY'],
    api_version="2023-07-01-preview",
    azure_endpoint=os.environ['AZURE_ENDPOINT'],
)
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "gpt-35-turbo"

if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "system", "content": "You are a helpful customer support agent for The Home "
                                                               "Depot. Your goal is to answer as many questions as "
                                                               "possible without escalating to a human agent. "
                                                               "However, if necessary, you can refer the customer to "
                                                               "a human agent if you do not know the answer to their "
                                                               "question. For example, you can help users track their orders, but you **cannot** help with returns."}]
# Replay the visible chat history; system and function messages are not rendered.
for message in st.session_state.messages:
    if message["role"] in ("assistant", "user"):
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
if prompt := st.chat_input("How can we help you today?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant", avatar="🏠"):  # avatar=st.image('Home-Depot-Logo.png', width=50)):
        message_placeholder = st.empty()
        full_message = ""
        func_call = {
            "name": None,
            "arguments": "",
        }
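        # Stream the first completion, accumulating assistant text and any function-call
        # name/arguments that arrive as incremental deltas.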
        for response in client.chat.completions.create(
            model=st.session_state["openai_model"],
            messages=[
                {"role": m["role"], "content": m["content"], "name": m["name"]} if "name" in m else
                {"role": m["role"], "content": m["content"]}
                for m in st.session_state.messages
            ],
            functions=functions,
            function_call="auto",
            stream=True,
        ):
            if len(response.choices) > 0:
                delta = response.choices[0].delta
                full_message += (delta.content or "")
                if delta.function_call is not None:
                    if delta.function_call.name is not None:
                        func_call["name"] = delta.function_call.name
                    if delta.function_call.arguments is not None:
                        func_call["arguments"] += delta.function_call.arguments
                message_placeholder.markdown(full_message + "▌")  # trailing cursor while streaming
        if func_call["name"] is not None and func_call["arguments"] != "":
            print("Function generation requested, calling function")
            function_response = call_function(st.session_state.messages, func_call)
            print("function response")
            print(function_response)
            st.session_state.messages.append(function_response)
            if function_response["name"] == "refer_to_human_agent":
                print("connect to human agent")
                print(function_response["name"])
                st.info('You will be connected with an agent shortly', icon="ℹ️")
                # Get the document to update
                doc_ref = db.collection('handoffs').document('conversation')
                # Update the document
                doc_ref.update({'summary': str(function_response["content"]), 'message_history': st.session_state.messages})
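                # Note: update() assumes the 'handoffs/conversation' document already exists;
                # doc_ref.set(..., merge=True) would create it on first use instead.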
            else:
                # The model asked for a data-lookup function; stream a follow-up reply
                # that can use the function result just appended to the history.
                message_placeholder = st.empty()
                full_message = ""
                for response in client.chat.completions.create(
                    model=st.session_state["openai_model"],
                    messages=[
                        {"role": m["role"], "content": m["content"], "name": m["name"]} if "name" in m else
                        {"role": m["role"], "content": m["content"]}
                        for m in st.session_state.messages
                    ],
                    functions=functions,
                    function_call="auto",
                    stream=True,
                ):
                    if len(response.choices) > 0:
                        delta = response.choices[0].delta
                        full_message += (delta.content or "")
                        if delta.function_call is not None:
                            if delta.function_call.name is not None:
                                func_call["name"] = delta.function_call.name
                            if delta.function_call.arguments is not None:
                                func_call["arguments"] += delta.function_call.arguments
                        message_placeholder.markdown(full_message + "▌")  # trailing cursor while streaming
        message_placeholder.markdown(full_message)
        st.session_state.messages.append({"role": "assistant", "content": full_message})
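# To run locally (assuming this file is saved as app.py):
#   streamlit run app.py
# Requires the OPENAI_API_KEY and AZURE_ENDPOINT environment variables and the
# Firebase service-account JSON referenced above.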