from omegaconf import OmegaConf
from query import VectaraQuery
import streamlit as st
import os
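
# Debate topics offered in the sidebar, each mapped to the Vectara corpus ID that is queried for that topic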
topics = {
    'Standardized testing in education': '266',
    'Ethical implications of genetic editing': '267',
    'Social media and mental health': '268'
}

def launch_bot():
    def generate_response(question, cfg):
        # Ask Vectara for the bot's next argument, given its side, the topic, and the debate style
        response = vq.submit_query(question, cfg.bot_role, cfg.topic, cfg.style)
        return response

    def reset():
        # Clear the chat back to the bot's opening prompt
        st.session_state.messages = [{"role": "assistant", "content": "Please make your opening statement.", "avatar": '🤖'}]

    # One-time initialization: read Vectara credentials from the environment and create the query client
    if 'cfg' not in st.session_state:
        cfg = OmegaConf.create({
            'customer_id': str(os.environ['VECTARA_CUSTOMER_ID']),
            'corpus_id': list(topics.values())[0],
            'api_key': str(os.environ['VECTARA_API_KEY']),
            'prompt_name': 'vectara-experimental-summary-ext-2023-12-11-large',
        })
        st.session_state.cfg = cfg
        st.session_state.vq = VectaraQuery(cfg.api_key, cfg.customer_id, cfg.corpus_id, cfg.prompt_name)
        st.session_state.current_human_role = None
        st.session_state.current_topic = None
        st.session_state.style = None

    cfg = st.session_state.cfg
    vq = st.session_state.vq

    st.set_page_config(page_title="Debate Bot", layout="wide")

    # Sidebar: pick your side, the topic, and the debate style
    with st.sidebar:
        st.markdown("## Welcome to Debate Bot.\n\n\n")

        role_options = ['in opposition to', 'in support of']
        cfg.human_role = st.selectbox('You are:', role_options)
        cfg.bot_role = role_options[1] if cfg.human_role == role_options[0] else role_options[0]
        # Restart the chat whenever the user switches sides
        if st.session_state.current_human_role != cfg.human_role:
            st.session_state.current_human_role = cfg.human_role
            reset()

        # Point the query client at the corpus for the selected topic; restart the chat when it changes
        topic_options = list(topics.keys())
        cfg.topic = st.selectbox('The topic:', topic_options)
        vq.corpus_id = topics[cfg.topic]
        if st.session_state.current_topic != cfg.topic:
            st.session_state.current_topic = cfg.topic
            reset()

        st.markdown("\n")
        debate_styles = ['Lincoln-Douglas', 'Spontaneous Argumentation', 'Parliamentary debates']
        cfg.style = st.selectbox('Debate Style:', debate_styles)
        if st.session_state.style != cfg.style:
            st.session_state.style = cfg.style
            reset()

        st.markdown("\n\n")
        if st.button('Start Over'):
            reset()
st.markdown("---") | |
st.markdown( | |
"## How this works?\n" | |
"This app was built with [Vectara](https://vectara.com).\n\n" | |
"It demonstrates the use of the Chat functionality along with custom prompts and GPT4-Turbo (as part of our [Scale plan](https://vectara.com/pricing/))" | |
) | |
st.markdown("---") | |
if "messages" not in st.session_state.keys(): | |
reset() | |
# Display chat messages | |
for message in st.session_state.messages: | |
with st.chat_message(message["role"], avatar=message["avatar"]): | |
st.write(message["content"]) | |

    # User-provided prompt
    if prompt := st.chat_input():
        st.session_state.messages.append({"role": "user", "content": prompt, "avatar": '🧑‍💻'})
        with st.chat_message("user", avatar='🧑‍💻'):
            st.write(prompt)

    # Generate a new response if last message is not from assistant
    if st.session_state.messages[-1]["role"] != "assistant":
        with st.chat_message("assistant", avatar='🤖'):
            stream = generate_response(prompt, cfg)
            response = st.write_stream(stream)
            message = {"role": "assistant", "content": response, "avatar": '🤖'}
            st.session_state.messages.append(message)


if __name__ == "__main__":
    launch_bot()
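

# ---------------------------------------------------------------------------
# Reference sketch (not part of this file): query.py is not shown on this
# page. The class below only illustrates the interface app.py assumes from
# VectaraQuery -- the constructor arguments, a corpus_id attribute that app.py
# reassigns when the topic changes, and a submit_query() that returns
# something st.write_stream() can consume. The method body is a placeholder
# generator, not Vectara's actual API call.
# ---------------------------------------------------------------------------
class _VectaraQuerySketch:
    def __init__(self, api_key: str, customer_id: str, corpus_id: str, prompt_name: str):
        self.api_key = api_key
        self.customer_id = customer_id
        self.corpus_id = corpus_id        # switched by app.py via `vq.corpus_id = topics[cfg.topic]`
        self.prompt_name = prompt_name    # custom summarization prompt used for the debate persona

    def submit_query(self, query_str: str, bot_role: str, topic: str, style: str):
        # A real implementation would query the Vectara corpus identified by
        # self.corpus_id, instructing the summarizer (via self.prompt_name) to
        # argue `bot_role` the given `topic` in the given debate `style`, and
        # stream the generated text back. Here we just yield placeholder
        # chunks so st.write_stream() has an iterable to render.
        placeholder = (
            f"(placeholder) Arguing {bot_role} '{topic}' in {style} format, "
            f"in response to: {query_str}"
        )
        for word in placeholder.split():
            yield word + " "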