import streamlit as st
import uuid
import json
from components import PersonalIndexClient, LLMClient, WebSearcher
st.title("Talk with Mingzhe")
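# Initialize the LLM, the Pinecone-backed personal index, and the web search clients from Streamlit secrets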
llm_client = LLMClient(api_key=st.secrets["OPENAI_API_KEY"], model_name="gpt-4-turbo-preview")
pinecone_client = PersonalIndexClient(index_token=st.secrets["PINECONE_API_KEY"], embedding_token=st.secrets["OPENAI_API_KEY"], embedding_model_name='text-embedding-3-large', index_name='mingzhe')
web_searcher = WebSearcher(you_api_key=st.secrets["YOU_API_KEY"], bing_api_key=st.secrets["BING_API_KEY"])
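# First load of a session: create an empty chat history, a session id, and a default user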
if "messages" not in st.session_state:
st.session_state.messages = []
st.session_state['sid'] = uuid.uuid1().hex
st.session_state['user'] = 'default'
# Display History
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# New Click
if prompt := st.chat_input("What's up?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
with st.chat_message("assistant"):
print(f"Prompt: {prompt}")
        memory = pinecone_client.query_conversation(messages=st.session_state.messages[:-5], user=st.session_state['user'], top_k=3)
        print(f"Memory: {memory}")
        web_query = llm_client.web_query_generator(query=prompt, history=st.session_state.messages)
        print(f"Web Query: {web_query}")
        web_result = web_searcher.query_bing(query=web_query) if web_query != 'None' else "None"
        # web_result = web_searcher.query_web_llm(query=prompt, num_web_results=10) if web_query != 'None' else "None"
        print(f"Web Result: {web_result}")
        stream = llm_client.response_generate(prompt, st.session_state.messages, memory, web_result)
        response = st.write_stream(stream)
        print(f"Response: {response}")
        st.session_state.messages.append({"role": "assistant", "content": response})
    # Update session in Pinecone
    pinecone_client.update_conversation(sid=st.session_state['sid'], messages=st.session_state.messages, user=st.session_state['user'])
    print("\n===========================\n")