import os

import pinecone
import streamlit as st
from streamlit_chat import message as st_message
from gpt_index import GPTIndexMemory, GPTPineconeIndex
from langchain import OpenAI
from langchain.agents import Tool, initialize_agent

# API credentials are read from the environment; LangChain's OpenAI wrapper
# picks up OPENAI_API_KEY on its own.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
PINECONE_API_KEY = os.environ.get("PINECONE_API_KEY")
PINECONE_ENV = os.environ.get("PINECONE_ENV")

# Connect to the existing Pinecone index and wrap it in a GPTPineconeIndex
# so GPT Index can query the stored embeddings.
pinecone.init(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
pointing_pinecone_index = pinecone.Index("sethgodin")
main_index = GPTPineconeIndex([], pinecone_index=pointing_pinecone_index)

# Expose the index to the agent as a LangChain tool.
tools = [
    Tool(
        name="GPT Index",
        func=lambda q: str(main_index.query(q)),
        description=(
            "useful for when you want to answer questions about the author. "
            "The input to this tool should be a complete English sentence."
        ),
        return_direct=True,
    ),
]

# Conversation memory backed by the same index, plus a deterministic LLM.
memory = GPTIndexMemory(
    index=main_index,
    memory_key="chat_history",
    query_kwargs={"response_mode": "compact"},
)
llm = OpenAI(temperature=0)
agent_chain = initialize_agent(
    tools, llm, agent="conversational-react-description", memory=memory
)

# Streamlit chat UI: keep the running conversation in session state.
if "history" not in st.session_state:
    st.session_state.history = []


def generate_answer():
    """Send the user's message to the agent and record both sides of the exchange."""
    user_message = st.session_state.input_text
    message_bot = agent_chain.run(input=user_message)
    st.session_state.history.append({"message": user_message, "is_user": True})
    st.session_state.history.append({"message": message_bot, "is_user": False})


st.text_input("Response", key="input_text", on_change=generate_answer)

for chat in st.session_state.history:
    st_message(**chat)
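To run this, export OPENAI_API_KEY, PINECONE_API_KEY, and PINECONE_ENV first; the script assumes the "sethgodin" Pinecone index already exists and contains the embedded documents. Assuming the file is saved as app.py (the filename is not given above), launch it with streamlit run app.py.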