import os
import gradio as gr
import pinecone
from gpt_index import GPTIndexMemory, GPTPineconeIndex
from langchain.agents import Tool
from langchain.chains.conversation.memory import ConversationBufferMemory
from langchain import OpenAI
from langchain.agents import initialize_agent
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
PINECONE_API_KEY = os.environ.get("PINECONE_API_KEY")
PINECONE_ENV = os.environ.get("PINECONE_ENV")

# Initialise the (pre-3.0) Pinecone client with the env credentials, then wrap the existing index with GPT Index
pinecone.init(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
pindex = pinecone.Index("sethgodin")
pinedex = GPTPineconeIndex([], pinecone_index=pindex)
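
# Expose the Pinecone-backed index as a LangChain tool the agent can call to answer author questions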
tools = [
    Tool(
        name="GPT Index",
        func=lambda q: str(pinedex.query(q)),
        description="useful for when you want to answer questions about the author. The input to this tool should be a complete English sentence.",
        return_direct=True,
    ),
]
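
# Conversational ReAct agent with buffer memory so follow-up turns keep context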
memory = ConversationBufferMemory(memory_key="chat_history")
llm = OpenAI(temperature=0)
agent_chain = initialize_agent(tools, llm, agent="conversational-react-description", memory=memory)
def predict(input, history=[]):
    # Run the agent on the new message and append both sides of the exchange to the history
    response = agent_chain.run(input=input)
    history.append(input)
    history.append(response)
    # Pair the flat history into (user, bot) tuples for the Chatbot component
    messages = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)]
    return messages, history
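
# Minimal Gradio chat UI: a Chatbot display, hidden state, and a textbox that submits on Enter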
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    state = gr.State([])
    with gr.Row():
        txt = gr.Textbox(show_label=False, placeholder="Enter text and press enter").style(container=False)
    txt.submit(predict, [txt, state], [chatbot, state])

demo.launch()