thoristhor committed on
Commit 22ec340 • 1 Parent(s): 1bafea9

Update app.py

Files changed (1)
  1. app.py +93 -30
app.py CHANGED
@@ -2,9 +2,7 @@ import os
 import gradio as gr
 import pinecone
 from gpt_index import GPTIndexMemory, GPTPineconeIndex
-import streamlit as st
 
-from streamlit_chat import message as st_message
 from langchain.agents import Tool
 from langchain.chains.conversation.memory import ConversationBufferMemory
 from langchain import OpenAI
@@ -15,31 +13,96 @@ PINECONE_API_KEY=os.environ.get("PINECONE_API_KEY")
 PINECONE_ENV=os.environ.get("PINECONE_ENV")
 
 pointing_pinecone_index=pinecone.Index("sethgodin")
-main_index = GPTPineconeIndex([],pinecone_index=pointing_pinecone_index)
-tools = [
-    Tool(
-        name = "GPT Index",
-        func=lambda q: str(main_index.query(q)),
-        description="useful for when you want to answer questions about the author. The input to this tool should be a complete english sentence.",
-        return_direct=True
-    ),
-]
-
-memory = GPTIndexMemory(index=main_index, memory_key="chat_history", query_kwargs={"response_mode": "compact"})
-llm=OpenAI(temperature=0)
-agent_chain = initialize_agent([], llm, agent="conversational-react-description", memory=memory)
-
-def generate_answer():
-    user_message=st.session_state.input_text
-    message_bot = agent_chain.run(input=user_message)
-    st.session_state.history.append({"message": user_message, "is_user": True})
-    st.session_state.history.append({"message": message_bot, "is_user": False})
-
-st.text_input("Response", key="input_text", on_change=generate_answer)
-
-for chat in st.session_state.history:
-    st_message(**chat)
-st.text_input("Response", key="input_text", on_change=generate_answer)
-
-for chat in st.session_state.history:
-    st_message(**chat)
+vectorstore=GPTPineconeIndex([],pinecone_index=pointing_pinecone_index)
+
+def set_openai_api_key(api_key: str):
+    """Set the api key and return chain.
+    If no api_key, then None is returned.
+    """
+    if api_key:
+        os.environ["OPENAI_API_KEY"] = api_key
+        chain = get_chain(vectorstore)
+        os.environ["OPENAI_API_KEY"] = ""
+        return chain
+
+
+class ChatWrapper:
+
+    def __init__(self):
+        self.lock = Lock()
+    def __call__(
+        self, api_key: str, inp: str, history: Optional[Tuple[str, str]], chain
+    ):
+        """Execute the chat functionality."""
+        self.lock.acquire()
+        try:
+            history = history or []
+            # If chain is None, that is because no API key was provided.
+            if chain is None:
+                history.append((inp, "Please paste your OpenAI key to use"))
+                return history, history
+            # Set OpenAI key
+            import openai
+            openai.api_key = api_key
+            # Run chain and append input.
+            output = chain({"question": inp, "chat_history": history})["answer"]
+            history.append((inp, output))
+        except Exception as e:
+            raise e
+        finally:
+            self.lock.release()
+        return history, history
+
+chat = ChatWrapper()
+
+block = gr.Blocks(css=".gradio-container {background-color: lightgray}")
+
+with block:
+    with gr.Row():
+        gr.Markdown("<h3><center>Chat-Your-Data (State-of-the-Union)</center></h3>")
+
+        openai_api_key_textbox = gr.Textbox(
+            placeholder="Paste your OpenAI API key (sk-...)",
+            show_label=False,
+            lines=1,
+            type="password",
+        )
+
+    chatbot = gr.Chatbot()
+
+    with gr.Row():
+        message = gr.Textbox(
+            label="What's your question?",
+            placeholder="Ask questions about the most recent state of the union",
+            lines=1,
+        )
+        submit = gr.Button(value="Send", variant="secondary").style(full_width=False)
+
+    gr.Examples(
+        examples=[
+            "What did the president say about Kentaji Brown Jackson",
+            "Did he mention Stephen Breyer?",
+            "What was his stance on Ukraine",
+        ],
+        inputs=message,
+    )
+
+    gr.HTML("Demo application of a LangChain chain.")
+
+    gr.HTML(
+        "<center>Powered by <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗</a></center>"
+    )
+
+    state = gr.State()
+    agent_state = gr.State()
+
+    submit.click(chat, inputs=[openai_api_key_textbox, message, state, agent_state], outputs=[chatbot, state])
+    message.submit(chat, inputs=[openai_api_key_textbox, message, state, agent_state], outputs=[chatbot, state])
+
+    openai_api_key_textbox.change(
+        set_openai_api_key,
+        inputs=[openai_api_key_textbox],
+        outputs=[agent_state],
+    )
+
+block.launch(debug=True)
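
Note: the changed lines above call get_chain() and use Lock, Optional, and Tuple, none of which appear in these hunks, so they are presumably imported or defined in an unchanged part of app.py. As a minimal, hypothetical sketch (not the commit's actual helper), a get_chain() compatible with ChatWrapper's chain({"question": ..., "chat_history": ...})["answer"] call could wrap the same GPTPineconeIndex like this:

from threading import Lock          # used by ChatWrapper (assumed to exist elsewhere in app.py)
from typing import Optional, Tuple  # used in ChatWrapper.__call__ (assumed to exist elsewhere in app.py)

def get_chain(index):
    """Hypothetical helper: wrap a GPTPineconeIndex query in the dict-in/dict-out
    interface that ChatWrapper expects. The commit's real helper may differ."""
    def chain(inputs):
        # chat_history is ignored in this sketch; the real helper may thread it into the query.
        response = index.query(inputs["question"])
        return {"answer": str(response)}
    return chain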