Update app.py
app.py CHANGED
@@ -70,10 +70,8 @@ def get_conversation_chain(vectorstore, model_name):
         memory=memory,
         return_source_documents=True
     )
-
-    result = conversation_chain
 
-    return
+    return conversation_chain
 
 
 def handle_userinput(user_question):
@@ -81,6 +79,8 @@ def handle_userinput(user_question):
 
     st.session_state.chat_history = response['chat_history']
 
+    st.session_state.retrieved_text = response['source_documents'][0]
+
     for i, message in enumerate(st.session_state.chat_history):
         if i % 2 == 0:
             st.write(user_template.replace(
@@ -89,6 +89,14 @@ def handle_userinput(user_question):
             st.write(bot_template.replace(
                 "{{MSG}}", message.content), unsafe_allow_html=True)
 
+    for i, message in enumerate(st.session_state.retrieved_text):
+        if i % 2 == 0:
+            st.write(user_template.replace(
+                "{{MSG}}", message.content), unsafe_allow_html=True)
+        else:
+            st.write(bot_template.replace(
+                "{{MSG}}", message.content), unsafe_allow_html=True)
+
 
 # main code
 load_dotenv()
@@ -101,6 +109,8 @@ if "conversation" not in st.session_state:
     st.session_state.conversation = None
 if "chat_history" not in st.session_state:
     st.session_state.chat_history = None
+if "retrieved_text" not in st.session_state:
+    st.session_state.retrieved_text = None
 
 st.header("Chat with multiple PDFs :books:")
 user_question = st.text_input("Ask a question about your documents:")
@@ -124,6 +134,6 @@ with st.sidebar:
             vectorstore = get_vectorstore(text_chunks)
 
             # create conversation chain
-            st.session_state.conversation
+            st.session_state.conversation = get_conversation_chain(vectorstore, model_name)
             st.text_area(retrieved_docs)
 
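For context on the change: because the chain is built with return_source_documents=True, the dict returned by st.session_state.conversation({'question': ...}) carries a source_documents list of LangChain Document objects alongside answer and chat_history, and the new lines stash the first of those documents in st.session_state.retrieved_text. Below is a minimal sketch of that flow, assuming a standard ConversationalRetrievalChain.from_llm setup with ConversationBufferMemory; the ChatOpenAI model and the helper name are illustrative, not taken from this commit.

# Sketch only: how source documents come back from a ConversationalRetrievalChain
# when return_source_documents=True. Assumed setup, not the exact app.py code.
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI            # illustrative LLM choice
from langchain.memory import ConversationBufferMemory


def build_conversation_chain(vectorstore, model_name):   # hypothetical helper name
    memory = ConversationBufferMemory(
        memory_key="chat_history",
        return_messages=True,
        output_key="answer",  # tells the memory which of the chain's outputs to store
    )
    return ConversationalRetrievalChain.from_llm(
        llm=ChatOpenAI(model_name=model_name),
        retriever=vectorstore.as_retriever(),
        memory=memory,
        return_source_documents=True,
    )


# Illustrative usage:
# response = chain({"question": user_question})
# response["answer"]            -> the model's reply
# response["chat_history"]      -> list of HumanMessage/AIMessage objects
# response["source_documents"]  -> list of Document objects, each with
#                                  .page_content and .metadata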