paloma99 committed on
Commit
3b4356d
1 Parent(s): 457b66b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -20
app.py CHANGED
@@ -124,27 +124,11 @@ qa_chain = ConversationalRetrievalChain.from_llm(
124
  output_key = 'answer'
125
  )
126
 
127
- def chat_interface(question, history):
128
- # Invoke the QA chain to get the result for the latest question
129
  result = qa_chain.invoke({"question": question})
130
-
131
- # Access the history stored in the memory
132
- all_messages = memory.get_all_messages()
133
-
134
- # Filter out all but the latest question and answer
135
- latest_question = None
136
- latest_answer = None
137
- for message in all_messages[::-1]:
138
- if message['output_key'] == 'answer':
139
- latest_answer = message['output']
140
- elif message['input_key'] == 'question':
141
- latest_question = message['input']
142
- # If we have found the latest question and its corresponding answer,
143
- # break out of the loop to avoid unnecessary iterations
144
- break
145
-
146
- # Return the latest answer if available, otherwise return an empty string
147
- return latest_answer
148
 
149
  chatbot_gradio_app = gr.ChatInterface(
150
  fn=chat_interface,
 
124
  output_key = 'answer'
125
  )
126
 
127
+ def chat_interface(question,history):
128
+
129
  result = qa_chain.invoke({"question": question})
130
+ return result['answer'] # If the result is a string, return it directly
131
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
132
 
133
  chatbot_gradio_app = gr.ChatInterface(
134
  fn=chat_interface,