paloma99 committed on
Commit e08fbf7 · verified · 1 Parent(s): 5929494

Update app.py

Files changed (1)
  1. app.py +18 -3
app.py CHANGED
@@ -124,10 +124,25 @@ qa_chain = ConversationalRetrievalChain.from_llm(
     output_key = 'answer'
 )
 
-def chat_interface(question,history):
-
+def chat_interface(question, history):
+    # Invoke the QA chain to get the latest answer
     result = qa_chain.invoke({"question": question})
-    return result['answer'] # If the result is a string, return it directly
+
+    # Access the history stored in the memory
+    all_messages = memory.get_all_messages()
+
+    # Filter out all but the latest question and answer
+    latest_question = None
+    latest_answer = None
+    for message in all_messages[::-1]:
+        if message['output_key'] == 'answer':
+            latest_answer = message['output']
+        elif message['input_key'] == 'question' and latest_question is None:
+            latest_question = message['input']
+        if latest_question is not None and latest_answer is not None:
+            break
+
+    return latest_answer  # Return the answer to the latest question
 
 chatbot_gradio_app = gr.ChatInterface(
     fn=chat_interface,
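
Note that the added code relies on memory.get_all_messages() and on dict-style 'input_key'/'output_key' access, which are not part of the stock LangChain memory interface. A minimal sketch of the same "latest question/answer" lookup, assuming the chain was built with a ConversationBufferMemory using return_messages=True (so its history is exposed as HumanMessage/AIMessage objects under memory.chat_memory.messages); the latest_turn helper below is hypothetical and not part of this commit:

from langchain.memory import ConversationBufferMemory
from langchain.schema import AIMessage, HumanMessage

def latest_turn(memory: ConversationBufferMemory):
    # Hypothetical helper (not from the commit): pull the newest question and
    # answer out of the memory's stored message list.
    latest_question = None
    latest_answer = None
    # Walk the stored messages from newest to oldest.
    for message in reversed(memory.chat_memory.messages):
        if isinstance(message, AIMessage) and latest_answer is None:
            latest_answer = message.content
        elif isinstance(message, HumanMessage) and latest_question is None:
            latest_question = message.content
        if latest_question is not None and latest_answer is not None:
            break
    return latest_question, latest_answer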