ppsingh committed on
Commit
659f3df
·
verified ·
1 Parent(s): d9b8276

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -217,7 +217,8 @@ async def chat(query,history,sources,reports,subtype, client_ip=None, session_id
217
  answer_yet += token
218
  parsed_answer = parse_output_llm_with_sources(answer_yet)
219
  history[-1] = (query, parsed_answer)
220
- yield [tuple(x) for x in history], docs_html
 
221
 
222
  # Stream the response updates
223
  async for update in process_stream():
@@ -256,7 +257,7 @@ async def chat(query,history,sources,reports,subtype, client_ip=None, session_id
256
 
257
  response = chat_model.chat_completion(
258
  messages=formatted_messages,
259
- max_tokens=int(model_config.get('reader', 'MAX_TOKENS'))
260
  )
261
 
262
  response_text = response.choices[0].message.content
@@ -278,9 +279,9 @@ async def chat(query,history,sources,reports,subtype, client_ip=None, session_id
278
 
279
  # logging the event
280
  try:
281
- save_logs(scheduler,JSON_DATASET_PATH,logs)
282
  except Exception as e:
283
- logging.error(e)
284
 
285
 
286
 
 
217
  answer_yet += token
218
  parsed_answer = parse_output_llm_with_sources(answer_yet)
219
  history[-1] = (query, parsed_answer)
220
+ logs_data["answer"] = parsed_answer
221
+ yield [tuple(x) for x in history], docs_html, logs_data, session_id
222
 
223
  # Stream the response updates
224
  async for update in process_stream():
 
257
 
258
  response = chat_model.chat_completion(
259
  messages=formatted_messages,
260
+ max_tokens= int(model_config.get('reader', 'MAX_TOKENS'))
261
  )
262
 
263
  response_text = response.choices[0].message.content
 
279
 
280
  # logging the event
281
  try:
282
+ save_logs(scheduler,JSON_DATASET_PATH,logs_data)
283
  except Exception as e:
284
+ raise
285
 
286
 
287