
wuhp committed
Commit 683b0bb · verified · 1 Parent(s): 3986b4b

Update app.py

Files changed (1):
  1. app.py +3 -6
app.py CHANGED

@@ -218,8 +218,7 @@ def predict(
         do_sample=True
     )[0]["generated_text"]

-    formatted_output += output.strip() + solution_prefix
-    formatted_output += "Final Answer (This part is a placeholder and needs better extraction): ... "
+    formatted_output += output.strip()

     return formatted_output

@@ -252,8 +251,7 @@ def compare_models(
         thinking_prefix = f"**{model_name} - Thinking Process:**\n"
         solution_prefix = f"\n**{model_name} - Solution:**\n"
         formatted_output = thinking_prefix
-        formatted_output += raw_output.strip() + solution_prefix
-        formatted_output += f"{model_name} Final Answer: ... "
+        formatted_output += raw_output.strip()
         return formatted_output

     local_out_raw = local_pipe(
@@ -413,8 +411,7 @@ def chat_rag(
         do_sample=True
     )[0]["generated_text"]

-    formatted_output += output.strip() + solution_prefix
-    formatted_output += "Final Answer (This part is a placeholder and needs better extraction): ... "
+    formatted_output += output.strip()
     assistant_reply = formatted_output

     if assistant_reply.startswith(prompt):
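
Taken together, the three hunks drop the unused solution/"Final Answer" placeholder text, so each function now returns the thinking prefix followed directly by the stripped model output. A minimal sketch of the resulting string assembly, using hypothetical placeholder values rather than anything from the Space:

# Minimal sketch (values are hypothetical, not from the Space) of how
# formatted_output is built after this commit: the stripped model text is
# appended directly to the thinking prefix; solution_prefix is still
# defined but no longer appended.
model_name = "example-model"                          # hypothetical value
raw_output = "  step 1 ... step 2 ... answer: 42  "   # hypothetical model text

thinking_prefix = f"**{model_name} - Thinking Process:**\n"
solution_prefix = f"\n**{model_name} - Solution:**\n"  # unused after this change

formatted_output = thinking_prefix
formatted_output += raw_output.strip()

print(formatted_output)
# **example-model - Thinking Process:**
# step 1 ... step 2 ... answer: 42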