lixin4ever committed on
Commit
c907a7a
1 Parent(s): 09793d9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -1633,7 +1633,8 @@ def batch_inference(
1633
  )
1634
 
1635
  generated = llm.generate(full_prompts, sampling_params, use_tqdm=False)
1636
- responses = [g.outputs[0].text for g in generated]
 
1637
  if len(responses) != len(all_items):
1638
  raise gr.Error(f'inconsistent lengths {len(responses)} != {len(all_items)}')
1639
 
 
1633
  )
1634
 
1635
  generated = llm.generate(full_prompts, sampling_params, use_tqdm=False)
1636
+ #responses = [g.outputs[0].text for g in generated]
1637
+ responses = ["Our system is under maintenance, will be back soon!" for g in generated]
1638
  if len(responses) != len(all_items):
1639
  raise gr.Error(f'inconsistent lengths {len(responses)} != {len(all_items)}')
1640