LLMAgora / app.py
Cartinoe5930's picture
Update app.py
ec39c42
raw
history blame
624 Bytes
import gradio as gr
def response_print(model_list, response_list):
    """Build a markdown-style summary line for each selected model.

    Args:
        model_list: List of model names chosen in the CheckboxGroup.
        response_list: Truthy when the "CoT" checkbox is ticked; the same
            mode label is applied to every model's line.

    Returns:
        One "# <model>'s response: <CoT|None>\n" line per model,
        concatenated; empty string when no models are selected.
    """
    # Original had `rnage(len(model_list))` — a NameError on every call.
    # Iterate the list directly and join instead of quadratic `+=`.
    mode = "CoT" if response_list else "None"
    return "".join(f"# {model}'s response: {mode}\n" for model in model_list)
# Available language models a user may pick from for the Agora discussion.
MODEL_CHOICES = [
    "Llama2", "Alpaca", "Vicuna", "Koala", "Falcon",
    "Baize", "WizardLM", "Orca", "phi-1.5",
]

# Two inputs: which models participate, and whether to use CoT prompting.
model_selector = gr.CheckboxGroup(
    MODEL_CHOICES,
    label="Model Selection",
    info="Choose 3 LMs to participate in LLM Agora.",
)
cot_checkbox = gr.Checkbox(
    label="CoT",
    info="Do you want to use CoT for inference?",
)

demo = gr.Interface(
    fn=response_print,
    inputs=[model_selector, cot_checkbox],
    outputs="text",
)
demo.launch()