Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -115,7 +115,7 @@ def evaluate(
|
|
115 |
return output.split("### Response:")[1].strip()
|
116 |
|
117 |
|
118 |
-
gr.Interface(
|
119 |
fn=evaluate,
|
120 |
inputs=[
|
121 |
gr.components.Textbox(
|
@@ -138,7 +138,9 @@ gr.Interface(
|
|
138 |
],
|
139 |
title="🦙🌲 Alpaca-LoRA",
|
140 |
description="Alpaca-LoRA is a 7B-parameter LLaMA model finetuned to follow instructions. It is trained on the [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca) dataset and makes use of the Huggingface LLaMA implementation. For more information, please visit [the project's website](https://github.com/tloen/alpaca-lora).",
|
141 |
-
)
|
|
|
|
|
142 |
|
143 |
# Old testing code follows.
|
144 |
|
|
|
115 |
return output.split("### Response:")[1].strip()
|
116 |
|
117 |
|
118 |
+
g = gr.Interface(
|
119 |
fn=evaluate,
|
120 |
inputs=[
|
121 |
gr.components.Textbox(
|
|
|
138 |
],
|
139 |
title="🦙🌲 Alpaca-LoRA",
|
140 |
description="Alpaca-LoRA is a 7B-parameter LLaMA model finetuned to follow instructions. It is trained on the [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca) dataset and makes use of the Huggingface LLaMA implementation. For more information, please visit [the project's website](https://github.com/tloen/alpaca-lora).",
|
141 |
+
)
|
142 |
+
g.queue(concurrency_count=1)
|
143 |
+
g.launch()
|
144 |
|
145 |
# Old testing code follows.
|
146 |
|