Update app.py
app.py CHANGED
@@ -58,8 +58,8 @@ def predict(message, history, system_prompt, temperature, max_new_tokens, top_k,
     stop_tokens = [tokenizer.eos_token_id]
     instruction = system_prompt + "\n\n"
     for user, assistant in history:
-        instruction += f"
-        instruction += f"
+        instruction += f"role:user, content: {user}\nrole:assistant, content: {assistant}\n"
+        instruction += f"role:user, content: {message}\nassistant:"
 
     print(instruction)
 
@@ -123,5 +123,5 @@ gr.ChatInterface(
         gr.Slider(0, 2, 1.1, label="Repetition penalty"),
         gr.Slider(0, 1, 0.95, label="Top P sampling"),
     ],
-    theme=gr.themes.Soft(primary_hue=COLOR),
+    #theme=gr.themes.Soft(primary_hue=COLOR),
 ).queue().launch()
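
For reference, a minimal standalone sketch of the prompt assembly after this change. The two f-string lines are taken from the diff; the sample system_prompt, history, and message values, and running the loop outside the predict function, are illustrative assumptions only. history is assumed to be the list of (user, assistant) tuples that gr.ChatInterface passes in.

# Sketch of the new prompt format; sample values are made up for illustration.
system_prompt = "You are a helpful assistant."
history = [("Hi!", "Hello! How can I help?")]   # (user, assistant) pairs from gr.ChatInterface
message = "What does this Space run on?"

instruction = system_prompt + "\n\n"
for user, assistant in history:
    instruction += f"role:user, content: {user}\nrole:assistant, content: {assistant}\n"
instruction += f"role:user, content: {message}\nassistant:"

print(instruction)
# You are a helpful assistant.
#
# role:user, content: Hi!
# role:assistant, content: Hello! How can I help?
# role:user, content: What does this Space run on?
# assistant: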