Spaces:
Sleeping
Sleeping
Elijahbodden
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -54,8 +54,8 @@ tokenizer.chat_template = custom_template
|
|
54 |
|
55 |
presets = {
|
56 |
# Make sure assistant responses end with a "\n" because reasons
|
57 |
-
"
|
58 |
-
"
|
59 |
"Thinky" : [{"role": "user", "content": "Woah you just totally blew my mind\ngehh now the fermi paradox is going to be bugging me 24/7\nok ttyl"}, {"role": "assistant", "content": "nah our deep convos are always the best, we should talk again soon\nttyl\n"}],
|
60 |
}
|
61 |
|
@@ -120,7 +120,7 @@ demo = gr.ChatInterface(
|
|
120 |
description="The model may take a while if it hasn't run recently or a lot of people are using it",
|
121 |
title="EliGPT v1.3",
|
122 |
additional_inputs=[
|
123 |
-
gr.Radio(presets.keys(), label="Personality preset", info="VERY SLIGHTLY influence the model's personality [WARNING, IF YOU CHANGE THIS WHILE THERE ARE MESSAGES IN THE CHAT, THE MODEL WILL BECOME VERY SLOW]", value="
|
124 |
gr.Slider(minimum=0.1, maximum=4.0, value=1.5, step=0.1, label="Temperature", info="How chaotic should the model be?"),
|
125 |
gr.Slider(minimum=0.0, maximum=1.0, value=0.1, step=0.01, label="Min_p", info="Lower values give it more \"personality\""),
|
126 |
gr.Slider(minimum=0, maximum=512, value=5, step=1, label="Length penalty start", info='When should the model start being more likely to shut up?'),
|
|
|
54 |
|
55 |
presets = {
|
56 |
# Make sure assistant responses end with a "\n" because reasons
|
57 |
+
"Default" : [{"role": "user", "content": "good convo, bye"}, {"role": "assistant", "content": "Haha cool ttyl\n"}],
|
58 |
+
"Rizz ????" : [{"role": "user", "content": "omg it's so hot when you flirt with me"}, {"role": "assistant", "content": "haha well you're lucky can even string a sentence together, the way you take my breath away π\n"}, {"role": "user", "content": "alright love you, gn!"}, {"role": "assistant", "content": "ttyl babe π\n"}],
|
59 |
"Thinky" : [{"role": "user", "content": "Woah you just totally blew my mind\ngehh now the fermi paradox is going to be bugging me 24/7\nok ttyl"}, {"role": "assistant", "content": "nah our deep convos are always the best, we should talk again soon\nttyl\n"}],
|
60 |
}
|
61 |
|
|
|
120 |
description="The model may take a while if it hasn't run recently or a lot of people are using it",
|
121 |
title="EliGPT v1.3",
|
122 |
additional_inputs=[
|
123 |
+
gr.Radio(presets.keys(), label="Personality preset", info="VERY SLIGHTLY influence the model's personality [WARNING, IF YOU CHANGE THIS WHILE THERE ARE MESSAGES IN THE CHAT, THE MODEL WILL BECOME VERY SLOW]", value="Default"),
|
124 |
gr.Slider(minimum=0.1, maximum=4.0, value=1.5, step=0.1, label="Temperature", info="How chaotic should the model be?"),
|
125 |
gr.Slider(minimum=0.0, maximum=1.0, value=0.1, step=0.01, label="Min_p", info="Lower values give it more \"personality\""),
|
126 |
gr.Slider(minimum=0, maximum=512, value=5, step=1, label="Length penalty start", info='When should the model start being more likely to shut up?'),
|