Elijahbodden committed
Commit 6cda4f2 · verified · 1 Parent(s): a601f68

Update app.py

Files changed (1): app.py (+6 -6)
app.py CHANGED
@@ -28,9 +28,9 @@ model = Llama.from_pretrained(
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 presets = {
-    "Default" : [{"role": "user", "value": "good convo, bye"}, {"role": "assistant", "value": "Haha cool ttyl"}],
-    "Rizz ????" : [{"role": "user", "value": "omg it's so hot when you flirt with me"}, {"role": "assistant", "value": "haha well you're lucky can even string a sentence together, the way you take my breath away 😘"}, {"role": "user", "value": "alright love you, gn!"}, {"role": "assistant", "value": "ttyl babe 💕"}],
-    "Thinky" : [{"role": "user", "value": "Woah you just totally blew my mind\ngehh now the fermi paradox is going to be bugging me 24/7\nok ttyl"}, {"role": "assistant", "value": "nah our deep convos are always the best, we should talk again soon\nttyl"}],
+    "Default" : [{"from": "human", "value": "good convo, bye"}, {"from": "gpt", "value": "Haha cool ttyl"}],
+    "Rizz ????" : [{"from": "human", "value": "omg it's so hot when you flirt with me"}, {"from": "gpt", "value": "haha well you're lucky can even string a sentence together, the way you take my breath away 😘"}, {"from": "human", "value": "alright love you, gn!"}, {"from": "gpt", "value": "ttyl babe 💕"}],
+    "Thinky" : [{"from": "human", "value": "Woah you just totally blew my mind\ngehh now the fermi paradox is going to be bugging me 24/7\nok ttyl"}, {"from": "gpt", "value": "nah our deep convos are always the best, we should talk again soon\nttyl"}],
 }
 
 def custom_lp_logits_processor(ids, logits, lp_start, lp_decay, prompt_tok_len):
@@ -56,11 +56,11 @@ def respond(
     messages = presets[preset].copy()
     for val in history:
         if val[0]:
-            messages.append({"role": "user", "value": val[0]})
+            messages.append({"from": "human", "value": val[0]})
         if val[1]:
-            messages.append({"role": "assistant", "value": val[1]})
+            messages.append({"from": "gpt", "value": val[1]})
 
-    messages.append({"role": "user", "value": message})
+    messages.append({"from": "human", "value": message})
 
     response = ""
 
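Note (not part of the commit): the rename from role/user/assistant keys to from/human/gpt matches ShareGPT-style message dicts, which the model's chat template presumably reads as msg["from"] / msg["value"]. A minimal sketch of how such a history could be rendered into a prompt with transformers; model_id is a placeholder here, app.py defines the real one earlier in the file:

from transformers import AutoTokenizer

model_id = "your-model-id"  # placeholder; app.py defines the actual model_id

tokenizer = AutoTokenizer.from_pretrained(model_id)

# Conversation history in the ShareGPT-style format used after this commit
messages = [
    {"from": "human", "value": "good convo, bye"},
    {"from": "gpt", "value": "Haha cool ttyl"},
    {"from": "human", "value": "actually, one more question"},
]

# Assuming the tokenizer ships a chat template that understands these keys,
# this renders the prompt string that the llama.cpp model is asked to complete.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)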