j2moreno committed on
Commit
dc08000
1 Parent(s): c1bdfbf

Add examples

Browse files
Files changed (1) hide show
  1. app.py +10 -11
app.py CHANGED
@@ -27,13 +27,13 @@ Leonardo Moreno contacts:
27
 
28
  **Warning:** This space uses the free CPU Basic hardware from Hugging Face. Some steps and LLM models used below (free inference endpoints) can take some time to generate a reply.
29
  """
30
- # examples=[
31
- # ['Who is Leonardo Moreno?'],
32
- # ['Describe Leonardo Moreno\'s professional background.'],
33
- # ['What projects has Leonardo Moreno worked on?'],
34
- # ["What are Leonardo Moreno's core technical skills?"],
35
- # ['How has Leonardo Moreno integrated AI in his work?'],
36
- # ]
37
 
38
  # @spaces.GPU
39
  def generate_response(message, history):
@@ -56,14 +56,13 @@ def generate_response(message, history):
56
  outputs = pipe(prompt, max_new_tokens=max_new_tokens, do_sample=True,
57
  temperature=temperature, top_k=top_k, top_p=top_p, repetition_penalty=1.10)
58
 
59
- print(outputs[0]["generated_text"])
60
- return outputs[0]["generated_text"]
61
 
62
  if __name__ == "__main__":
63
  gr.ChatInterface(generate_response,
64
  title=title,
65
  description=description,
66
- #examples=examples,
67
- #cache_examples=True,
68
  #additional_inputs=additional_inputs,
69
  ).launch()
 
27
 
28
  **Warning:** This space uses the free CPU Basic hardware from Hugging Face. Some steps and LLM models used below (free inference endpoints) can take some time to generate a reply.
29
  """
30
+ examples=[
31
+ ['Who is Leonardo Moreno?'],
32
+ ['Describe Leonardo Moreno\'s professional background.'],
33
+ ['What projects has Leonardo Moreno worked on?'],
34
+ ["What are Leonardo Moreno's core technical skills?"],
35
+ ['How has Leonardo Moreno integrated AI in his work?'],
36
+ ]
37
 
38
  # @spaces.GPU
39
  def generate_response(message, history):
 
56
  outputs = pipe(prompt, max_new_tokens=max_new_tokens, do_sample=True,
57
  temperature=temperature, top_k=top_k, top_p=top_p, repetition_penalty=1.10)
58
 
59
+ return outputs[0]["generated_text"].split("<|assistant|>")[-1].lstrip()
 
60
 
61
  if __name__ == "__main__":
62
  gr.ChatInterface(generate_response,
63
  title=title,
64
  description=description,
65
+ examples=examples,
66
+ cache_examples=True,
67
  #additional_inputs=additional_inputs,
68
  ).launch()