lruizap committed on
Commit 9e35d46
1 Parent(s): bbccd1b

Update app.py

Files changed (1)
  app.py +5 -6
app.py CHANGED
@@ -14,8 +14,8 @@ pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-alpha",
                 torch_dtype=torch.bfloat16, device_map="auto")
 
 
-def generatePrompt(input, inputdos):
-    prompt = input
+def generatePrompt(inputuno, inputdos):
+    prompt = inputuno
     promptdos = inputdos
     batch = tokenizer(prompt, return_tensors="pt")
     generated_ids = model.generate(batch["input_ids"])
@@ -31,12 +31,11 @@ def generatePrompt(input, inputdos):
         },
     ]
     # https://huggingface.co/docs/transformers/main/en/chat_templating
-    final_prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+    # final_prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
 
-    outputs = pipe(final_prompt)
-    result = outputs[0]["generated_text"]
+    outputs = pipe(messages, tokenize=False, add_generation_prompt=True)
 
-    return result
+    return outputs[0]["generated_text"]
 #
 
 # Interface
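
For reference, the removed lines followed the two-step pattern described in the chat templating guide linked in the code (render the messages to a prompt string with apply_chat_template, then generate from that string). A minimal, self-contained sketch of that pattern is below; the message contents and generation parameters are illustrative placeholders, not taken from this Space's app.py.

    import torch
    from transformers import pipeline

    pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-alpha",
                    torch_dtype=torch.bfloat16, device_map="auto")

    # Illustrative messages; app.py builds its own `messages` list.
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Explain chat templates in one sentence."},
    ]

    # Render the chat into a single prompt string using the model's chat template.
    final_prompt = pipe.tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )

    # Generate from the rendered prompt; the pipeline returns a list of dicts
    # with the text under "generated_text".
    outputs = pipe(final_prompt, max_new_tokens=256, do_sample=True)
    result = outputs[0]["generated_text"]
    print(result)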