Update app.py
app.py
CHANGED
@@ -17,6 +17,7 @@ def generatePrompt(inputuno, inputdos):
 
     prompt = inputuno
     promptdos = inputdos
+
     batch = tokenizer(prompt, return_tensors="pt")
     generated_ids = model.generate(batch["input_ids"])
     output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
@@ -33,7 +34,7 @@ def generatePrompt(inputuno, inputdos):
     # https://huggingface.co/docs/transformers/main/en/chat_templating
     final_prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
 
-    outputs = final_prompt
+    outputs = pipe(final_prompt, do_sample=True,)
 
     return outputs[0]["generated_text"]
 #
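The substantive change is in the second hunk: before this commit, `outputs` was assigned the templated prompt string itself, so `outputs[0]["generated_text"]` indexed a single character of that string and would fail with a TypeError. After the change, the text-generation pipeline is actually invoked, and `outputs` is a list of dicts carrying a `generated_text` key. Below is a minimal sketch of how the edited function could fit together; the checkpoint name, the `messages` structure, and everything outside the two hunks are assumptions for illustration, not part of this commit.

from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Hypothetical checkpoint; the Space's actual model is not visible in this diff.
checkpoint = "HuggingFaceH4/zephyr-7b-beta"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

def generatePrompt(inputuno, inputdos):
    prompt = inputuno
    promptdos = inputdos

    # Direct generate/decode path kept from the original function
    # (its result is unused by the return, as in the original).
    batch = tokenizer(prompt, return_tensors="pt")
    generated_ids = model.generate(batch["input_ids"])
    output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)

    # Assumed message structure; the diff does not show how `messages` is built.
    messages = [
        {"role": "system", "content": promptdos},
        {"role": "user", "content": prompt},
    ]

    # https://huggingface.co/docs/transformers/main/en/chat_templating
    final_prompt = pipe.tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )

    # The fix in this commit: run the pipeline on the templated prompt instead
    # of returning the prompt string, so outputs[0]["generated_text"] exists.
    outputs = pipe(final_prompt, do_sample=True)
    return outputs[0]["generated_text"]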