from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and the CodeGen model fine-tuned on XLCoST from the Hub
tokenizer = AutoTokenizer.from_pretrained("giulio98/codegen-350M-multi-xlcost-v2")
model = AutoModelForCausalLM.from_pretrained("giulio98/codegen-350M-multi-xlcost-v2")

# Build the prompt: EOS token, the task description wrapped in ''' quotes,
# followed by the "###" separator after which the model generates code
text = tokenizer.eos_token + "'''\n" + "function to add two numbers" + "\n'''\n" + "###\n"
input_ids = tokenizer(text, return_tensors="pt").input_ids

# Generate up to 128 tokens (prompt included) and decode the output
generated_ids = model.generate(input_ids, max_length=128)
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))
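Note that `generate` returns the prompt together with the completion, so the decoded string above includes the docstring and the "###" marker. If you only want the generated code, a minimal sketch (reusing `tokenizer`, `input_ids`, and `generated_ids` from the snippet above; `completion_ids` is just an illustrative variable name) is to slice off the prompt tokens before decoding:

# Keep only the tokens produced after the prompt, then decode just the completion
completion_ids = generated_ids[:, input_ids.shape[-1]:]
print(tokenizer.decode(completion_ids[0], skip_special_tokens=True))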