File size: 417 Bytes
2638901
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Path hoisted to a single constant (was duplicated in both from_pretrained calls).
MODEL_PATH = "/export/share/txie/llama2/Llama-2-7b-hf"


def main(
    model_path: str = MODEL_PATH,
    prompt: str = "The world is",
    max_new_tokens: int = 128,
) -> str:
    """Load a causal LM from *model_path*, generate a continuation of *prompt*,
    print it, and return the decoded text.

    Args:
        model_path: Local directory (or hub id) of the pretrained model.
        prompt: Text to continue.
        max_new_tokens: Upper bound on *generated* tokens. The original code
            used ``max_length=128``, which counts the prompt tokens as well,
            so the continuation was shorter than 128 tokens — presumably the
            intent was 128 new tokens; confirm if exact old behavior matters.

    Returns:
        The full decoded sequence (prompt + continuation).
    """
    tokenizer = AutoTokenizer.from_pretrained(model_path)
    # bfloat16 halves memory vs. fp32 for a 7B-parameter model.
    model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.bfloat16)
    inputs = tokenizer(prompt, return_tensors="pt")
    sample = model.generate(**inputs, max_new_tokens=max_new_tokens)
    text = tokenizer.decode(sample[0])
    print(text)
    return text


if __name__ == "__main__":
    # Guarded so that importing this module does not trigger a multi-GB
    # model load as a side effect.
    main()