bloom-multilingual-chat / load_model.py
as-cle-bert's picture
Update load_model.py
bde06a8 verified
raw
history blame
352 Bytes
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Chat model backing the bloom-multilingual-chat Space.
MODEL_ID = "h2oai/h2o-danube2-1.8b-chat"

# Load weights and tokenizer once at import time; downloads from the
# Hugging Face Hub on first run, then hits the local cache.
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

# Text-generation pipeline used by the app.
# do_sample=True is required for `temperature` to take effect — without it
# transformers falls back to greedy decoding and silently ignores the
# temperature (emitting a "do_sample is set to False" warning).
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=1024,       # cap on generated tokens per call
    repetition_penalty=1.2,    # discourage verbatim loops
    do_sample=True,            # enable sampling so temperature applies
    temperature=0.4,           # mildly creative but mostly deterministic
)