Text Generation
PEFT
Safetensors
German
Bavarian
JanPf committed on
Commit
11fb704
·
verified ·
1 Parent(s): bab952f

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +4 -4
README.md CHANGED
@@ -24,17 +24,17 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
24
 
25
  # script config
26
  base_model_name = "LSX-UniWue/LLaMmlein_1B"
27
- chat_adapter_name = "LSX-UniWue/Betzerl_1B_wiki_preview"
28
  device = "cuda" # or mps
29
 
30
  # load model
31
- config = PeftConfig.from_pretrained(chat_adapter_name)
32
  base_model = model = AutoModelForCausalLM.from_pretrained(
33
  base_model_name,
34
  torch_dtype=torch.bfloat16,
35
  device_map=device,
36
  )
37
  base_model.resize_token_embeddings(32001)
38
- model = PeftModel.from_pretrained(base_model, chat_adapter_name)
39
- tokenizer = AutoTokenizer.from_pretrained(chat_adapter_name)
40
  ```
 
24
 
25
  # script config
26
  base_model_name = "LSX-UniWue/LLaMmlein_1B"
27
+ adapter_name = "LSX-UniWue/Betzerl_1B_wiki_preview"
28
  device = "cuda" # or mps
29
 
30
  # load model
31
+ config = PeftConfig.from_pretrained(adapter_name)
32
  base_model = model = AutoModelForCausalLM.from_pretrained(
33
  base_model_name,
34
  torch_dtype=torch.bfloat16,
35
  device_map=device,
36
  )
37
  base_model.resize_token_embeddings(32001)
38
+ model = PeftModel.from_pretrained(base_model, adapter_name)
39
+ tokenizer = AutoTokenizer.from_pretrained(adapter_name)
40
  ```