winglian committed on
Commit
66afb76
·
1 Parent(s): a692ad3

don't use llama if trust_remote_code is set since that needs to use AutoModel path

Browse files
Files changed (1) hide show
  1. src/axolotl/utils/models.py +1 -1
src/axolotl/utils/models.py CHANGED
@@ -202,7 +202,7 @@ def load_model(
202
  else True,
203
  )
204
  load_in_8bit = False
205
- elif cfg.is_llama_derived_model:
206
  from transformers import LlamaForCausalLM
207
 
208
  config = LlamaConfig.from_pretrained(base_model_config)
 
202
  else True,
203
  )
204
  load_in_8bit = False
205
+ elif cfg.is_llama_derived_model and not cfg.trust_remote_code:
206
  from transformers import LlamaForCausalLM
207
 
208
  config = LlamaConfig.from_pretrained(base_model_config)