winglian committed
Commit 919246f
1 Parent(s): ffac902

don't pass rope_scaling kwarg if it's None (#383)

Files changed (1):
  src/axolotl/utils/models.py  +5 -1
src/axolotl/utils/models.py CHANGED

@@ -229,8 +229,12 @@ def load_model(
     elif cfg.is_llama_derived_model and not cfg.trust_remote_code:
         from transformers import LlamaForCausalLM

+        config_kwargs = {}
+        if cfg.rope_scaling:
+            config_kwargs["rope_scaling"] = cfg.rope_scaling
         config = LlamaConfig.from_pretrained(
-            base_model_config, rope_scaling=cfg.rope_scaling
+            base_model_config,
+            **config_kwargs,
         )
         model = LlamaForCausalLM.from_pretrained(
             base_model,
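For context, here is a minimal standalone sketch of the pattern this commit applies (the build_llama_config wrapper is hypothetical; the rope_scaling handling mirrors the diff): build the config kwargs conditionally so an unset option is omitted from the call entirely rather than forwarded as an explicit None.

from transformers import LlamaConfig

def build_llama_config(base_model_config, rope_scaling=None):
    # Only forward rope_scaling when it is actually set; passing
    # rope_scaling=None would still count as an explicit kwarg and
    # could override whatever the pretrained config specifies.
    config_kwargs = {}
    if rope_scaling:
        config_kwargs["rope_scaling"] = rope_scaling
    return LlamaConfig.from_pretrained(base_model_config, **config_kwargs)

# Usage: rope_scaling stays out of the call unless configured, e.g.
# build_llama_config("meta-llama/Llama-2-7b-hf", {"type": "linear", "factor": 2.0})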