Nanobit committed on
Commit
aefb2fc
1 Parent(s): b5aa8d8

Fix backward compat for peft

Files changed (1)
  1. src/axolotl/utils/models.py +8 -2
src/axolotl/utils/models.py CHANGED
@@ -140,12 +140,18 @@ def load_model(
             )
 
             replace_peft_model_with_int4_lora_model()
-        else:
-            from peft import prepare_model_for_kbit_training
     except Exception as err:
         logging.exception(err)
         raise err
 
+    try:
+        from peft import prepare_model_for_kbit_training
+    except ImportError:
+        # For backward compatibility
+        from peft import (
+            prepare_model_for_int8_training as prepare_model_for_kbit_training,
+        )
+
     model_kwargs = {}
     if cfg.adapter == "qlora" and cfg.load_in_4bit:
         model_kwargs["quantization_config"] = BitsAndBytesConfig(