Nanobit committed
Commit 9196237
1 Parent(s): bbfc333

Add cfg.lora_target_linear

Files changed (2)
  1. README.md +1 -0
  2. src/axolotl/utils/models.py +12 -8
README.md CHANGED
@@ -232,6 +232,7 @@ lora_target_modules:
 # - gate_proj
 # - down_proj
 # - up_proj
+lora_target_linear: # if true, will target all linear layers
 lora_modules_to_save:
 # - embed_tokens
 # - lm_head
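
For context: setting `lora_target_linear: true` in the YAML config unions the explicitly listed `lora_target_modules` with every linear layer discovered in the model at load time. A minimal sketch of the net effect, assuming a LLaMA-style model; the `discovered` list and `r=8` are illustrative stand-ins, not values from this commit:

```python
from peft import LoraConfig

explicit = ["q_proj", "v_proj"]  # stand-in for cfg.lora_target_modules
# Assumed discovery result for a LLaMA-style model; in axolotl this
# comes from find_all_linear_names(bits, model).
discovered = ["q_proj", "k_proj", "v_proj", "o_proj",
              "gate_proj", "down_proj", "up_proj"]

# Mirrors list(set(lora_target_modules + linear_names)) from the diff below.
lora_config = LoraConfig(
    r=8,  # placeholder for cfg.lora_r
    target_modules=sorted(set(explicit) | set(discovered)),
    task_type="CAUSAL_LM",
)
print(lora_config.target_modules)
```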
src/axolotl/utils/models.py CHANGED
@@ -364,14 +364,18 @@ def load_lora(model, cfg):
         PeftModel,
     )
 
-    bits = None
-    if cfg.load_in_4bit:
-        bits = 4
-    elif cfg.load_in_8bit:
-        bits = 8
-    linear_names = find_all_linear_names(bits, model)
-    logging.info(f"found linear modules: {repr(linear_names)}")
-    lora_target_modules = list(set(list(cfg.lora_target_modules) + linear_names))
+    lora_target_modules = list(cfg.lora_target_modules)
+
+    if cfg.lora_target_linear:
+        bits = None
+        if cfg.load_in_4bit:
+            bits = 4
+        elif cfg.load_in_8bit:
+            bits = 8
+
+        linear_names = find_all_linear_names(bits, model)
+        logging.info(f"found linear modules: {repr(linear_names)}")
+        lora_target_modules = list(set(lora_target_modules + linear_names))
 
     lora_config = LoraConfig(
         r=cfg.lora_r,
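
For reference, a hedged sketch of what a helper like `find_all_linear_names(bits, model)` typically does (axolotl's actual implementation may differ): walk the module tree, pick the linear class that matches the quantization mode, and collect the leaf names that PEFT matches `target_modules` against.

```python
import torch.nn as nn

def find_all_linear_names_sketch(bits, model):
    """Illustrative reimplementation; not axolotl's own helper."""
    if bits == 4:
        from bitsandbytes.nn import Linear4bit as linear_cls  # 4-bit quantized layers
    elif bits == 8:
        from bitsandbytes.nn import Linear8bitLt as linear_cls  # 8-bit quantized layers
    else:
        linear_cls = nn.Linear

    names = set()
    for name, module in model.named_modules():
        if isinstance(module, linear_cls):
            # PEFT matches target_modules against the last path component,
            # e.g. "q_proj" rather than "model.layers.0.self_attn.q_proj".
            names.add(name.split(".")[-1])
    # The output head is commonly excluded so it is not wrapped with a LoRA adapter.
    names.discard("lm_head")
    return list(names)
```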