hamel, Nanobit committed
Commit a3e8783
1 Parent(s): b31038a

[Docs] delete unused cfg value `lora_out_dir` (#1029)


* Update README.md

* Update README.md

* Update README.md

Co-authored-by: NanoCode012 <kevinvong@rocketmail.com>

Files changed (1)
  1. README.md +2 -5
README.md CHANGED
@@ -643,7 +643,8 @@ max_memory:
 # If you want to use 'lora' or 'qlora' or leave blank to train all parameters in original model
 adapter: lora
 # If you already have a lora model trained that you want to load, put that here.
-# This means after training, if you want to test the model, you should set this to the value of `lora_out_dir`.
+# This means after training, if you want to test the model, you should set this to the value of `output_dir`.
+# Note that if you merge an adapter to the base model, a new subdirectory `merged` will be created under the `output_dir`.
 lora_model_dir:
 
 # LoRA hyperparameters
@@ -670,10 +671,6 @@ lora_modules_to_save:
 # - embed_tokens
 # - lm_head
 
-# Once you complete training, the model will be saved to the following directory.
-# If you merge the adapter to the base model, a subdirectory `merged` will be created under this directory.
-# Make sure `lora_model_dir` points to this directory if you want to use the trained model.
-lora_out_dir:
 lora_fan_in_fan_out: false
 
 # ReLoRA configuration
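
For context, here is a minimal sketch of the workflow after this change, assuming a typical axolotl YAML config; the `./lora-out` path is a hypothetical example and is not taken from the commit:

# Training run: with `lora_out_dir` removed, the trained adapter is
# saved under `output_dir`.
adapter: lora
output_dir: ./lora-out  # hypothetical path, replace with your own

# Later test/inference run: point `lora_model_dir` at the directory you
# used as `output_dir` during training to load the trained adapter.
lora_model_dir: ./lora-out

# If you merged the adapter into the base model, load from the `merged`
# subdirectory created under `output_dir` instead:
# lora_model_dir: ./lora-out/merged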