Nanobit committed
Commit 05c1834
1 Parent(s): 5b712af

Update scheduler configs

Files changed (1): README.md +12 -2
README.md CHANGED
@@ -254,8 +254,18 @@ gradient_checkpointing: false
 # stop training after this many evaluation losses have increased in a row
 # https://huggingface.co/transformers/v4.2.2/_modules/transformers/trainer_callback.html#EarlyStoppingCallback
 early_stopping_patience: 3
-# specify a scheduler to use with the optimizer. only one_cycle is supported currently
-lr_scheduler:
+
+# specify a scheduler and kwargs to use with the optimizer
+lr_scheduler: # 'one_cycle' | 'log_sweep' | empty for cosine
+lr_scheduler_kwargs:
+
+# for one_cycle optim
+lr_div_factor: # learning rate div factor
+
+# for log_sweep optim
+log_sweep_min_lr:
+log_sweep_max_lr:
+
 # specify optimizer
 optimizer:
 # specify weight decay
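
For context, a minimal sketch of how the new keys might be combined in the README's YAML config. The values below are illustrative assumptions, not part of this commit; the comment semantics for lr_div_factor are assumed to mirror the usual one-cycle convention (starting LR = max LR / div factor):

```yaml
# Illustrative values only, not taken from this commit
lr_scheduler: one_cycle   # or 'log_sweep'; leave empty for the default cosine schedule
lr_div_factor: 25         # one_cycle only: learning rate div factor (assumed semantics)

# log_sweep variant: sweeps the learning rate between the two bounds
# lr_scheduler: log_sweep
# log_sweep_min_lr: 1e-7
# log_sweep_max_lr: 1e-2
```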