winglian committed on
Commit
6f84980
2 Parent(s): c16644d 136522f

Merge pull request #206 from MaciejKarasek/issue205

Browse files
Files changed (1) hide show
  1. src/axolotl/utils/models.py +7 -1
src/axolotl/utils/models.py CHANGED
@@ -252,11 +252,16 @@ def load_model(
252
  )
253
  # Shouldn't be a problem most of the time. will obviously error if the model doesn't support this
254
  # when training starts
255
- if hasattr(config, "max_seq_len") and cfg.sequence_len > config.max_seq_len:
 
 
 
 
256
  config.max_seq_len = cfg.sequence_len
257
  logging.warning(f"increasing context length to {cfg.sequence_len}")
258
  elif (
259
  hasattr(config, "max_sequence_length")
 
260
  and cfg.sequence_len > config.max_sequence_length
261
  ):
262
  config.max_sequence_length = cfg.sequence_len
@@ -290,6 +295,7 @@ def load_model(
290
 
291
  if (
292
  hasattr(model.config, "max_position_embeddings")
 
293
  and cfg.sequence_len >= model.config.max_position_embeddings
294
  ):
295
  logging.warning(
 
252
  )
253
  # Shouldn't be a problem most of the time. will obviously error if the model doesn't support this
254
  # when training starts
255
+ if (
256
+ hasattr(config, "max_seq_len")
257
+ and config.max_seq_len
258
+ and cfg.sequence_len > config.max_seq_len
259
+ ):
260
  config.max_seq_len = cfg.sequence_len
261
  logging.warning(f"increasing context length to {cfg.sequence_len}")
262
  elif (
263
  hasattr(config, "max_sequence_length")
264
+ and config.max_sequence_length
265
  and cfg.sequence_len > config.max_sequence_length
266
  ):
267
  config.max_sequence_length = cfg.sequence_len
 
295
 
296
  if (
297
  hasattr(model.config, "max_position_embeddings")
298
+ and model.config.max_position_embeddings
299
  and cfg.sequence_len >= model.config.max_position_embeddings
300
  ):
301
  logging.warning(