[Minor] Narrow search range of lr-finder, lessen skip-window of lr_suggestion (#1643)

* narrow range of lr-finder, lessen skip of suggestion

* reduce change
ourownstory authored Aug 30, 2024
1 parent bde9b39 commit ae560c1
Showing 1 changed file with 5 additions and 5 deletions.
neuralprophet/utils_lightning.py
@@ -66,7 +66,7 @@ def smooth_loss_and_suggest(lr_finder, window=10):
         )
         raise
     # get the tuner's default suggestion
-    suggestion_default = lr_finder.suggestion(skip_begin=20, skip_end=10)
+    suggestion_default = lr_finder.suggestion(skip_begin=10, skip_end=3)
 
     log.info(f"Learning rate finder ---- default suggestion: {suggestion_default}")
     log.info(f"Learning rate finder ---- steepest: {suggestion_steepest}")
@@ -271,17 +271,17 @@ def find_learning_rate(model, loader, trainer, train_epochs):
     # Configure the learning rate finder args
     batches_per_epoch = len(loader)
     main_training_total_steps = train_epochs * batches_per_epoch
-    # main_training_total_steps is around 1e3 to 1e6 -> num_training 100 to 400
-    num_training = 100 + int(np.log10(1 + main_training_total_steps / 1000) * 100)
+    # main_training_total_steps is around 1e3 to 1e6 -> num_training 100 to 200
+    num_training = 100 + int(np.log10(1 + main_training_total_steps / 1000) * 30)
     if batches_per_epoch < num_training:
         log.warning(
             f"Learning rate finder: The number of batches per epoch ({batches_per_epoch}) is smaller than the number \
             required by the learning rate finder ({num_training}). The results might not be optimal."
         )
         # num_training = num_batches
     lr_finder_args = {
-        "min_lr": 1e-7,
-        "max_lr": 1e1,
+        "min_lr": 1e-6,
+        "max_lr": 10.0,
         "num_training": num_training,
         "early_stop_threshold": None,
         "mode": "exponential",
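
For context, lr_finder_args feeds Lightning's learning-rate finder. A minimal sketch of how the dict might be consumed, assuming the Lightning 2.x Tuner API (the call site itself is outside this diff, so this wiring is an assumption):

from lightning.pytorch.tuner import Tuner

# Assumed wiring, for illustration: run the sweep over the narrowed
# range, then apply the loosened skip window from the first hunk.
tuner = Tuner(trainer)
lr_finder = tuner.lr_find(model, train_dataloaders=loader, **lr_finder_args)
suggestion = lr_finder.suggestion(skip_begin=10, skip_end=3)

Note that only the lower bound actually moves: min_lr rises a decade from 1e-7 to 1e-6, while max_lr is numerically unchanged (1e1 == 10.0), just rewritten as a plain float.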
