
Commit

Merge pull request #322 from mir-group/fix_defaults
Fix optimizer defaults
simonbatzner authored Mar 26, 2023
2 parents 55bfc1f + aecd7c0 commit c56f48f
Showing 2 changed files with 10 additions and 4 deletions.
7 changes: 5 additions & 2 deletions configs/example.yaml
@@ -124,8 +124,11 @@ metrics_components:

# optimizer, may be any optimizer defined in torch.optim
# the name `optimizer_name` is case sensitive
-optimizer_name: Adam # default optimizer is Adam
-optimizer_amsgrad: false
+# IMPORTANT: for NequIP (not for Allegro), we find that in most cases AMSGrad strongly improves
+# out-of-distribution generalization over Adam. We highly recommend trying both AMSGrad (by setting
+# optimizer_amsgrad: true) and Adam (by setting optimizer_amsgrad: false)
+optimizer_name: Adam
+optimizer_amsgrad: true

# lr scheduler, currently only supports the two options listed in full.yaml, i.e. on-plateau and cosine annealing with warm restarts; if you need more, please file an issue
# here: on-plateau, reduce lr by a factor of lr_scheduler_factor if metrics_key hasn't improved for lr_scheduler_patience epochs
7 changes: 5 additions & 2 deletions configs/full.yaml
@@ -266,8 +266,11 @@ metrics_components:

# optimizer, may be any optimizer defined in torch.optim
# the name `optimizer_name` is case sensitive
-optimizer_name: Adam # default optimizer is Adam
-optimizer_amsgrad: false
+# IMPORTANT: for NequIP (not for Allegro), we find that in most cases AMSGrad strongly improves
+# out-of-distribution generalization over Adam. We highly recommend trying both AMSGrad (by setting
+# optimizer_amsgrad: true) and Adam (by setting optimizer_amsgrad: false)
+optimizer_name: Adam
+optimizer_amsgrad: true
optimizer_betas: !!python/tuple
- 0.9
- 0.999
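For context, the practical effect of this commit is that the example configs now default to AMSGrad. A minimal sketch of the two settings the new comment recommends comparing, using only keys already present in these files (whether AMSGrad actually helps is expected to be case dependent, per the comment itself):

# run 1: Adam with AMSGrad enabled (the new default in these example configs)
optimizer_name: Adam
optimizer_amsgrad: true

# run 2: plain Adam, the previous default
optimizer_name: Adam
optimizer_amsgrad: false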

