chore: move LearningRateExp to deepmd.utils.learning_rate #4219

Merged 3 commits on Oct 17, 2024
deepmd/dpmodel/utils/learning_rate.py (new file: 53 additions, 0 deletions)
@@ -0,0 +1,53 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
import numpy as np


class LearningRateExp:
    def __init__(
        self,
        start_lr,
        stop_lr,
        decay_steps,
        stop_steps,
        decay_rate=None,
        **kwargs,
    ):
        """
        Construct an exponential-decayed learning rate.

        Parameters
        ----------
        start_lr
            The learning rate at the start of the training.
        stop_lr
            The desired learning rate at the end of the training.
            When decay_rate is explicitly set, this value will serve as
            the minimum learning rate during training. In other words,
            if the learning rate decays below stop_lr, stop_lr will be applied instead.
        decay_steps
            The learning rate is decaying every this number of training steps.
        stop_steps
            The total training steps for learning rate scheduler.
        decay_rate
            The decay rate for the learning rate.
            If provided, the decay rate will be set instead of
            calculating it through interpolation between start_lr and stop_lr.
        """
        self.start_lr = start_lr
        default_ds = 100 if stop_steps // 10 > 100 else stop_steps // 100 + 1
        self.decay_steps = decay_steps
        if self.decay_steps >= stop_steps:
            self.decay_steps = default_ds
        self.decay_rate = np.exp(
            np.log(stop_lr / self.start_lr) / (stop_steps / self.decay_steps)
        )
        if decay_rate is not None:
            self.decay_rate = decay_rate
        self.min_lr = stop_lr

    def value(self, step) -> np.float64:
        """Get the learning rate at the given step."""
        step_lr = self.start_lr * np.power(self.decay_rate, step // self.decay_steps)
        if step_lr < self.min_lr:
            step_lr = self.min_lr
        return step_lr
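
For reference, the constructor above derives decay_rate by interpolating between start_lr and stop_lr, i.e. decay_rate = exp(ln(stop_lr / start_lr) / (stop_steps / decay_steps)), so the learning rate reaches roughly stop_lr after stop_steps steps and is clamped at stop_lr afterwards. A minimal usage sketch of the class added in this file (the hyperparameter values below are illustrative, not taken from this PR):

from deepmd.dpmodel.utils.learning_rate import LearningRateExp

# Illustrative settings: decay every 5000 steps over a 1,000,000-step run.
lr = LearningRateExp(start_lr=1e-3, stop_lr=1e-8, decay_steps=5000, stop_steps=1_000_000)

print(lr.decay_rate)        # exp(ln(1e-8 / 1e-3) / 200) ~= 0.944
print(lr.value(0))          # 1e-3 at step 0
print(lr.value(500_000))    # ~3.2e-6 halfway through (geometric mean of start_lr and stop_lr)
print(lr.value(2_000_000))  # clamped to stop_lr = 1e-8 once the decay passes it
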
deepmd/pt/utils/learning_rate.py (6 additions, 51 deletions)
@@ -1,53 +1,8 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later
-import numpy as np
+from deepmd.dpmodel.utils.learning_rate import (
+    LearningRateExp,
+)
 
-
-class LearningRateExp:
-    def __init__(
-        self,
-        start_lr,
-        stop_lr,
-        decay_steps,
-        stop_steps,
-        decay_rate=None,
-        **kwargs,
-    ):
-        """
-        Construct an exponential-decayed learning rate.
-
-        Parameters
-        ----------
-        start_lr
-            The learning rate at the start of the training.
-        stop_lr
-            The desired learning rate at the end of the training.
-            When decay_rate is explicitly set, this value will serve as
-            the minimum learning rate during training. In other words,
-            if the learning rate decays below stop_lr, stop_lr will be applied instead.
-        decay_steps
-            The learning rate is decaying every this number of training steps.
-        stop_steps
-            The total training steps for learning rate scheduler.
-        decay_rate
-            The decay rate for the learning rate.
-            If provided, the decay rate will be set instead of
-            calculating it through interpolation between start_lr and stop_lr.
-        """
-        self.start_lr = start_lr
-        default_ds = 100 if stop_steps // 10 > 100 else stop_steps // 100 + 1
-        self.decay_steps = decay_steps
-        if self.decay_steps >= stop_steps:
-            self.decay_steps = default_ds
-        self.decay_rate = np.exp(
-            np.log(stop_lr / self.start_lr) / (stop_steps / self.decay_steps)
-        )
-        if decay_rate is not None:
-            self.decay_rate = decay_rate
-        self.min_lr = stop_lr
-
-    def value(self, step):
-        """Get the learning rate at the given step."""
-        step_lr = self.start_lr * np.power(self.decay_rate, step // self.decay_steps)
-        if step_lr < self.min_lr:
-            step_lr = self.min_lr
-        return step_lr
+__all__ = [
+    "LearningRateExp",
+]
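
With this change, deepmd.pt.utils.learning_rate becomes a thin re-export of the implementation above, so existing code that imports the scheduler from the PyTorch-side module keeps working. A small sketch of the expected behaviour (assuming the package, including its PyTorch components, is installed):

from deepmd.dpmodel.utils.learning_rate import LearningRateExp as DPLearningRateExp
from deepmd.pt.utils.learning_rate import LearningRateExp as PTLearningRateExp

# Both import paths now resolve to the single class defined in
# deepmd/dpmodel/utils/learning_rate.py; the duplicated implementation is gone.
assert DPLearningRateExp is PTLearningRateExp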