add cosine restart learning rate
hellozhaoming committed Oct 27, 2023
1 parent 3f3449c commit 05052c1
Showing 1 changed file with 24 additions and 22 deletions.
deepmd/train/trainer.py
@@ -115,8 +115,8 @@ def get_lr_and_coef(lr_param):
                 scale_lr_coef = np.sqrt(self.run_opt.world_size).real
             else:
                 scale_lr_coef = 1.0
-            lr_type = lr_param.get("type", "exp")
-            if lr_type == "exp":
+            self.lr_type = lr_param.get("type", "exp")
+            if self.lr_type == "exp":
                 lr = LearningRateExp(
                     lr_param["start_lr"], lr_param["stop_lr"], lr_param["decay_steps"]
                 )
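The collapsed region between this hunk and the next (original lines 123-128) is presumably where the new cosine-restart branch lands. A minimal standalone mirror of the dispatch, assuming the deepmd-kit 2.x layout; the "cosinerestart" type string and the LearningRateCosineRestarts name are guesses, not the committed code:

# Hypothetical mirror of the dispatch in get_lr_and_coef after this commit.
# Only the "exp" branch and the final else are visible in the diff; the elif
# branch, its type string, and the class name are assumptions.
from deepmd.utils.learning_rate import LearningRateExp


def build_lr_schedule(lr_param: dict):
    lr_type = lr_param.get("type", "exp")
    if lr_type == "exp":
        return LearningRateExp(
            lr_param["start_lr"], lr_param["stop_lr"], lr_param["decay_steps"]
        )
    if lr_type == "cosinerestart":
        # Assumed class; a sketch of it follows the next hunk.
        return LearningRateCosineRestarts(
            lr_param["start_lr"], lr_param["stop_lr"], lr_param["decay_steps"]
        )
    raise RuntimeError("unknown learning_rate type " + lr_type)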
@@ -129,7 +129,7 @@ def get_lr_and_coef(lr_param):
                     lr_param["start_lr"], lr_param["stop_lr"], lr_param["decay_steps"]
                 )
             else:
-                raise RuntimeError("unknown learning_rate type " + lr_type)
+                raise RuntimeError("unknown learning_rate type " + self.lr_type)
             return lr, scale_lr_coef

         # learning rate
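The schedule class itself is not part of this diff. Under the commit title, an SGDR-style cosine-restart schedule written against deepmd's TF1 conventions could look roughly like this; the class name, constructor signature, and the tf.train.cosine_decay_restarts parameters are all assumptions modeled on LearningRateExp's interface:

# Minimal sketch of a cosine-restart schedule, assuming the build/start_lr
# interface of deepmd's LearningRateExp.  Nothing here is the committed
# class, which is not visible in this diff.
from deepmd.env import tf  # deepmd re-exports a TF1-compatible module


class LearningRateCosineRestarts:
    def __init__(self, start_lr, stop_lr, decay_steps):
        self.start_lr_ = start_lr
        self.decay_steps_ = decay_steps
        # Floor of each cosine cycle, as a fraction of the starting lr.
        self.alpha_ = stop_lr / start_lr

    def build(self, global_step, stop_step=None):
        # Cosine decay over the first decay_steps steps, then a warm restart
        # back to start_lr, each successive cycle twice as long (t_mul=2.0).
        return tf.train.cosine_decay_restarts(
            learning_rate=self.start_lr_,
            global_step=global_step,
            first_decay_steps=self.decay_steps_,
            t_mul=2.0,
            m_mul=1.0,
            alpha=self.alpha_,
        )

    def start_lr(self):
        return self.start_lr_

Such a schedule has no single decay_rate_ and no closed-form value() at an arbitrary step, which is why the next hunk gates the start-lr log line on self.lr_type == "exp".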
@@ -563,29 +563,31 @@ def train(self, train_data=None, valid_data=None):
         is_first_step = True
         self.cur_batch = cur_batch
         if not self.multi_task_mode:
-            log.info(
-                "start training at lr %.2e (== %.2e), decay_step %d, decay_rate %f, final lr will be %.2e"
-                % (
-                    run_sess(self.sess, self.learning_rate),
-                    self.lr.value(cur_batch),
-                    self.lr.decay_steps_,
-                    self.lr.decay_rate_,
-                    self.lr.value(stop_batch),
-                )
-            )
-        else:
-            for fitting_key in self.fitting:
+            if self.lr_type == "exp":
                 log.info(
-                    "%s: start training at lr %.2e (== %.2e), decay_step %d, decay_rate %f, final lr will be %.2e"
+                    "start training at lr %.2e (== %.2e), decay_step %d, decay_rate %f, final lr will be %.2e"
                     % (
-                        fitting_key,
-                        run_sess(self.sess, self.learning_rate_dict[fitting_key]),
-                        self.lr_dict[fitting_key].value(cur_batch),
-                        self.lr_dict[fitting_key].decay_steps_,
-                        self.lr_dict[fitting_key].decay_rate_,
-                        self.lr_dict[fitting_key].value(stop_batch),
+                        run_sess(self.sess, self.learning_rate),
+                        self.lr.value(cur_batch),
+                        self.lr.decay_steps_,
+                        self.lr.decay_rate_,
+                        self.lr.value(stop_batch),
                     )
                 )
+        else:
+            for fitting_key in self.fitting:
+                if self.lr_type == "exp":
+                    log.info(
+                        "%s: start training at lr %.2e (== %.2e), decay_step %d, decay_rate %f, final lr will be %.2e"
+                        % (
+                            fitting_key,
+                            run_sess(self.sess, self.learning_rate_dict[fitting_key]),
+                            self.lr_dict[fitting_key].value(cur_batch),
+                            self.lr_dict[fitting_key].decay_steps_,
+                            self.lr_dict[fitting_key].decay_rate_,
+                            self.lr_dict[fitting_key].value(stop_batch),
+                        )
+                    )

         prf_options = None
         prf_run_metadata = None
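In train(), the change above only wraps the existing start-lr log line in if self.lr_type == "exp": the message interpolates decay_steps_, decay_rate_, and value(), which are specific to the exponential schedule, so it is simply skipped for other schedule types. On the user side, selecting the new schedule should reduce to the learning_rate block of the training input; shown here as a Python dict, with the "cosinerestart" string again an assumption:

# Hypothetical learning_rate section of a deepmd training input, as a dict.
# The keys mirror those the "exp" schedule reads in this diff; only "type"
# changes, and the exact string accepted by this commit is an assumption.
lr_param = {
    "type": "cosinerestart",
    "start_lr": 1.0e-3,   # lr at step 0 and after each warm restart
    "stop_lr": 1.0e-8,    # floor of each cosine cycle
    "decay_steps": 5000,  # length of the first cosine cycle
}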
