njzjz/deepmd-kit

Add optimizers for multitask training

Closed this issue · 0 comments

author: iProzd

Line: 569

else:
return self.lr_exp.value(step - warmup_steps) / self.lr_exp.start_lr
# TODO add optimizers for multitask
# author: iProzd
# Build the optimizer for training. Currently only Adam is supported;
# the TODO above tracks adding per-task optimizers for multitask runs.
# NOTE(review): the pasted snippet lost its indentation — the state-dict
# restore is nested inside the Adam branch here, matching upstream
# deepmd-kit; confirm against the original file.
if self.opt_type == "Adam":
    self.optimizer = torch.optim.Adam(
        self.wrapper.parameters(), lr=self.lr_exp.start_lr
    )
    # Restore optimizer momentum/state when resuming an interrupted run,
    # so training continues from where it left off rather than cold-starting.
    if optimizer_state_dict is not None and self.restart_training:
        self.optimizer.load_state_dict(optimizer_state_dict)
else:
    # Fail fast on an unrecognized optimizer type instead of silently
    # leaving self.optimizer unset (which would surface later as a
    # confusing AttributeError).
    raise ValueError(f"Not supported optimizer type '{self.opt_type}'")