# funasr/schedulers/warmup_lr.py
"""Warm up learning rate scheduler module."""

from typing import Union

import torch


# NOTE(review): this chunk is reconstructed from a collapsed unified-diff
# fragment; the enclosing class (presumably a torch _LRScheduler subclass
# named WarmupLR) sits between the two hunks and is not visible here.
# `self.warmup_steps`, `self.last_epoch`, and `self.base_lrs` are assumed
# to be set by that class — confirm against the full file.
def get_lr(self):
    """Return the per-parameter-group learning rates for the current step.

    Implements the inverse-square-root ("Noam") warmup schedule:

        lr * warmup_steps**0.5 * min(step**-0.5, step * warmup_steps**-1.5)

    The rate grows linearly for the first ``warmup_steps`` steps and then
    decays proportionally to ``step**-0.5``; the two branches meet exactly
    at ``step == warmup_steps``.

    Returns:
        list[float]: one scaled learning rate per entry in ``self.base_lrs``.
    """
    # _LRScheduler counts epochs from -1; the first real step is 1,
    # which also avoids the 0**-0.5 division-by-zero.
    step_num = self.last_epoch + 1
    # Hoist the step-dependent factor out of the per-group loop: it is
    # identical for every base lr.
    scale = self.warmup_steps**0.5 * min(
        step_num**-0.5, step_num * self.warmup_steps**-1.5
    )
    return [lr * scale for lr in self.base_lrs]