From ae49b2a8e1bc676e6014d8a12ebeec947b655e3e Mon Sep 17 00:00:00 2001
From: 莫拉古 <61447879+yechaoying@users.noreply.github.com>
Date: Fri, 29 Nov 2024 09:55:43 +0800
Subject: [PATCH] 变量名写错了 (#2249)
---
funasr/schedulers/tri_stage_scheduler.py | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)
diff --git a/funasr/schedulers/tri_stage_scheduler.py b/funasr/schedulers/tri_stage_scheduler.py
index c442260..7fc13b2 100644
--- a/funasr/schedulers/tri_stage_scheduler.py
+++ b/funasr/schedulers/tri_stage_scheduler.py
@@ -14,12 +14,12 @@
class TriStageLR(_LRScheduler, AbsBatchStepScheduler):
def __init__(
- self,
- optimizer: torch.optim.Optimizer,
- last_epoch: int = -1,
- phase_ratio: Optional[List[float]] = None,
- init_lr_scale: float = 0.01,
- final_lr_scale: float = 0.01,
+ self,
+ optimizer: torch.optim.Optimizer,
+ last_epoch: int = -1,
+ phase_ratio: Optional[List[float]] = None,
+ init_lr_scale: float = 0.01,
+ final_lr_scale: float = 0.01,
):
self.optimizer = optimizer
self.last_epoch = last_epoch
@@ -42,9 +42,7 @@
self.decay_steps = int(self.max_update * self.phase_ratio[2])
self.warmup_rate = (
- (self.peak_lr - self.init_lr) / self.warmup_steps
- if self.warmup_steps != 0
- else 0
+ (self.peak_lr - self.init_lr) / self.warmup_steps if self.warmup_steps != 0 else 0
)
self.decay_factor = -math.log(self.final_lr_scale) / self.decay_steps
--
Gitblit v1.9.1