From 6d3a3da8a8c7d1be9740a9b2d6fac767f8dfff17 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Thu, 30 May 2024 19:16:52 +0800
Subject: [PATCH] docs
---
funasr/schedulers/tri_stage_scheduler.py | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)
diff --git a/funasr/schedulers/tri_stage_scheduler.py b/funasr/schedulers/tri_stage_scheduler.py
index c442260..7fc13b2 100644
--- a/funasr/schedulers/tri_stage_scheduler.py
+++ b/funasr/schedulers/tri_stage_scheduler.py
@@ -14,12 +14,12 @@
class TriStageLR(_LRScheduler, AbsBatchStepScheduler):
def __init__(
- self,
- optimizer: torch.optim.Optimizer,
- last_epoch: int = -1,
- phase_ratio: Optional[List[float]] = None,
- init_lr_scale: float = 0.01,
- final_lr_scale: float = 0.01,
+ self,
+ optimizer: torch.optim.Optimizer,
+ last_epoch: int = -1,
+ phase_ratio: Optional[List[float]] = None,
+ init_lr_scale: float = 0.01,
+ final_lr_scale: float = 0.01,
):
self.optimizer = optimizer
self.last_epoch = last_epoch
@@ -42,9 +42,7 @@
self.decay_steps = int(self.max_update * self.phase_ratio[2])
self.warmup_rate = (
- (self.peak_lr - self.init_lr) / self.warmup_steps
- if self.warmup_steps != 0
- else 0
+ (self.peak_lr - self.init_lr) / self.warmup_steps if self.warmup_steps != 0 else 0
)
self.decay_factor = -math.log(self.final_lr_scale) / self.decay_steps
--
Gitblit v1.9.1