From 1233c0d3ff9cf7fd6131862e7d0b208d3981f6da Mon Sep 17 00:00:00 2001
From: shixian.shi <shixian.shi@alibaba-inc.com>
Date: Mon, 15 Jan 2024 20:34:47 +0800
Subject: [PATCH] code update

---
 funasr/schedulers/__init__.py |   24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/funasr/schedulers/__init__.py b/funasr/schedulers/__init__.py
index cba286a..0d1a578 100644
--- a/funasr/schedulers/__init__.py
+++ b/funasr/schedulers/__init__.py
@@ -8,16 +8,16 @@
 from funasr.schedulers.warmup_lr import WarmupLR
 
 scheduler_classes = dict(
-	ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
-	lambdalr=torch.optim.lr_scheduler.LambdaLR,
-	steplr=torch.optim.lr_scheduler.StepLR,
-	multisteplr=torch.optim.lr_scheduler.MultiStepLR,
-	exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
-	CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
-	noamlr=NoamLR,
-	warmuplr=WarmupLR,
-	tri_stage=TriStageLR,
-	cycliclr=torch.optim.lr_scheduler.CyclicLR,
-	onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
-	CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
+    ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
+    lambdalr=torch.optim.lr_scheduler.LambdaLR,
+    steplr=torch.optim.lr_scheduler.StepLR,
+    multisteplr=torch.optim.lr_scheduler.MultiStepLR,
+    exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
+    CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
+    noamlr=NoamLR,
+    warmuplr=WarmupLR,
+    tri_stage=TriStageLR,
+    cycliclr=torch.optim.lr_scheduler.CyclicLR,
+    onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
+    CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
 )

--
Gitblit v1.9.1