From ddbc8b5eded1fff6084001d160d46b532020ecb7 Mon Sep 17 00:00:00 2001
From: Shi Xian <40013335+R1ckShi@users.noreply.github.com>
Date: Mon, 15 Jan 2024 20:36:20 +0800
Subject: [PATCH] Merge pull request #1247 from alibaba-damo-academy/funasr1.0

---
 funasr/schedulers/__init__.py |   24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/funasr/schedulers/__init__.py b/funasr/schedulers/__init__.py
index cba286a..0d1a578 100644
--- a/funasr/schedulers/__init__.py
+++ b/funasr/schedulers/__init__.py
@@ -8,16 +8,16 @@
 from funasr.schedulers.warmup_lr import WarmupLR
 
 scheduler_classes = dict(
-	ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
-	lambdalr=torch.optim.lr_scheduler.LambdaLR,
-	steplr=torch.optim.lr_scheduler.StepLR,
-	multisteplr=torch.optim.lr_scheduler.MultiStepLR,
-	exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
-	CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
-	noamlr=NoamLR,
-	warmuplr=WarmupLR,
-	tri_stage=TriStageLR,
-	cycliclr=torch.optim.lr_scheduler.CyclicLR,
-	onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
-	CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
+    ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
+    lambdalr=torch.optim.lr_scheduler.LambdaLR,
+    steplr=torch.optim.lr_scheduler.StepLR,
+    multisteplr=torch.optim.lr_scheduler.MultiStepLR,
+    exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
+    CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
+    noamlr=NoamLR,
+    warmuplr=WarmupLR,
+    tri_stage=TriStageLR,
+    cycliclr=torch.optim.lr_scheduler.CyclicLR,
+    onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
+    CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
 )

--
Gitblit v1.9.1
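
Note (outside the patch itself): scheduler_classes is a plain name-to-class registry, so training code can resolve a learning-rate scheduler from a string key and construct it against an optimizer. The sketch below is illustrative only and not taken from this patch; the chosen key ("steplr") and constructor arguments are assumptions, and it relies only on the registry being importable from funasr.schedulers.

    import torch
    from funasr.schedulers import scheduler_classes

    # Build a toy model/optimizer just to have something to schedule.
    model = torch.nn.Linear(8, 8)
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

    # Look up the scheduler class by its registry key (keys are used exactly as
    # written in the dict, so casing matters) and pass scheduler-specific args.
    scheduler_cls = scheduler_classes["steplr"]  # -> torch.optim.lr_scheduler.StepLR
    scheduler = scheduler_cls(optimizer, step_size=10, gamma=0.5)

    # Typical training-loop ordering: optimizer step, then scheduler step.
    for _ in range(3):
        optimizer.step()
        scheduler.step()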