From b75d1e89bb2f513a79bb07e9100ba1cd2bbcf40c Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Sun, 09 Jun 2024 00:32:57 +0800
Subject: [PATCH] fix bug

---
 funasr/schedulers/__init__.py | 26 ++++++++++++++------------
 1 file changed, 14 insertions(+), 12 deletions(-)

diff --git a/funasr/schedulers/__init__.py b/funasr/schedulers/__init__.py
index cba286a..39f8c0e 100644
--- a/funasr/schedulers/__init__.py
+++ b/funasr/schedulers/__init__.py
@@ -6,18 +6,20 @@
 from funasr.schedulers.noam_lr import NoamLR
 from funasr.schedulers.tri_stage_scheduler import TriStageLR
 from funasr.schedulers.warmup_lr import WarmupLR
+from funasr.schedulers.lambdalr_cus import CustomLambdaLR
 
 scheduler_classes = dict(
-    ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
-    lambdalr=torch.optim.lr_scheduler.LambdaLR,
-    steplr=torch.optim.lr_scheduler.StepLR,
-    multisteplr=torch.optim.lr_scheduler.MultiStepLR,
-    exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
-    CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
-    noamlr=NoamLR,
-    warmuplr=WarmupLR,
-    tri_stage=TriStageLR,
-    cycliclr=torch.optim.lr_scheduler.CyclicLR,
-    onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
-    CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
+    ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
+    lambdalr=torch.optim.lr_scheduler.LambdaLR,
+    steplr=torch.optim.lr_scheduler.StepLR,
+    multisteplr=torch.optim.lr_scheduler.MultiStepLR,
+    exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
+    CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
+    noamlr=NoamLR,
+    warmuplr=WarmupLR,
+    tri_stage=TriStageLR,
+    cycliclr=torch.optim.lr_scheduler.CyclicLR,
+    onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
+    CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
+    custom_lambdalr=CustomLambdaLR,
 )
-- 
Gitblit v1.9.1
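
Context note (not part of the patch): scheduler_classes is a name-to-class registry, so the added custom_lambdalr key makes CustomLambdaLR selectable by name, presumably from a training config. Below is a minimal sketch of how such a registry is typically consumed. The lookup uses the stock "steplr" entry because the patch does not show CustomLambdaLR's constructor signature; the scheduler_name and scheduler_conf values are made-up illustrations, while the torch.optim calls are real APIs.

# Sketch only: demonstrates name-based lookup into the registry
# patched above, not FunASR's actual training loop.
import torch
from funasr.schedulers import scheduler_classes

model = torch.nn.Linear(8, 8)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

scheduler_name = "steplr"                          # hypothetical config value
scheduler_conf = {"step_size": 10, "gamma": 0.5}   # hypothetical kwargs for the class

if scheduler_name not in scheduler_classes:
    raise ValueError(f"unknown scheduler: {scheduler_name}")
# Instantiate the looked-up class against the optimizer.
scheduler = scheduler_classes[scheduler_name](optimizer, **scheduler_conf)

for _ in range(3):     # one optimizer step and one scheduler step per epoch
    optimizer.step()
    scheduler.step()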