From 9b4e9cc8a0311e5243d69b73ed073e7ea441982e Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 27 Mar 2024 16:05:29 +0800
Subject: [PATCH] train update

---
 funasr/schedulers/__init__.py | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+), 0 deletions(-)

diff --git a/funasr/schedulers/__init__.py b/funasr/schedulers/__init__.py
index e69de29..39f8c0e 100644
--- a/funasr/schedulers/__init__.py
+++ b/funasr/schedulers/__init__.py
@@ -0,0 +1,25 @@
+import torch
+import torch.multiprocessing
+import torch.nn
+import torch.optim
+
+from funasr.schedulers.noam_lr import NoamLR
+from funasr.schedulers.tri_stage_scheduler import TriStageLR
+from funasr.schedulers.warmup_lr import WarmupLR
+from funasr.schedulers.lambdalr_cus import CustomLambdaLR
+
+scheduler_classes = dict(
+    ReduceLROnPlateau=torch.optim.lr_scheduler.ReduceLROnPlateau,
+    lambdalr=torch.optim.lr_scheduler.LambdaLR,
+    steplr=torch.optim.lr_scheduler.StepLR,
+    multisteplr=torch.optim.lr_scheduler.MultiStepLR,
+    exponentiallr=torch.optim.lr_scheduler.ExponentialLR,
+    CosineAnnealingLR=torch.optim.lr_scheduler.CosineAnnealingLR,
+    noamlr=NoamLR,
+    warmuplr=WarmupLR,
+    tri_stage=TriStageLR,
+    cycliclr=torch.optim.lr_scheduler.CyclicLR,
+    onecyclelr=torch.optim.lr_scheduler.OneCycleLR,
+    CosineAnnealingWarmRestarts=torch.optim.lr_scheduler.CosineAnnealingWarmRestarts,
+    custom_lambdalr=CustomLambdaLR,
+)
--
Gitblit v1.9.1