From cc2c1d1d53dea5d2c45f858d1baa5bd279f47987 Mon Sep 17 00:00:00 2001
From: nichongjia-2007 <nichongjia@gmail.com>
Date: Wed, 31 May 2023 14:39:25 +0800
Subject: [PATCH] Merge branch 'main' of https://github.com/alibaba-damo-academy/FunASR
---
funasr/build_utils/build_optimizer.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+), 0 deletions(-)
diff --git a/funasr/build_utils/build_optimizer.py b/funasr/build_utils/build_optimizer.py
new file mode 100644
index 0000000..bd0b73d
--- /dev/null
+++ b/funasr/build_utils/build_optimizer.py
@@ -0,0 +1,28 @@
+import torch
+
+from funasr.optimizers.fairseq_adam import FairseqAdam
+from funasr.optimizers.sgd import SGD
+
+
def build_optimizer(args, model):
    """Instantiate the optimizer selected by ``args.optim`` for ``model``.

    Args:
        args: Namespace-like configuration object providing ``optim`` (the
            optimizer name, e.g. ``"adam"``) and ``optim_conf`` (a dict of
            keyword arguments forwarded to the optimizer constructor).
        model: ``torch.nn.Module`` whose parameters will be optimized.

    Returns:
        A single-element list containing the constructed optimizer.

    Raises:
        ValueError: If ``args.optim`` is not a recognized optimizer name.
    """
    # Registry mapping each configuration name to its optimizer class.
    optim_classes = {
        "adam": torch.optim.Adam,
        "fairseq_adam": FairseqAdam,
        "adamw": torch.optim.AdamW,
        "sgd": SGD,
        "adadelta": torch.optim.Adadelta,
        "adagrad": torch.optim.Adagrad,
        "adamax": torch.optim.Adamax,
        "asgd": torch.optim.ASGD,
        "lbfgs": torch.optim.LBFGS,
        "rmsprop": torch.optim.RMSprop,
        "rprop": torch.optim.Rprop,
    }

    if args.optim not in optim_classes:
        raise ValueError(f"must be one of {list(optim_classes)}: {args.optim}")

    # Returned as a list: callers iterate over a collection of optimizers.
    return [optim_classes[args.optim](model.parameters(), **args.optim_conf)]
\ No newline at end of file
--
Gitblit v1.9.1