From 6d8528fa9f0ca9261718ef0afec67a4acb48c6d7 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: 星期三, 08 二月 2023 19:58:16 +0800
Subject: [PATCH] Merge branch 'main' of github.com:alibaba-damo-academy/FunASR add
---
funasr/bin/build_trainer.py | 28 ++++++++++++++++++++++++++--
 1 file changed, 26 insertions(+), 2 deletions(-)
diff --git a/funasr/bin/build_trainer.py b/funasr/bin/build_trainer.py
index c13f91b..5ef736a 100644
--- a/funasr/bin/build_trainer.py
+++ b/funasr/bin/build_trainer.py
@@ -34,8 +34,22 @@
return args, ASRTask
-def build_trainer(modelscope_dict, data_dir, output_dir, train_set="train", dev_set="validation", distributed=False,
- dataset_type="small", lr=None, batch_bins=None, max_epoch=None, mate_params=None):
+def build_trainer(modelscope_dict,
+ data_dir,
+ output_dir,
+ train_set="train",
+ dev_set="validation",
+ distributed=False,
+ dataset_type="small",
+ batch_bins=None,
+ max_epoch=None,
+ optim=None,
+ lr=None,
+ scheduler=None,
+ scheduler_conf=None,
+ specaug=None,
+ specaug_conf=None,
+ param_dict=None):
mode = modelscope_dict['mode']
args, ASRTask = parse_args(mode=mode)
# ddp related
@@ -94,8 +108,18 @@
args.output_dir = output_dir
args.gpu_id = args.local_rank
args.config = finetune_config
+ if optim is not None:
+ args.optim = optim
if lr is not None:
args.optim_conf["lr"] = lr
+ if scheduler is not None:
+ args.scheduler = scheduler
+ if scheduler_conf is not None:
+ args.scheduler_conf = scheduler_conf
+ if specaug is not None:
+ args.specaug = specaug
+ if specaug_conf is not None:
+ args.specaug_conf = specaug_conf
if max_epoch is not None:
args.max_epoch = max_epoch
if batch_bins is not None:
--
Gitblit v1.9.1