From 594b79f59e7eefa6955c729f6264c8c99d1d9571 Mon Sep 17 00:00:00 2001
From: hnluo <haoneng.lhn@alibaba-inc.com>
Date: Mon, 05 Jun 2023 16:46:15 +0800
Subject: [PATCH] Merge pull request #591 from alibaba-damo-academy/dev_lhn
---
funasr/bin/build_trainer.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/funasr/bin/build_trainer.py b/funasr/bin/build_trainer.py
index df921fa..267e405 100644
--- a/funasr/bin/build_trainer.py
+++ b/funasr/bin/build_trainer.py
@@ -23,6 +23,8 @@
from funasr.tasks.asr import ASRTask as ASRTask
elif mode == "paraformer":
from funasr.tasks.asr import ASRTaskParaformer as ASRTask
+ elif mode == "paraformer_streaming":
+ from funasr.tasks.asr import ASRTaskParaformer as ASRTask
elif mode == "paraformer_vad_punc":
from funasr.tasks.asr import ASRTaskParaformer as ASRTask
elif mode == "uniasr":
@@ -83,8 +85,7 @@
finetune_configs = yaml.safe_load(f)
# set data_types
if dataset_type == "large":
- if 'data_types' not in finetune_configs['dataset_conf']:
- finetune_configs["dataset_conf"]["data_types"] = "sound,text"
+ finetune_configs["dataset_conf"]["data_types"] = "sound,text"
finetune_configs = update_dct(configs, finetune_configs)
for key, value in finetune_configs.items():
if hasattr(args, key):
@@ -132,8 +133,7 @@
if args.dataset_type == "small":
args.batch_bins = batch_bins
elif args.dataset_type == "large":
- # args.dataset_conf["batch_conf"]["batch_size"] = batch_bins
- pass
+ args.dataset_conf["batch_conf"]["batch_size"] = batch_bins
else:
raise ValueError(f"Not supported dataset_type={args.dataset_type}")
if args.normalize in ["null", "none", "None"]:
--
Gitblit v1.9.1