From 202ab8a2c9e2af5c147faf080f96e97abbb7be42 Mon Sep 17 00:00:00 2001
From: hnluo <haoneng.lhn@alibaba-inc.com>
Date: Fri, 08 Dec 2023 16:43:48 +0800
Subject: [PATCH] Merge pull request #1161 from alibaba-damo-academy/dev_lhn
---
funasr/bin/build_trainer.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/funasr/bin/build_trainer.py b/funasr/bin/build_trainer.py
index 61af766..c03bdf3 100644
--- a/funasr/bin/build_trainer.py
+++ b/funasr/bin/build_trainer.py
@@ -18,7 +18,7 @@
from funasr.build_utils.build_scheduler import build_scheduler
from funasr.build_utils.build_trainer import build_trainer as build_trainer_modelscope
from funasr.modules.lora.utils import mark_only_lora_as_trainable
-from funasr.text.phoneme_tokenizer import g2p_choices
+from funasr.tokenizer.phoneme_tokenizer import g2p_choices
from funasr.torch_utils.load_pretrained_model import load_pretrained_model
from funasr.torch_utils.model_summary import model_summary
from funasr.torch_utils.pytorch_version import pytorch_cudnn_version
@@ -548,7 +548,10 @@
init_param = modelscope_dict['init_model']
cmvn_file = modelscope_dict['cmvn_file']
seg_dict_file = modelscope_dict['seg_dict']
- bpemodel = modelscope_dict['bpemodel']
+ if 'bpemodel' in modelscope_dict:
+ bpemodel = modelscope_dict['bpemodel']
+ else:
+ bpemodel = None
# overwrite parameters
with open(config) as f:
@@ -582,7 +585,7 @@
args.seg_dict_file = seg_dict_file
else:
args.seg_dict_file = None
- if os.path.exists(bpemodel):
+ if bpemodel is not None and os.path.exists(bpemodel):
args.bpemodel = bpemodel
else:
args.bpemodel = None
--
Gitblit v1.9.1