From 2ff405b2f4ab899eff9bece232969fbb0c8f0555 Mon Sep 17 00:00:00 2001
From: jmwang66 <wangjiaming.wjm@alibaba-inc.com>
Date: Tue, 20 Jun 2023 00:26:37 +0800
Subject: [PATCH] Merge pull request #653 from alibaba-damo-academy/dev_wjm_infer
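
Allow args.token_list in build_lm_model to be either a path to a vocabulary
file (now read with explicit UTF-8 encoding) or an in-memory tuple/list, and
fall back to args.model = "lm" when the attribute is missing.

Illustrative call, as a minimal sketch only: the Namespace fields mirror the
attributes the patched code reads, while "seq_rnn" stands in for whichever lm
name is actually registered in lm_choices, and the empty conf dicts are
assumptions for the sketch.

    import argparse
    from funasr.build_utils.build_lm_model import build_lm_model

    args = argparse.Namespace(
        token_list=["<blank>", "<unk>", "hello", "<sos/eos>"],  # or a path string
        lm="seq_rnn",      # placeholder; must be a name known to lm_choices
        lm_conf={},        # assumed empty configs for the sketch
        model_conf={},
    )
    # args.model is deliberately left unset; the patch defaults it to "lm".
    # Assumes build_lm_model returns the constructed model object.
    model = build_lm_model(args)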
---
funasr/build_utils/build_lm_model.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/funasr/build_utils/build_lm_model.py b/funasr/build_utils/build_lm_model.py
index 8f4a958..f78a20e 100644
--- a/funasr/build_utils/build_lm_model.py
+++ b/funasr/build_utils/build_lm_model.py
@@ -34,10 +34,14 @@
 def build_lm_model(args):
     # token_list
-    if args.token_list is not None:
-        with open(args.token_list) as f:
+    if isinstance(args.token_list, str):
+        with open(args.token_list, encoding="utf-8") as f:
             token_list = [line.rstrip() for line in f]
         args.token_list = list(token_list)
+        vocab_size = len(token_list)
+        logging.info(f"Vocabulary size: {vocab_size}")
+    elif isinstance(args.token_list, (tuple, list)):
+        token_list = list(args.token_list)
         vocab_size = len(token_list)
         logging.info(f"Vocabulary size: {vocab_size}")
     else:
@@ -47,6 +51,7 @@
     lm_class = lm_choices.get_class(args.lm)
     lm = lm_class(vocab_size=vocab_size, **args.lm_conf)
+    args.model = args.model if hasattr(args, "model") else "lm"
     model_class = model_choices.get_class(args.model)
     model = model_class(lm=lm, vocab_size=vocab_size, **args.model_conf)
--
Gitblit v1.9.1