From 33d3d2084403fd34b79c835d2f2fe04f6cd8f738 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 13 Sep 2023 09:33:54 +0800
Subject: [PATCH] Merge branch 'main' of github.com:alibaba-damo-academy/FunASR add

---
 funasr/build_utils/build_lm_model.py |   40 ++++++++++++++++++++++++++++++++++------
 1 file changed, 34 insertions(+), 6 deletions(-)

diff --git a/funasr/build_utils/build_lm_model.py b/funasr/build_utils/build_lm_model.py
index 2cd869d..f78a20e 100644
--- a/funasr/build_utils/build_lm_model.py
+++ b/funasr/build_utils/build_lm_model.py
@@ -1,6 +1,9 @@
-from funasr.lm.abs_model import AbsLM
-from funasr.lm.seq_rnn_lm import SequentialRNNLM
-from funasr.lm.transformer_lm import TransformerLM
+import logging
+
+from funasr.train.abs_model import AbsLM
+from funasr.train.abs_model import LanguageModel
+from funasr.models.seq_rnn_lm import SequentialRNNLM
+from funasr.models.transformer_lm import TransformerLM
 from funasr.torch_utils.initialize import initialize
 from funasr.train.class_choices import ClassChoices
 
@@ -13,22 +16,47 @@
     type_check=AbsLM,
     default="seq_rnn",
 )
+model_choices = ClassChoices(
+    "model",
+    classes=dict(
+        lm=LanguageModel,
+    ),
+    default="lm",
+)
 
 class_choices_list = [
     # --lm and --lm_conf
-    lm_choices
+    lm_choices,
+    # --model and --model_conf
+    model_choices
 ]
 
 
 def build_lm_model(args):
     # token_list
-    if args.token_list is not None:
-        with open(args.token_list) as f:
+    if isinstance(args.token_list, str):
+        with open(args.token_list, encoding="utf-8") as f:
             token_list = [line.rstrip() for line in f]
         args.token_list = list(token_list)
         vocab_size = len(token_list)
         logging.info(f"Vocabulary size: {vocab_size}")
+    elif isinstance(args.token_list, (tuple, list)):
+        token_list = list(args.token_list)
+        vocab_size = len(token_list)
+        logging.info(f"Vocabulary size: {vocab_size}")
     else:
         vocab_size = None
 
+    # lm
+    lm_class = lm_choices.get_class(args.lm)
+    lm = lm_class(vocab_size=vocab_size, **args.lm_conf)
+
+    args.model = args.model if hasattr(args, "model") else "lm"
+    model_class = model_choices.get_class(args.model)
+    model = model_class(lm=lm, vocab_size=vocab_size, **args.model_conf)
+
+    # initialize
+    if args.init is not None:
+        initialize(model, args.init)
+
     return model

--
Gitblit v1.9.1