From 5feca0cc1718ca2dea23aecbacf2d2218e13a036 Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Wed, 14 Jun 2023 23:39:48 +0800
Subject: [PATCH] update repo
---
funasr/build_utils/build_asr_model.py | 8 ++++++--
funasr/models/e2e_uni_asr.py | 4 ++--
2 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/funasr/build_utils/build_asr_model.py b/funasr/build_utils/build_asr_model.py
index 621c4d9..7483a9a 100644
--- a/funasr/build_utils/build_asr_model.py
+++ b/funasr/build_utils/build_asr_model.py
@@ -239,6 +239,7 @@
vocab_size = len(token_list)
logging.info(f"Vocabulary size: {vocab_size}")
else:
+ token_list = None
vocab_size = None
# frontend
@@ -265,7 +266,10 @@
# normalization layer
if args.normalize is not None:
normalize_class = normalize_choices.get_class(args.normalize)
- normalize = normalize_class(**args.normalize_conf)
+ if args.model == "mfcca":
+ normalize = normalize_class(stats_file=args.cmvn_file,**args.normalize_conf)
+ else:
+ normalize = normalize_class(**args.normalize_conf)
else:
normalize = None
@@ -300,7 +304,7 @@
**args.model_conf,
)
elif args.model in ["paraformer", "paraformer_online", "paraformer_bert", "bicif_paraformer",
- "contextual_paraformer"]:
+ "contextual_paraformer", "neatcontextual_paraformer"]:
# predictor
predictor_class = predictor_choices.get_class(args.predictor)
predictor = predictor_class(**args.predictor_conf)
diff --git a/funasr/models/e2e_uni_asr.py b/funasr/models/e2e_uni_asr.py
index d08ea37..9ec3a39 100644
--- a/funasr/models/e2e_uni_asr.py
+++ b/funasr/models/e2e_uni_asr.py
@@ -50,9 +50,7 @@
frontend: Optional[AbsFrontend],
specaug: Optional[AbsSpecAug],
normalize: Optional[AbsNormalize],
- preencoder: Optional[AbsPreEncoder],
encoder: AbsEncoder,
- postencoder: Optional[AbsPostEncoder],
decoder: AbsDecoder,
ctc: CTC,
ctc_weight: float = 0.5,
@@ -80,6 +78,8 @@
loss_weight_model1: float = 0.5,
enable_maas_finetune: bool = False,
freeze_encoder2: bool = False,
+ preencoder: Optional[AbsPreEncoder] = None,
+ postencoder: Optional[AbsPostEncoder] = None,
encoder1_encoder2_joint_training: bool = True,
):
assert check_argument_types()
--
Gitblit v1.9.1