嘉渊
2023-06-14 5feca0cc1718ca2dea23aecbacf2d2218e13a036
update repo
2个文件已修改
共 10 处更改 — 已修改文件：
funasr/build_utils/build_asr_model.py 6 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
funasr/models/e2e_uni_asr.py 4 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
funasr/build_utils/build_asr_model.py
@@ -239,6 +239,7 @@
        vocab_size = len(token_list)
        logging.info(f"Vocabulary size: {vocab_size}")
    else:
        token_list = None
        vocab_size = None
    # frontend
@@ -265,6 +266,9 @@
    # normalization layer
    if args.normalize is not None:
        normalize_class = normalize_choices.get_class(args.normalize)
        if args.model == "mfcca":
            normalize = normalize_class(stats_file=args.cmvn_file,**args.normalize_conf)
        else:
        normalize = normalize_class(**args.normalize_conf)
    else:
        normalize = None
@@ -300,7 +304,7 @@
            **args.model_conf,
        )
    elif args.model in ["paraformer", "paraformer_online", "paraformer_bert", "bicif_paraformer",
                        "contextual_paraformer"]:
                        "contextual_paraformer", "neatcontextual_paraformer"]:
        # predictor
        predictor_class = predictor_choices.get_class(args.predictor)
        predictor = predictor_class(**args.predictor_conf)
funasr/models/e2e_uni_asr.py
@@ -50,9 +50,7 @@
        frontend: Optional[AbsFrontend],
        specaug: Optional[AbsSpecAug],
        normalize: Optional[AbsNormalize],
        preencoder: Optional[AbsPreEncoder],
        encoder: AbsEncoder,
        postencoder: Optional[AbsPostEncoder],
        decoder: AbsDecoder,
        ctc: CTC,
        ctc_weight: float = 0.5,
@@ -80,6 +78,8 @@
        loss_weight_model1: float = 0.5,
        enable_maas_finetune: bool = False,
        freeze_encoder2: bool = False,
        preencoder: Optional[AbsPreEncoder] = None,
        postencoder: Optional[AbsPostEncoder] = None,
        encoder1_encoder2_joint_training: bool = True,
    ):
        assert check_argument_types()