From 9b4e0969f29dccdfa78a55219f8107e549c96e45 Mon Sep 17 00:00:00 2001
From: lzr265946 <lzr265946@alibaba-inc.com>
Date: Thu, 07 Sep 2023 17:19:02 +0800
Subject: [PATCH] fix transformerLM inference recipe
---
funasr/models/e2e_asr_paraformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/funasr/models/e2e_asr_paraformer.py b/funasr/models/e2e_asr_paraformer.py
index 5a1a29b..e157454 100644
--- a/funasr/models/e2e_asr_paraformer.py
+++ b/funasr/models/e2e_asr_paraformer.py
@@ -2107,7 +2107,7 @@
return loss_att, acc_att, cer_att, wer_att, loss_pre
- def cal_decoder_with_predictor(self, encoder_out, encoder_out_lens, sematic_embeds, ys_pad_lens, hw_list=None):
+ def cal_decoder_with_predictor(self, encoder_out, encoder_out_lens, sematic_embeds, ys_pad_lens, hw_list=None, clas_scale=1.0):
if hw_list is None:
# default hotword list
hw_list = [torch.Tensor([self.sos]).long().to(encoder_out.device)] # empty hotword list
--
Gitblit v1.9.1
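
Note (not part of the patch): the change adds a clas_scale keyword, defaulting to 1.0, to cal_decoder_with_predictor. Below is a minimal sketch of how an inference caller might forward it; apart from cal_decoder_with_predictor, hw_list, and clas_scale (which appear in the diff), all names are hypothetical and for illustration only.

    # Hypothetical wrapper -- not FunASR code; only the method name and the
    # hw_list / clas_scale keywords come from the patch above.
    def decode_with_clas_bias(model, encoder_out, encoder_out_lens,
                              sematic_embeds, ys_pad_lens,
                              hotword_list=None, clas_scale=1.0):
        # clas_scale presumably weights the contextual (CLAS/hotword) bias at
        # inference time; the default of 1.0 keeps the pre-patch behaviour.
        return model.cal_decoder_with_predictor(
            encoder_out, encoder_out_lens,
            sematic_embeds, ys_pad_lens,
            hw_list=hotword_list, clas_scale=clas_scale)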