From e60ac4bc991183a780fdd03d22db7d3b42df9b58 Mon Sep 17 00:00:00 2001
From: haoneng.lhn <haoneng.lhn@alibaba-inc.com>
Date: Mon, 11 Sep 2023 17:36:27 +0800
Subject: [PATCH] support chunk size select for chunk-hopping encoder
---
funasr/bin/asr_infer.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/funasr/bin/asr_infer.py b/funasr/bin/asr_infer.py
index 2e002b7..7746821 100644
--- a/funasr/bin/asr_infer.py
+++ b/funasr/bin/asr_infer.py
@@ -399,7 +399,7 @@
@torch.no_grad()
def __call__(
self, speech: Union[torch.Tensor, np.ndarray], speech_lengths: Union[torch.Tensor, np.ndarray] = None,
- begin_time: int = 0, end_time: int = None,
+ decoding_ind: int = None, begin_time: int = 0, end_time: int = None,
):
"""Inference
@@ -429,7 +429,9 @@
batch = to_device(batch, device=self.device)
# b. Forward Encoder
- enc, enc_len = self.asr_model.encode(**batch, ind=self.decoding_ind)
+ if decoding_ind is None:
+ decoding_ind = self.decoding_ind
+ enc, enc_len = self.asr_model.encode(**batch, ind=decoding_ind)
if isinstance(enc, tuple):
enc = enc[0]
# assert len(enc) == 1, len(enc)
--
Gitblit v1.9.1