From ddbc8b5eded1fff6084001d160d46b532020ecb7 Mon Sep 17 00:00:00 2001
From: Shi Xian <40013335+R1ckShi@users.noreply.github.com>
Date: Mon, 15 Jan 2024 20:36:20 +0800
Subject: [PATCH] Merge pull request #1247 from alibaba-damo-academy/funasr1.0

---
 funasr/bin/inference.py |   10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/funasr/bin/inference.py b/funasr/bin/inference.py
index cefee55..7368d16 100644
--- a/funasr/bin/inference.py
+++ b/funasr/bin/inference.py
@@ -274,12 +274,9 @@
     def generate_with_vad(self, input, input_len=None, **cfg):
         
         # step.1: compute the vad model
-        model = self.vad_model
-        kwargs = self.vad_kwargs
-        kwargs.update(cfg)
+        self.vad_kwargs.update(cfg)
         beg_vad = time.time()
-        res = self.generate(input, input_len=input_len, model=model, kwargs=kwargs, **cfg)
-        vad_res = res
+        res = self.generate(input, input_len=input_len, model=self.vad_model, kwargs=self.vad_kwargs, **cfg)
         end_vad = time.time()
         print(f"time cost vad: {end_vad - beg_vad:0.3f}")
 
@@ -312,10 +309,7 @@
             if not len(sorted_data):
                 logging.info("decoding, utt: {}, empty speech".format(key))
                 continue
-            
 
-            # if kwargs["device"] == "cpu":
-            #     batch_size = 0
             if len(sorted_data) > 0 and len(sorted_data[0]) > 0:
                 batch_size = max(batch_size, sorted_data[0][0][1] - sorted_data[0][0][0])
             

--
Gitblit v1.9.1