From 3cd3473bf7a3b41484baa86d9092248d78e7af39 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Fri, 21 Apr 2023 17:17:37 +0800
Subject: [PATCH] docs
---
funasr/bin/vad_inference_online.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/funasr/bin/vad_inference_online.py b/funasr/bin/vad_inference_online.py
index f35e5a1..4d02620 100644
--- a/funasr/bin/vad_inference_online.py
+++ b/funasr/bin/vad_inference_online.py
@@ -151,6 +151,9 @@
**kwargs,
):
assert check_argument_types()
+ ncpu = kwargs.get("ncpu", 1)
+ torch.set_num_threads(ncpu)
+
if batch_size > 1:
raise NotImplementedError("batch decoding is not implemented")
if ngpu > 1:
@@ -218,8 +221,8 @@
vad_results = []
batch_in_cache = param_dict['in_cache'] if param_dict is not None else dict()
- is_final = param_dict['is_final'] if param_dict is not None else False
- max_end_sil = param_dict['max_end_sil'] if param_dict is not None else 800
+ is_final = param_dict.get('is_final', False) if param_dict is not None else False
+ max_end_sil = param_dict.get('max_end_sil', 800) if param_dict is not None else 800
for keys, batch in loader:
assert isinstance(batch, dict), type(batch)
assert all(isinstance(s, str) for s in keys), keys
--
Gitblit v1.9.1