From 2e769fb36ce88dabfa984e8b81e8cb1c90799c95 Mon Sep 17 00:00:00 2001
From: zhifu gao <zhifu.gzf@alibaba-inc.com>
Date: Fri, 07 Apr 2023 15:54:09 +0800
Subject: [PATCH] Merge branch 'main' into dev_cmz2
---
funasr/runtime/python/libtorch/funasr_torch/paraformer_bin.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/funasr/runtime/python/libtorch/funasr_torch/paraformer_bin.py b/funasr/runtime/python/libtorch/funasr_torch/paraformer_bin.py
index 3c0606d..e169087 100644
--- a/funasr/runtime/python/libtorch/funasr_torch/paraformer_bin.py
+++ b/funasr/runtime/python/libtorch/funasr_torch/paraformer_bin.py
@@ -46,6 +46,7 @@
)
self.ort_infer = torch.jit.load(model_file)
self.batch_size = batch_size
+ self.device_id = device_id
self.plot_timestamp_to = plot_timestamp_to
self.pred_bias = pred_bias
@@ -58,8 +59,13 @@
end_idx = min(waveform_nums, beg_idx + self.batch_size)
feats, feats_len = self.extract_feat(waveform_list[beg_idx:end_idx])
try:
- outputs = self.ort_infer(feats, feats_len)
- am_scores, valid_token_lens = outputs[0], outputs[1]
+ with torch.no_grad():
+ if int(self.device_id) == -1:
+ outputs = self.ort_infer(feats, feats_len)
+ am_scores, valid_token_lens = outputs[0], outputs[1]
+ else:
+ outputs = self.ort_infer(feats.cuda(), feats_len.cuda())
+ am_scores, valid_token_lens = outputs[0].cpu(), outputs[1].cpu()
if len(outputs) == 4:
# for BiCifParaformer Inference
us_alphas, us_peaks = outputs[2], outputs[3]
--
Gitblit v1.9.1