From c2f174cd7811a7a11e6227ecb371887f97fd66d3 Mon Sep 17 00:00:00 2001
From: 夜雨飘零 <yeyupiaoling@foxmail.com>
Date: Fri, 2 Feb 2024 23:06:13 +0800
Subject: [PATCH] Use ffmpeg read data (#1349)
---
funasr/auto/auto_model.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)
diff --git a/funasr/auto/auto_model.py b/funasr/auto/auto_model.py
index d072219..d99fc56 100644
--- a/funasr/auto/auto_model.py
+++ b/funasr/auto/auto_model.py
@@ -134,8 +134,6 @@
self.spk_model = spk_model
self.spk_kwargs = spk_kwargs
self.model_path = kwargs.get("model_path")
-
-
def build_model(self, **kwargs):
assert "model" in kwargs
@@ -146,7 +144,7 @@
set_all_random_seed(kwargs.get("seed", 0))
device = kwargs.get("device", "cuda")
- if not torch.cuda.is_available() or kwargs.get("ngpu", 0) == 0:
+ if not torch.cuda.is_available() or kwargs.get("ngpu", 1) == 0:
device = "cpu"
kwargs["batch_size"] = 1
kwargs["device"] = device
@@ -200,8 +198,6 @@
res = self.model(*args, kwargs)
return res
-
-
def generate(self, input, input_len=None, **cfg):
if self.vad_model is None:
return self.inference(input, input_len=input_len, **cfg)
@@ -232,7 +228,7 @@
data_batch = data_list[beg_idx:end_idx]
key_batch = key_list[beg_idx:end_idx]
batch = {"data_in": data_batch, "key": key_batch}
- if (end_idx - beg_idx) == 1 and isinstance(data_batch[0], torch.Tensor): # fbank
+ if (end_idx - beg_idx) == 1 and kwargs.get("data_type", None) == "fbank": # fbank
batch["data_in"] = data_batch[0]
batch["data_lengths"] = input_len
--
Gitblit v1.9.1