From 54931dd4e1a099d7d6f144c4e12e5453deb3aa26 Mon Sep 17 00:00:00 2001
From: 雾聪 <wucong.lyb@alibaba-inc.com>
Date: Wed, 28 Jun 2023 10:41:57 +0800
Subject: [PATCH] Merge branch 'main' of https://github.com/alibaba-damo-academy/FunASR into main
---
funasr/datasets/large_datasets/utils/padding.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/funasr/datasets/large_datasets/utils/padding.py b/funasr/datasets/large_datasets/utils/padding.py
index 7c66608..20ba7a3 100644
--- a/funasr/datasets/large_datasets/utils/padding.py
+++ b/funasr/datasets/large_datasets/utils/padding.py
@@ -13,16 +13,16 @@
batch = {}
data_names = data[0].keys()
for data_name in data_names:
- if data_name == "key" or data_name == "sampling_rate" or data_name == 'hotword_indxs':
- batch[data_name] = data[0][data_name]
+ if data_name == "key" or data_name == "sampling_rate":
continue
else:
- if data[0][data_name].dtype.kind == "i":
- pad_value = int_pad_value
- tensor_type = torch.int64
- else:
- pad_value = float_pad_value
- tensor_type = torch.float32
+ if data_name != 'hotword_indxs':
+ if data[0][data_name].dtype.kind == "i":
+ pad_value = int_pad_value
+ tensor_type = torch.int64
+ else:
+ pad_value = float_pad_value
+ tensor_type = torch.float32
tensor_list = [torch.tensor(np.copy(d[data_name]), dtype=tensor_type) for d in data]
tensor_lengths = torch.tensor([len(d[data_name]) for d in data], dtype=torch.int32)
--
Gitblit v1.9.1