From 4bc6db3ef88795eb570f92f9576f8bc7c56f96bc Mon Sep 17 00:00:00 2001
From: 志浩 <neo.dzh@alibaba-inc.com>
Date: 星期二, 01 八月 2023 17:03:39 +0800
Subject: [PATCH] TOLD: add TOLD/SOND recipe on callhome
---
funasr/modules/nets_utils.py | 42 ++++++++++++++++++++++++++++++++++++++++++
1 file changed, 42 insertions(+), 0 deletions(-)
diff --git a/funasr/modules/nets_utils.py b/funasr/modules/nets_utils.py
index 55c5768..b1879fa 100644
--- a/funasr/modules/nets_utils.py
+++ b/funasr/modules/nets_utils.py
@@ -61,6 +61,48 @@
return pad
def pad_list_all_dim(xs, pad_value):
    """Pad a list of tensors to a common shape along every dimension.

    Unlike ``pad_list``, which pads only the first (time) dimension, this
    pads each dimension up to the per-dimension maximum over the batch.
    All tensors in ``xs`` are assumed to have the same number of dimensions.

    Args:
        xs (List[torch.Tensor]): List of tensors
            [(T_1, `*`), (T_2, `*`), ..., (T_B, `*`)].
        pad_value (float): Value used to fill the padded positions.

    Returns:
        torch.Tensor: Padded tensor (B, Tmax, `*`), where every trailing
        dimension is the maximum of that dimension over the batch.

    Examples:
        >>> x = [torch.ones(4), torch.ones(2), torch.ones(1)]
        >>> x
        [tensor([1., 1., 1., 1.]), tensor([1., 1.]), tensor([1.])]
        >>> pad_list_all_dim(x, 0)
        tensor([[1., 1., 1., 1.],
                [1., 1., 0., 0.],
                [1., 0., 0., 0.]])

    """
    n_batch = len(xs)
    num_dim = xs[0].dim()
    # Per-dimension maximum length over the whole batch.
    max_len_all_dim = [max(x.size(d) for x in xs) for d in range(num_dim)]
    # new_full keeps dtype/device of xs[0] (replaces legacy new().fill_()).
    pad = xs[0].new_full((n_batch, *max_len_all_dim), pad_value)

    for i, x in enumerate(xs):
        # A slice per dimension covering exactly x's extent; this
        # generalizes the former hard-coded 1-/2-/3-D branches to any rank.
        index = (i,) + tuple(slice(0, s) for s in x.shape)
        pad[index] = x

    return pad
+
+
def make_pad_mask(lengths, xs=None, length_dim=-1, maxlen=None):
"""Make mask tensor containing indices of padded part.
--
Gitblit v1.9.1