From a016617c7ec98ab9c7475ff7d3b6150b98d5beeb Mon Sep 17 00:00:00 2001
From: zhifu gao <zhifu.gzf@alibaba-inc.com>
Date: Tue, 28 Feb 2023 18:36:52 +0800
Subject: [PATCH] Merge pull request #165 from alibaba-damo-academy/dev_cmz

---
 funasr/modules/mask.py |   17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/funasr/modules/mask.py b/funasr/modules/mask.py
index 8f068e1..a8c168b 100644
--- a/funasr/modules/mask.py
+++ b/funasr/modules/mask.py
@@ -33,3 +33,20 @@
     ys_mask = ys_in_pad != ignore_id
     m = subsequent_mask(ys_mask.size(-1), device=ys_mask.device).unsqueeze(0)
     return ys_mask.unsqueeze(-2) & m
+
+def vad_mask(size, vad_pos, device="cpu", dtype=torch.bool):
+    """Create a VAD-split mask for decoder self-attention.
+
+    :param int size: number of rows and columns of the square mask
+    :param int vad_pos: position of the VAD split point in the sequence
+    :param str device: "cpu" or "cuda" or torch.Tensor.device
+    :param torch.dtype dtype: result dtype
+    :rtype: torch.Tensor of shape (size, size)
+    """
+    ret = torch.ones(size, size, device=device, dtype=dtype)
+    if vad_pos <= 0 or vad_pos >= size:
+        return ret  # split point outside the interior: fully-visible mask
+    sub_corner = torch.zeros(
+        vad_pos - 1, size - vad_pos, device=device, dtype=dtype)
+    ret[0:vad_pos - 1, vad_pos:] = sub_corner  # NOTE(review): row vad_pos-1 is left fully visible -- confirm intended
+    return ret

--
Gitblit v1.9.1