From 6427c834dfd97b1f05c6659cdc7ccf010bf82fe1 Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Mon, 24 Apr 2023 19:50:07 +0800
Subject: [PATCH] update

---
 funasr/export/utils/torch_function.py |   12 ++++++++++++
 1 file changed, 12 insertions(+), 0 deletions(-)

diff --git a/funasr/export/utils/torch_function.py b/funasr/export/utils/torch_function.py
index e8e5e1a..a078a7e 100644
--- a/funasr/export/utils/torch_function.py
+++ b/funasr/export/utils/torch_function.py
@@ -44,6 +44,18 @@
         else:
             return mask
 
+class sequence_mask(nn.Module):
+    """Length-based mask for export: mask[i, j] = (j < lengths[i]), cast to dtype."""
+    def __init__(self, max_seq_len=512, flip=True):
+        # NOTE(review): max_seq_len and flip are accepted but unused — kept only for API compatibility; confirm with callers before removing.
+        super().__init__()
+
+    def forward(self, lengths, max_seq_len=None, dtype=torch.float32, device=None):
+        if max_seq_len is None:
+            max_seq_len = lengths.max()  # default to the longest sequence in the batch
+        row_vector = torch.arange(0, max_seq_len, 1).to(lengths.device)
+        mask = row_vector < torch.unsqueeze(lengths, dim=-1)  # broadcast compare: (B, 1) vs (T,)
+        return mask.type(dtype).to(device) if device is not None else mask.type(dtype)
 
 def normalize(input: torch.Tensor, p: float = 2.0, dim: int = 1, out: Optional[torch.Tensor] = None) -> torch.Tensor:
     if out is None:

--
Gitblit v1.9.1