From 1596f6f414f6f41da66506debb1dff19fffeb3ec Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Mon, 24 Jun 2024 11:55:17 +0800
Subject: [PATCH] fixbug hotwords

---
 funasr/models/transformer/utils/mask.py |    6 +++---
 1 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/funasr/models/transformer/utils/mask.py b/funasr/models/transformer/utils/mask.py
index a8c168b..e859e0e 100644
--- a/funasr/models/transformer/utils/mask.py
+++ b/funasr/models/transformer/utils/mask.py
@@ -34,6 +34,7 @@
     m = subsequent_mask(ys_mask.size(-1), device=ys_mask.device).unsqueeze(0)
     return ys_mask.unsqueeze(-2) & m
 
+
 def vad_mask(size, vad_pos, device="cpu", dtype=torch.bool):
     """Create mask for decoder self-attention.
 
@@ -46,7 +47,6 @@
     ret = torch.ones(size, size, device=device, dtype=dtype)
     if vad_pos <= 0 or vad_pos >= size:
         return ret
-    sub_corner = torch.zeros(
-        vad_pos - 1, size - vad_pos, device=device, dtype=dtype)
-    ret[0:vad_pos - 1, vad_pos:] = sub_corner
+    sub_corner = torch.zeros(vad_pos - 1, size - vad_pos, device=device, dtype=dtype)
+    ret[0 : vad_pos - 1, vad_pos:] = sub_corner
     return ret

--
Gitblit v1.9.1