From 1596f6f414f6f41da66506debb1dff19fffeb3ec Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: 星期一, 24 六月 2024 11:55:17 +0800
Subject: [PATCH] fix bug in hotwords

---
 funasr/models/transformer/utils/lightconv.py |    8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/funasr/models/transformer/utils/lightconv.py b/funasr/models/transformer/utils/lightconv.py
index b249402..b6d131b 100644
--- a/funasr/models/transformer/utils/lightconv.py
+++ b/funasr/models/transformer/utils/lightconv.py
@@ -50,9 +50,7 @@
         self.act = nn.GLU()
 
         # lightconv related
-        self.weight = nn.Parameter(
-            torch.Tensor(self.wshare, 1, kernel_size).uniform_(0, 1)
-        )
+        self.weight = nn.Parameter(torch.Tensor(self.wshare, 1, kernel_size).uniform_(0, 1))
         self.use_bias = use_bias
         if self.use_bias:
             self.bias = nn.Parameter(torch.Tensor(n_feat))
@@ -96,9 +94,7 @@
             self.kernel_mask = self.kernel_mask.to(x.device)
             weight = weight.masked_fill(self.kernel_mask == 0.0, float("-inf"))
         weight = F.softmax(weight, dim=-1)
-        x = F.conv1d(x, weight, padding=self.padding_size, groups=self.wshare).view(
-            B, C, T
-        )
+        x = F.conv1d(x, weight, padding=self.padding_size, groups=self.wshare).view(B, C, T)
         if self.use_bias:
             x = x + self.bias.view(1, -1, 1)
         x = x.transpose(1, 2)  # B x T x C

--
Gitblit v1.9.1