From 1596f6f414f6f41da66506debb1dff19fffeb3ec Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Mon, 24 Jun 2024 11:55:17 +0800
Subject: [PATCH] fixbug hotwords
---
funasr/models/emotion2vec/modules.py | 14 +++-----------
 1 file changed, 3 insertions(+), 11 deletions(-)
diff --git a/funasr/models/emotion2vec/modules.py b/funasr/models/emotion2vec/modules.py
index fcf99eb..6e36409 100644
--- a/funasr/models/emotion2vec/modules.py
+++ b/funasr/models/emotion2vec/modules.py
@@ -77,18 +77,10 @@
x = self.dropout(x)
for i, blk in enumerate(self.blocks):
- if (
- not self.training
- or self.layerdrop == 0
- or (np.random.random() > self.layerdrop)
- ):
+ if not self.training or self.layerdrop == 0 or (np.random.random() > self.layerdrop):
ab = alibi_bias
if ab is not None and alibi_scale is not None:
- scale = (
- alibi_scale[i]
- if alibi_scale.size(0) > 1
- else alibi_scale.squeeze(0)
- )
+ scale = alibi_scale[i] if alibi_scale.size(0) > 1 else alibi_scale.squeeze(0)
ab = ab * scale.type_as(ab)
x, _ = blk(x, padding_mask, ab)
@@ -264,7 +256,7 @@
super().__init__()
self.num_heads = num_heads
head_dim = dim // num_heads
- self.scale = qk_scale or head_dim ** -0.5
+ self.scale = qk_scale or head_dim**-0.5
self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
self.attn_drop = nn.Dropout(attn_drop)
--
Gitblit v1.9.1