From 28ccfbfc51068a663a80764e14074df5edf2b5ba Mon Sep 17 00:00:00 2001
From: kongdeqiang <kongdeqiang960204@163.com>
Date: Fri, 13 Mar 2026 17:41:41 +0800
Subject: [PATCH] 提交

---
 funasr/models/emotion2vec/modules.py |   14 +++-----------
 1 file changed, 3 insertions(+), 11 deletions(-)

diff --git a/funasr/models/emotion2vec/modules.py b/funasr/models/emotion2vec/modules.py
index fcf99eb..6e36409 100644
--- a/funasr/models/emotion2vec/modules.py
+++ b/funasr/models/emotion2vec/modules.py
@@ -77,18 +77,10 @@
         x = self.dropout(x)
 
         for i, blk in enumerate(self.blocks):
-            if (
-                not self.training
-                or self.layerdrop == 0
-                or (np.random.random() > self.layerdrop)
-            ):
+            if not self.training or self.layerdrop == 0 or (np.random.random() > self.layerdrop):
                 ab = alibi_bias
                 if ab is not None and alibi_scale is not None:
-                    scale = (
-                        alibi_scale[i]
-                        if alibi_scale.size(0) > 1
-                        else alibi_scale.squeeze(0)
-                    )
+                    scale = alibi_scale[i] if alibi_scale.size(0) > 1 else alibi_scale.squeeze(0)
                     ab = ab * scale.type_as(ab)
                 x, _ = blk(x, padding_mask, ab)
 
@@ -264,7 +256,7 @@
         super().__init__()
         self.num_heads = num_heads
         head_dim = dim // num_heads
-        self.scale = qk_scale or head_dim ** -0.5
+        self.scale = qk_scale or head_dim**-0.5
 
         self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
         self.attn_drop = nn.Dropout(attn_drop)

--
Gitblit v1.9.1