From 2c2fb5e1eb1185a081e3507c2aa5c3aafaa2bb6d Mon Sep 17 00:00:00 2001
From: zhifu gao <zhifu.gzf@alibaba-inc.com>
Date: Tue, 22 Apr 2025 09:56:44 +0800
Subject: [PATCH] Update README.md (#2487)

---
 funasr/models/transformer/layer_norm.py |   25 +++++++------------------
 1 file changed, 7 insertions(+), 18 deletions(-)

diff --git a/funasr/models/transformer/layer_norm.py b/funasr/models/transformer/layer_norm.py
index 8683230..ce4f887 100644
--- a/funasr/models/transformer/layer_norm.py
+++ b/funasr/models/transformer/layer_norm.py
@@ -36,11 +36,7 @@
         """
         if self.dim == -1:
             return super(LayerNorm, self).forward(x)
-        return (
-            super(LayerNorm, self)
-            .forward(x.transpose(self.dim, -1))
-            .transpose(self.dim, -1)
-        )
+        return super(LayerNorm, self).forward(x.transpose(self.dim, -1)).transpose(self.dim, -1)
 
 
 class GlobalLayerNorm(nn.Module):
@@ -97,10 +93,7 @@
             mean = torch.mean(x, (1, 2), keepdim=True)
             var = torch.mean((x - mean) ** 2, (1, 2), keepdim=True)
             if self.elementwise_affine:
-                x = (
-                    self.weight * (x - mean) / torch.sqrt(var + self.eps)
-                    + self.bias
-                )
+                x = self.weight * (x - mean) / torch.sqrt(var + self.eps) + self.bias
             else:
                 x = (x - mean) / torch.sqrt(var + self.eps)
 
@@ -108,10 +101,7 @@
             mean = torch.mean(x, (1, 2, 3), keepdim=True)
             var = torch.mean((x - mean) ** 2, (1, 2, 3), keepdim=True)
             if self.elementwise_affine:
-                x = (
-                    self.weight * (x - mean) / torch.sqrt(var + self.eps)
-                    + self.bias
-                )
+                x = self.weight * (x - mean) / torch.sqrt(var + self.eps) + self.bias
             else:
                 x = (x - mean) / torch.sqrt(var + self.eps)
         return x
@@ -165,13 +155,12 @@
 
 
 class ScaleNorm(nn.Module):
-    def __init__(self, dim, eps = 1e-5):
+    def __init__(self, dim, eps=1e-5):
         super().__init__()
-        self.scale = dim ** -0.5
+        self.scale = dim**-0.5
         self.eps = eps
         self.g = nn.Parameter(torch.ones(1))
 
     def forward(self, x):
-        norm = torch.norm(x, dim = -1, keepdim = True) * self.scale
-        return x / norm.clamp(min = self.eps) * self.g
-
+        norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
+        return x / norm.clamp(min=self.eps) * self.g

--
Gitblit v1.9.1