From e9d2cfc3a134b00f4e98271fbee3838d1ccecbcc Mon Sep 17 00:00:00 2001
From: VirtuosoQ <2416050435@qq.com>
Date: Fri, 26 Apr 2024 14:59:30 +0800
Subject: [PATCH] FunASR: replace PositionwiseFeedForwardDecoderSANM with an export-oriented wrapper
---
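Note: the new wrapper reuses a trained module's w_1, w_2, activation, and norm
submodules and drops the dropout layer, which is an identity at inference, so
the exported graph stays minimal. A minimal usage sketch follows (not part of
the patch): the stub module and its dimensions are illustrative assumptions,
since the wrapper only needs an object exposing those four attributes, and the
import path assumes the patched file.

    import torch

    from funasr.models.transformer.positionwise_feed_forward import (
        PositionwiseFeedForwardDecoderSANMExport,
    )

    # Hypothetical stand-in for a trained decoder FFN; only the four
    # attributes the wrapper reads are required.
    class StubDecoderFFN(torch.nn.Module):
        def __init__(self, idim=512, hidden_units=2048):
            super().__init__()
            self.w_1 = torch.nn.Linear(idim, hidden_units)
            self.w_2 = torch.nn.Linear(hidden_units, idim, bias=False)
            self.activation = torch.nn.ReLU()
            # torch.nn.LayerNorm stands in for funasr's LayerNorm here.
            self.norm = torch.nn.LayerNorm(hidden_units)

    ffn = PositionwiseFeedForwardDecoderSANMExport(StubDecoderFFN()).eval()
    x = torch.randn(1, 10, 512)  # (batch, time, idim)
    with torch.no_grad():
        y = ffn(x)  # -> (1, 10, 512)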
funasr/models/transformer/positionwise_feed_forward.py | 33 ++++++++++++---------------------
 1 file changed, 12 insertions(+), 21 deletions(-)
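For reference, the wrapped module can then be handed to torch.onnx.export; the
file name, tensor names, opset, and dynamic axes below are assumptions, not
part of this patch:

    # Reuses `ffn` and `x` from the sketch above.
    torch.onnx.export(
        ffn,
        x,
        "decoder_ffn.onnx",
        input_names=["feats"],
        output_names=["out"],
        dynamic_axes={"feats": {0: "batch", 1: "time"},
                      "out": {0: "batch", 1: "time"}},
        opset_version=14,
    )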
diff --git a/funasr/models/transformer/positionwise_feed_forward.py b/funasr/models/transformer/positionwise_feed_forward.py
index ffa0f4e..081ff5b 100644
--- a/funasr/models/transformer/positionwise_feed_forward.py
+++ b/funasr/models/transformer/positionwise_feed_forward.py
@@ -34,25 +34,16 @@
         return self.w_2(self.dropout(self.activation(self.w_1(x))))
 
 
-class PositionwiseFeedForwardDecoderSANM(torch.nn.Module):
-    """Positionwise feed forward layer.
-    Args:
-        idim (int): Input dimenstion.
-        hidden_units (int): The number of hidden units.
-        dropout_rate (float): Dropout rate.
-
-    """
-
-    def __init__(self, idim, hidden_units, dropout_rate, adim=None, activation=torch.nn.ReLU()):
-        """Construct an PositionwiseFeedForward object."""
-        super(PositionwiseFeedForwardDecoderSANM, self).__init__()
-        self.w_1 = torch.nn.Linear(idim, hidden_units)
-        self.w_2 = torch.nn.Linear(hidden_units, idim if adim is None else adim, bias=False)
-        self.dropout = torch.nn.Dropout(dropout_rate)
-        self.activation = activation
-        self.norm = LayerNorm(hidden_units)
-
-    def forward(self, x):
-        """Forward function."""
-        return self.w_2(self.norm(self.dropout(self.activation(self.w_1(x)))))
+class PositionwiseFeedForwardDecoderSANMExport(torch.nn.Module):
+    def __init__(self, model):
+        super().__init__()
+        self.w_1 = model.w_1
+        self.w_2 = model.w_2
+        self.activation = model.activation
+        self.norm = model.norm
+
+    def forward(self, x):
+        x = self.activation(self.w_1(x))
+        x = self.w_2(self.norm(x))
+        return x
 
--
Gitblit v1.9.1