From 28ccfbfc51068a663a80764e14074df5edf2b5ba Mon Sep 17 00:00:00 2001
From: kongdeqiang <kongdeqiang960204@163.com>
Date: Fri, 13 Mar 2026 17:41:41 +0800
Subject: [PATCH] 提交
---
funasr/models/e_branchformer/encoder.py | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)
diff --git a/funasr/models/e_branchformer/encoder.py b/funasr/models/e_branchformer/encoder.py
index 4084e21..adc6642 100644
--- a/funasr/models/e_branchformer/encoder.py
+++ b/funasr/models/e_branchformer/encoder.py
@@ -44,6 +44,7 @@
)
from funasr.register import tables
+
class EBranchformerEncoderLayer(torch.nn.Module):
"""E-Branchformer encoder layer module.
@@ -174,6 +175,7 @@
return x, mask
+
@tables.register("encoder_classes", "EBranchformerEncoder")
class EBranchformerEncoder(nn.Module):
"""E-Branchformer encoder module."""
@@ -232,9 +234,7 @@
elif pos_enc_layer_type == "legacy_rel_pos":
assert attention_layer_type == "legacy_rel_selfattn"
pos_enc_class = LegacyRelPositionalEncoding
- logging.warning(
- "Using legacy_rel_pos and it will be deprecated in the future."
- )
+ logging.warning("Using legacy_rel_pos and it will be deprecated in the future.")
else:
raise ValueError("unknown pos_enc_layer: " + pos_enc_layer_type)
@@ -320,9 +320,7 @@
output_size,
attention_dropout_rate,
)
- logging.warning(
- "Using legacy_rel_selfattn and it will be deprecated in the future."
- )
+ logging.warning("Using legacy_rel_selfattn and it will be deprecated in the future.")
elif attention_layer_type == "rel_selfattn":
assert pos_enc_layer_type == "rel_pos"
encoder_selfattn_layer = RelPositionMultiHeadedAttention
@@ -360,9 +358,7 @@
encoder_selfattn_layer(*encoder_selfattn_layer_args),
cgmlp_layer(*cgmlp_layer_args),
positionwise_layer(*positionwise_layer_args) if use_ffn else None,
- positionwise_layer(*positionwise_layer_args)
- if use_ffn and macaron_ffn
- else None,
+ positionwise_layer(*positionwise_layer_args) if use_ffn and macaron_ffn else None,
dropout_rate,
merge_conv_kernel,
),
--
Gitblit v1.9.1