From 0e622e694e6cb4459955f1e5942a7c53349ce640 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Tue, 19 Dec 2023 21:58:14 +0800
Subject: [PATCH] funasr2

---
 funasr/models/branchformer/encoder.py |   11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/funasr/models/branchformer/branchformer_encoder.py b/funasr/models/branchformer/encoder.py
similarity index 98%
rename from funasr/models/branchformer/branchformer_encoder.py
rename to funasr/models/branchformer/encoder.py
index 0f037c8..11b6429 100644
--- a/funasr/models/branchformer/branchformer_encoder.py
+++ b/funasr/models/branchformer/encoder.py
@@ -16,8 +16,8 @@
 
 import numpy
 import torch
+import torch.nn as nn
 
-from funasr.models.encoder.abs_encoder import AbsEncoder
 from funasr.models.branchformer.cgmlp import ConvolutionalGatingMLP
 from funasr.models.branchformer.fastformer import FastSelfAttention
 from funasr.models.transformer.utils.nets_utils import make_pad_mask
@@ -33,8 +33,8 @@
     ScaledPositionalEncoding,
 )
 from funasr.models.transformer.layer_norm import LayerNorm
-from funasr.models.transformer.repeat import repeat
-from funasr.models.transformer.subsampling import (
+from funasr.models.transformer.utils.repeat import repeat
+from funasr.models.transformer.utils.subsampling import (
     Conv2dSubsampling,
     Conv2dSubsampling2,
     Conv2dSubsampling6,
@@ -43,6 +43,7 @@
     check_short_utt,
 )
 
+from funasr.utils.register import register_class
 
 class BranchformerEncoderLayer(torch.nn.Module):
     """Branchformer encoder layer module.
@@ -290,8 +291,8 @@
 
         return x, mask
 
-
-class BranchformerEncoder(AbsEncoder):
+@register_class("encoder_classes", "BranchformerEncoder")
+class BranchformerEncoder(nn.Module):
     """Branchformer encoder module."""
 
     def __init__(

--
Gitblit v1.9.1