From 0e622e694e6cb4459955f1e5942a7c53349ce640 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Tue, 19 Dec 2023 21:58:14 +0800
Subject: [PATCH] funasr2

---
 funasr/models/transducer/rnn_decoder.py |    7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/funasr/models/transducer/rnn_decoder.py b/funasr/models/transducer/rnn_decoder.py
index 1743f99..204f0b1 100644
--- a/funasr/models/transducer/rnn_decoder.py
+++ b/funasr/models/transducer/rnn_decoder.py
@@ -2,13 +2,12 @@
 
 import numpy as np
 import torch
+import torch.nn as nn
 import torch.nn.functional as F
 
 from funasr.models.transformer.utils.nets_utils import make_pad_mask
 from funasr.models.transformer.utils.nets_utils import to_device
 from funasr.models.language_model.rnn.attentions import initial_att
-from funasr.models.decoder.abs_decoder import AbsDecoder
-from funasr.utils.get_default_kwargs import get_default_kwargs
 
 
 def build_attention_list(
@@ -80,7 +79,7 @@
     return att_list
 
 
-class RNNDecoder(AbsDecoder):
+class RNNDecoder(nn.Module):
     def __init__(
         self,
         vocab_size: int,
@@ -93,7 +92,7 @@
         context_residual: bool = False,
         replace_sos: bool = False,
         num_encs: int = 1,
-        att_conf: dict = get_default_kwargs(build_attention_list),
+        att_conf: dict = None,
     ):
         # FIXME(kamo): The parts of num_spk should be refactored more more more
         if rnn_type not in {"lstm", "gru"}:

--
Gitblit v1.9.1