From fbec0f003d4de9e4b6ccb6bb58d2d4926a0ff332 Mon Sep 17 00:00:00 2001
From: speech_asr <wangjiaming.wjm@alibaba-inc.com>
Date: Wed, 15 Mar 2023 15:58:43 +0800
Subject: [PATCH] update
---
funasr/modules/eend_ola/encoder_decoder_attractor.py | 5 ++++-
1 files changed, 4 insertions(+), 1 deletions(-)
diff --git a/funasr/modules/eend_ola/encoder_decoder_attractor.py b/funasr/modules/eend_ola/encoder_decoder_attractor.py
index db01b00..4e599ab 100644
--- a/funasr/modules/eend_ola/encoder_decoder_attractor.py
+++ b/funasr/modules/eend_ola/encoder_decoder_attractor.py
@@ -2,7 +2,8 @@
import torch
import torch.nn.functional as F
from torch import nn
-
+from modelscope.utils.logger import get_logger
+logger = get_logger()
class EncoderDecoderAttractor(nn.Module):
@@ -16,7 +17,9 @@
self.n_units = n_units
def forward_core(self, xs, zeros):
+ logger.info("xs: {}".format(xs))
ilens = torch.from_numpy(np.array([x.shape[0] for x in xs])).to(torch.float32).to(xs[0].device)
+ logger.info("ilens: {}".format(ilens))
xs = [self.enc0_dropout(x) for x in xs]
xs = nn.utils.rnn.pad_sequence(xs, batch_first=True, padding_value=-1)
xs = nn.utils.rnn.pack_padded_sequence(xs, ilens, batch_first=True, enforce_sorted=False)
--
Gitblit v1.9.1