From 2e36e738ca39afc8d02f3d11013bd12f937cc874 Mon Sep 17 00:00:00 2001
From: zhaomingwork <61895407+zhaomingwork@users.noreply.github.com>
Date: Wed, 08 Nov 2023 09:22:06 +0800
Subject: [PATCH] fix bug for h5 hotwords (#1067)

---
 funasr/models/encoder/opennmt_encoders/self_attention_encoder.py |    2 --
 1 file changed, 0 insertions(+), 2 deletions(-)

diff --git a/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py b/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py
index db30f08..7c83cbd 100644
--- a/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py
+++ b/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py
@@ -7,7 +7,6 @@
 import torch
 import torch.nn as nn
 from funasr.modules.streaming_utils.chunk_utilis import overlap_chunk
-from typeguard import check_argument_types
 import numpy as np
 from funasr.modules.nets_utils import make_pad_mask
 from funasr.modules.attention import MultiHeadSelfAttention, MultiHeadedAttentionSANM
@@ -144,7 +143,6 @@
         tf2torch_tensor_name_prefix_tf: str = "seq2seq/encoder",
         out_units=None,
     ):
-        assert check_argument_types()
         super().__init__()
         self._output_size = output_size
 

--
Gitblit v1.9.1
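
Note (not part of the patch): the deleted `assert check_argument_types()` used typeguard to validate the constructor's annotated arguments at runtime, and removing it together with the import drops that dependency from the encoder. If runtime validation of the arguments is still wanted without typeguard, a minimal dependency-free sketch follows; the class name, defaults, and parameter set are illustrative, taken only from the context lines visible in this hunk.

    class SelfAttentionEncoder:
        """Illustrative stand-in for the patched encoder class."""

        def __init__(
            self,
            output_size: int = 256,
            tf2torch_tensor_name_prefix_tf: str = "seq2seq/encoder",
            out_units=None,
        ):
            # Replicate the dropped runtime check with plain isinstance tests
            # instead of typeguard's check_argument_types().
            if not isinstance(output_size, int):
                raise TypeError(f"output_size must be int, got {type(output_size).__name__}")
            if not isinstance(tf2torch_tensor_name_prefix_tf, str):
                raise TypeError("tf2torch_tensor_name_prefix_tf must be str")
            self._output_size = output_size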