From d14bb0f843aea0aeb254a5b3c21d42d04e28765b Mon Sep 17 00:00:00 2001
From: nichongjia-2007 <nichongjia@gmail.com>
Date: Fri, 14 Jul 2023 17:43:40 +0800
Subject: [PATCH] add conformer export
---
funasr/models/encoder/opennmt_encoders/self_attention_encoder.py | 2 --
1 file changed, 0 insertions(+), 2 deletions(-)
diff --git a/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py b/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py
index db30f08..7c83cbd 100644
--- a/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py
+++ b/funasr/models/encoder/opennmt_encoders/self_attention_encoder.py
@@ -7,7 +7,6 @@
import torch
import torch.nn as nn
from funasr.modules.streaming_utils.chunk_utilis import overlap_chunk
-from typeguard import check_argument_types
import numpy as np
from funasr.modules.nets_utils import make_pad_mask
from funasr.modules.attention import MultiHeadSelfAttention, MultiHeadedAttentionSANM
@@ -144,7 +143,6 @@
tf2torch_tensor_name_prefix_tf: str = "seq2seq/encoder",
out_units=None,
):
- assert check_argument_types()
super().__init__()
self._output_size = output_size
--
Gitblit v1.9.1