From 71613122710d4d955913649cc7995c6c90f110ae Mon Sep 17 00:00:00 2001
From: speech_asr <wangjiaming.wjm@alibaba-inc.com>
Date: Tue, 11 Apr 2023 00:24:12 +0800
Subject: [PATCH] update

---
 /dev/null                   |   14 --------------
 funasr/layers/global_mvn.py |    5 +----
 funasr/models/data2vec.py   |   13 +++++--------
 3 files changed, 6 insertions(+), 26 deletions(-)

diff --git a/funasr/layers/abs_normalize.py b/funasr/layers/abs_normalize.py
deleted file mode 100644
index f2be748..0000000
--- a/funasr/layers/abs_normalize.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from abc import ABC
-from abc import abstractmethod
-from typing import Tuple
-
-import torch
-
-
-class AbsNormalize(torch.nn.Module, ABC):
-    @abstractmethod
-    def forward(
-        self, input: torch.Tensor, input_lengths: torch.Tensor = None
-    ) -> Tuple[torch.Tensor, torch.Tensor]:
-        # return output, output_lengths
-        raise NotImplementedError
diff --git a/funasr/layers/global_mvn.py b/funasr/layers/global_mvn.py
index 5515cdd..11c459d 100644
--- a/funasr/layers/global_mvn.py
+++ b/funasr/layers/global_mvn.py
@@ -7,11 +7,8 @@
 from typeguard import check_argument_types
 
 from funasr.modules.nets_utils import make_pad_mask
-from funasr.layers.abs_normalize import AbsNormalize
-from funasr.layers.inversible_interface import InversibleInterface
 
-
-class GlobalMVN(AbsNormalize, InversibleInterface):
+class GlobalMVN(torch.nn.Module):
     """Apply global mean and variance normalization
 
     TODO(kamo): Make this class portable somehow
diff --git a/funasr/layers/inversible_interface.py b/funasr/layers/inversible_interface.py
deleted file mode 100644
index a1a5939..0000000
--- a/funasr/layers/inversible_interface.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from abc import ABC
-from abc import abstractmethod
-from typing import Tuple
-
-import torch
-
-
-class InversibleInterface(ABC):
-    @abstractmethod
-    def inverse(
-        self, input: torch.Tensor, input_lengths: torch.Tensor = None
-    ) -> Tuple[torch.Tensor, torch.Tensor]:
-        # return output, output_lengths
-        raise NotImplementedError
diff --git a/funasr/models/data2vec.py b/funasr/models/data2vec.py
index fcd6bd2..2d4711f 100644
--- a/funasr/models/data2vec.py
+++ b/funasr/models/data2vec.py
@@ -13,12 +13,9 @@
 from typeguard import check_argument_types
 
 from funasr.layers.abs_normalize import AbsNormalize
-from funasr.models.encoder.abs_encoder import AbsEncoder
-from funasr.models.frontend.abs_frontend import AbsFrontend
 from funasr.models.preencoder.abs_preencoder import AbsPreEncoder
-from funasr.models.specaug.abs_specaug import AbsSpecAug
 from funasr.torch_utils.device_funcs import force_gatherable
-from funasr.train.abs_espnet_model import AbsESPnetModel
+from funasr.models.base_model import FunASRModel
 
 if LooseVersion(torch.__version__) >= LooseVersion("1.6.0"):
     from torch.cuda.amp import autocast
@@ -29,16 +26,16 @@
         yield
 
 
-class Data2VecPretrainModel(AbsESPnetModel):
+class Data2VecPretrainModel(FunASRModel):
     """Data2Vec Pretrain model"""
 
     def __init__(
             self,
-            frontend: Optional[AbsFrontend],
-            specaug: Optional[AbsSpecAug],
+            frontend: Optional[torch.nn.Module],
+            specaug: Optional[torch.nn.Module],
             normalize: Optional[AbsNormalize],
             preencoder: Optional[AbsPreEncoder],
-            encoder: AbsEncoder,
+            encoder: torch.nn.Module,
     ):
         assert check_argument_types()
 

--
Gitblit v1.9.1