From 9539dec5c7058b4a10f9226d08a8f30e56147dcd Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Thu, 27 Apr 2023 17:31:54 +0800
Subject: [PATCH] update

---
 funasr/models/encoder/resnet34_encoder.py |   18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/funasr/models/encoder/resnet34_encoder.py b/funasr/models/encoder/resnet34_encoder.py
index df0f124..8445feb 100644
--- a/funasr/models/encoder/resnet34_encoder.py
+++ b/funasr/models/encoder/resnet34_encoder.py
@@ -1,8 +1,8 @@
 import torch
 from torch.nn import functional as F
+from funasr.models.encoder.abs_encoder import AbsEncoder
 from typing import Tuple, Optional
 from funasr.models.pooling.statistic_pooling import statistic_pooling, windowed_statistic_pooling
-from funasr.models.encoder.abs_encoder import AbsEncoder
 from collections import OrderedDict
 import logging
 import numpy as np
@@ -76,7 +76,7 @@
         return xs_pad, ilens
 
 
-class ResNet34(torch.nn.Module):
+class ResNet34(AbsEncoder):
     def __init__(
             self,
             input_size,
@@ -406,6 +406,12 @@
             tf2torch_tensor_name_prefix_torch="encoder",
             tf2torch_tensor_name_prefix_tf="seq2seq/speech_encoder"
     ):
+        """
+        Author: Speech Lab, Alibaba Group, China
+        SOND: Speaker Overlap-aware Neural Diarization for Multi-party Meeting Analysis
+        https://arxiv.org/abs/2211.10243
+        """
+
         super(ResNet34Diar, self).__init__(
             input_size,
             use_head_conv=use_head_conv,
@@ -633,6 +639,12 @@
             tf2torch_tensor_name_prefix_torch="encoder",
             tf2torch_tensor_name_prefix_tf="seq2seq/speech_encoder"
     ):
+        """
+        Author: Speech Lab, Alibaba Group, China
+        TOLD: A Novel Two-Stage Overlap-Aware Framework for Speaker Diarization
+        https://arxiv.org/abs/2303.05397
+        """
+
         super(ResNet34SpL2RegDiar, self).__init__(
             input_size,
             use_head_conv=use_head_conv,
@@ -838,4 +850,4 @@
                 else:
                     logging.warning("{} is missed from tf checkpoint".format(name))
 
-        return var_dict_torch_update
+        return var_dict_torch_update
\ No newline at end of file

--
Gitblit v1.9.1