From 23bc5dee4e88ef8b5d0c0d64d2e188c054422e8b Mon Sep 17 00:00:00 2001
From: speech_asr <wangjiaming.wjm@alibaba-inc.com>
Date: Tue, 11 Apr 2023 00:21:45 +0800
Subject: [PATCH] update
---
funasr/models/encoder/resnet34_encoder.py | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/funasr/models/encoder/resnet34_encoder.py b/funasr/models/encoder/resnet34_encoder.py
index 7fde4b2..6f978eb 100644
--- a/funasr/models/encoder/resnet34_encoder.py
+++ b/funasr/models/encoder/resnet34_encoder.py
@@ -1,6 +1,5 @@
import torch
from torch.nn import functional as F
-from funasr.models.encoder.abs_encoder import AbsEncoder
from typing import Tuple, Optional
from funasr.models.pooling.statistic_pooling import statistic_pooling, windowed_statistic_pooling
from collections import OrderedDict
@@ -76,7 +75,7 @@
return xs_pad, ilens
-class ResNet34(AbsEncoder):
+class ResNet34(torch.nn.Module):
def __init__(
self,
input_size,
@@ -740,7 +739,7 @@
"{}.resnet{}_dense.weight".format(tensor_name_prefix_torch, layer_idx):
{"name": "{}/resnet{}_dense/kernel".format(tensor_name_prefix_tf, layer_idx),
"squeeze": None,
- "transpose": (1, 0),
+ "transpose": (2, 1, 0) if layer_idx == 0 else (1, 0),
},
"{}.resnet{}_dense.bias".format(tensor_name_prefix_torch, layer_idx):
{"name": "{}/resnet{}_dense/bias".format(tensor_name_prefix_tf, layer_idx),
@@ -831,7 +830,7 @@
name, data_tf.size(), name_tf, var_dict_tf[name_tf].shape
))
else:
- var_dict_torch_update[name] = torch.Tensor(map_dict[name]).type(torch.int64).to("cpu")
+ var_dict_torch_update[name] = torch.from_numpy(np.array(map_dict[name])).type(torch.int64).to("cpu")
logging.info("torch tensor: {}, manually assigning to: {}".format(
name, map_dict[name]
))
--
Gitblit v1.9.1