From 1367973f9818d8e15c7bf52ad6ffba4ddb6ac2b2 Mon Sep 17 00:00:00 2001
From: Rin Arakaki <rnarkkx@gmail.com>
Date: Tue, 24 Dec 2024 17:51:31 +0800
Subject: [PATCH] shfit to shift (#2266)
---
funasr/models/scama/decoder.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/funasr/models/scama/decoder.py b/funasr/models/scama/decoder.py
index 31b2357..f457b75 100644
--- a/funasr/models/scama/decoder.py
+++ b/funasr/models/scama/decoder.py
@@ -226,7 +226,7 @@
concat_after: bool = False,
att_layer_num: int = 6,
kernel_size: int = 21,
- sanm_shfit: int = None,
+ sanm_shift: int = None,
concat_embeds: bool = False,
attention_dim: int = None,
tf2torch_tensor_name_prefix_torch: str = "decoder",
@@ -271,14 +271,14 @@
self.att_layer_num = att_layer_num
self.num_blocks = num_blocks
- if sanm_shfit is None:
- sanm_shfit = (kernel_size - 1) // 2
+ if sanm_shift is None:
+ sanm_shift = (kernel_size - 1) // 2
self.decoders = repeat(
att_layer_num,
lambda lnum: DecoderLayerSANM(
attention_dim,
MultiHeadedAttentionSANMDecoder(
- attention_dim, self_attention_dropout_rate, kernel_size, sanm_shfit=sanm_shfit
+ attention_dim, self_attention_dropout_rate, kernel_size, sanm_shift=sanm_shift
),
MultiHeadedAttentionCrossAtt(
attention_heads,
@@ -303,7 +303,7 @@
attention_dim,
self_attention_dropout_rate,
kernel_size,
- sanm_shfit=sanm_shfit,
+ sanm_shift=sanm_shift,
),
None,
PositionwiseFeedForwardDecoderSANM(attention_dim, linear_units, dropout_rate),
--
Gitblit v1.9.1