From 30c40c643c19f6e2ac8679fa76d09d0f9ceccc65 Mon Sep 17 00:00:00 2001
From: chenmengzheAAA <123789350+chenmengzheAAA@users.noreply.github.com>
Date: Thu, 14 Sep 2023 18:00:43 +0800
Subject: [PATCH] Add LoRA parameters (lora_list, lora_rank, lora_alpha, lora_dropout) to SANM encoder
---
funasr/models/encoder/sanm_encoder.py | 12 ++++++++++++
 1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/funasr/models/encoder/sanm_encoder.py b/funasr/models/encoder/sanm_encoder.py
index 45163df..9e27d4a 100644
--- a/funasr/models/encoder/sanm_encoder.py
+++ b/funasr/models/encoder/sanm_encoder.py
@@ -146,6 +146,10 @@
interctc_use_conditioning: bool = False,
kernel_size : int = 11,
sanm_shfit : int = 0,
+ lora_list: List[str] = None,
+ lora_rank: int = 8,
+ lora_alpha: int = 16,
+ lora_dropout: float = 0.1,
selfattention_layer_type: str = "sanm",
tf2torch_tensor_name_prefix_torch: str = "encoder",
tf2torch_tensor_name_prefix_tf: str = "seq2seq/encoder",
@@ -229,6 +233,10 @@
attention_dropout_rate,
kernel_size,
sanm_shfit,
+ lora_list,
+ lora_rank,
+ lora_alpha,
+ lora_dropout,
)
encoder_selfattn_layer_args = (
@@ -238,6 +246,10 @@
attention_dropout_rate,
kernel_size,
sanm_shfit,
+ lora_list,
+ lora_rank,
+ lora_alpha,
+ lora_dropout,
)
self.encoders0 = repeat(
1,
--
Gitblit v1.9.1