From ac1ed30ed2dfe17d49ab4fbf20d9d000865e948c Mon Sep 17 00:00:00 2001
From: hnluo <haoneng.lhn@alibaba-inc.com>
Date: Fri, 21 Jul 2023 14:48:20 +0800
Subject: [PATCH] Update finetune.py

---
 egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune.py |    8 ++++----
 1 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune.py b/egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune.py
index aef5ba5..993f8ed 100644
--- a/egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune.py
+++ b/egs_modelscope/asr/paraformer/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch/finetune.py
@@ -33,10 +33,10 @@
     params.batch_bins = 2000                       # batch size，如果dataset_type="small"，batch_bins单位为fbank特征帧数，如果dataset_type="large"，batch_bins单位为毫秒，
     params.max_epoch = 20                           # 最大训练轮数
     params.lr = 0.0002                             # 设置学习率
-    init_param = []
-    freeze_param = []
-    ignore_init_mismatch = True
-    use_lora = False
+    init_param = []                                 # 初始模型路径，默认加载modelscope模型初始化，例如: ["checkpoint/20epoch.pb"]
+    freeze_param = []                               # 模型参数freeze, 例如: ["encoder"]
+    ignore_init_mismatch = True                     # 是否忽略模型参数初始化不匹配
+    use_lora = False                                # 是否使用lora进行模型微调
     params.param_dict = {"init_param":init_param, "freeze_param": freeze_param, "ignore_init_mismatch": ignore_init_mismatch}
     if use_lora:
         enable_lora = True

--
Gitblit v1.9.1