From dc8a0445d5460af16e9926a492a85da592f224bb Mon Sep 17 00:00:00 2001
From: lyblsgo <lyblsgo@163.com>
Date: Tue, 11 Apr 2023 20:00:13 +0800
Subject: [PATCH] fix: correct model id and lower learning rate in data2vec finetune example
---
egs_modelscope/asr/data2vec/speech_data2vec_pretrain-paraformer-zh-cn-aishell2-16k/finetune.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/egs_modelscope/asr/data2vec/speech_data2vec_pretrain-paraformer-zh-cn-aishell2-16k/finetune.py b/egs_modelscope/asr/data2vec/speech_data2vec_pretrain-paraformer-zh-cn-aishell2-16k/finetune.py
index 005cae6..a5f1ee4 100644
--- a/egs_modelscope/asr/data2vec/speech_data2vec_pretrain-paraformer-zh-cn-aishell2-16k/finetune.py
+++ b/egs_modelscope/asr/data2vec/speech_data2vec_pretrain-paraformer-zh-cn-aishell2-16k/finetune.py
@@ -25,13 +25,13 @@
if __name__ == '__main__':
- params = modelscope_args(model="damo/speech_data2vec_pretrain-zh-cn-aishell2-16k-pytorch",
+ params = modelscope_args(model="damo/speech_data2vec_pretrain-paraformer-zh-cn-aishell2-16k",
data_path="./data")
params.output_dir = "./checkpoint"
params.data_path = "./example_data/"
params.dataset_type = "small"
params.batch_bins = 16000
params.max_epoch = 50
- params.lr = 0.00005
+ params.lr = 0.00002
modelscope_finetune(params)
--
Gitblit v1.9.1