From 100ea0304b956e55a9c2fe284b1ee1a26bdf2b7c Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Thu, 20 Apr 2023 23:49:15 +0800
Subject: [PATCH] docs
---
egs/aishell/transformer/utils/prepare_checkpoint.py | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/egs_modelscope/asr/TEMPLATE/infer_after_finetune.py b/egs/aishell/transformer/utils/prepare_checkpoint.py
similarity index 75%
rename from egs_modelscope/asr/TEMPLATE/infer_after_finetune.py
rename to egs/aishell/transformer/utils/prepare_checkpoint.py
index 2d311dd..01763d4 100644
--- a/egs_modelscope/asr/TEMPLATE/infer_after_finetune.py
+++ b/egs/aishell/transformer/utils/prepare_checkpoint.py
@@ -1,12 +1,9 @@
-import json
import os
import shutil
from modelscope.pipelines import pipeline
from modelscope.utils.constant import Tasks
from modelscope.hub.snapshot_download import snapshot_download
-
-from funasr.utils.compute_wer import compute_wer
def modelscope_infer_after_finetune(params):
# prepare for decoding
@@ -39,10 +36,14 @@
if __name__ == '__main__':
- params = {}
- params["modelscope_model_name"] = "damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
- params["output_dir"] = "./checkpoint"
- params["data_dir"] = "./data/test"
- params["decoding_model_name"] = "valid.acc.ave_10best.pb"
- params["batch_size"] = 64
- modelscope_infer_after_finetune(params)
\ No newline at end of file
+ import sys
+
+ model = sys.argv[1]
+ checkpoint_dir = sys.argv[2]
+ checkpoint_name = sys.argv[3]
+
+ try:
+ pretrained_model_path = snapshot_download(model, cache_dir=checkpoint_dir)
+ except BaseException:
+ raise BaseException(f"Please download pretrain model from ModelScope firstly.")
+ shutil.copy(os.path.join(checkpoint_dir, checkpoint_name), os.path.join(pretrained_model_path, "model.pb"))
--
Gitblit v1.9.1