zhifu gao
2024-03-05 8ab670afc3b7a9ae4da4043e21e89024321d5b5b
examples/industrial_data_pretraining/whisper/infer_from_local.sh
@@ -13,13 +13,19 @@
# download model
# Creates a local cache dir and clones the Whisper checkpoints from ModelScope.
# After this section: local_path points at the active model dir, init_param at
# the checkpoint file, device/config at the inference settings used below.
local_path_root="${workspace}/modelscope_models"
mkdir -p "${local_path_root}"

# Whisper-large (multilingual) — cloned for reference; the active model is
# selected below (local_path/init_param are reassigned for v3).
local_path="${local_path_root}/speech_whisper-large_asr_multilingual"
[ -d "${local_path}" ] || git clone https://www.modelscope.cn/iic/speech_whisper-large_asr_multilingual.git "${local_path}"

#Whisper-large-v2
#local_path=${local_path_root}/speech_whisper-large_asr_multilingual
#git clone https://www.modelscope.cn/iic/speech_whisper-large_asr_multilingual.git ${local_path}
#init_param="${local_path}/large-v2.pt"

#Whisper-large-v3
local_path="${local_path_root}/Whisper-large-v3"
[ -d "${local_path}" ] || git clone https://www.modelscope.cn/iic/Whisper-large-v3.git "${local_path}"
init_param="${local_path}/large-v3.pt"

device="cuda:0" # "cuda:0" for gpu0, "cuda:1" for gpu1, "cpu"
config="config.yaml"
# NOTE: removed stale `init_param="${local_path}/large-v2.pt"` which clobbered
# the v3 checkpoint path set above — large-v2.pt does not exist in the
# Whisper-large-v3 checkout, so inference would fail to find the checkpoint.
python -m funasr.bin.inference \
--config-path "${local_path}" \