From c5339e8302da5ab66af7ca7640b93596de24a4b0 Mon Sep 17 00:00:00 2001
From: 维石 <shixian.shi@alibaba-inc.com>
Date: Mon, 3 Jun 2024 15:27:16 +0800
Subject: [PATCH] update demo
---
runtime/python/libtorch/demo_paraformer.py | 2 +-
runtime/python/onnxruntime/funasr_onnx/paraformer_bin.py | 2 +-
runtime/python/libtorch/funasr_torch/paraformer_bin.py | 2 +-
runtime/python/libtorch/demo_contextual_paraformer.py | 11 ++++++-----
4 files changed, 9 insertions(+), 8 deletions(-)
diff --git a/runtime/python/libtorch/demo_contextual_paraformer.py b/runtime/python/libtorch/demo_contextual_paraformer.py
index 9542d4d..fb2337d 100644
--- a/runtime/python/libtorch/demo_contextual_paraformer.py
+++ b/runtime/python/libtorch/demo_contextual_paraformer.py
@@ -1,12 +1,13 @@
+import torch
from pathlib import Path
-from funasr_torch import Paraformer
+from funasr_torch.paraformer_bin import ContextualParaformer
-model_dir = "damo/speech_paraformer-large-contextual_asr_nat-zh-cn-16k-common-vocab8404"
-model = Paraformer(model_dir, batch_size=1) # cpu
-# model = Paraformer(model_dir, batch_size=1, device_id=0) # gpu
+model_dir = "iic/speech_paraformer-large-contextual_asr_nat-zh-cn-16k-common-vocab8404"
+device_id = 0 if torch.cuda.is_available() else -1
+model = ContextualParaformer(model_dir, batch_size=1, device_id=device_id)  # gpu if available, else cpu
wav_path = "{}/.cache/modelscope/hub/{}/example/asr_example.wav".format(Path.home(), model_dir)
-hotwords = "你的热词 魔搭"
+hotwords = "你的热词 魔搭 达摩院"
result = model(wav_path, hotwords)
print(result)
diff --git a/runtime/python/libtorch/demo_paraformer.py b/runtime/python/libtorch/demo_paraformer.py
index 9c3a082..62355e2 100644
--- a/runtime/python/libtorch/demo_paraformer.py
+++ b/runtime/python/libtorch/demo_paraformer.py
@@ -1,5 +1,5 @@
from pathlib import Path
-from funasr_torch import Paraformer
+from funasr_torch.paraformer_bin import Paraformer
model_dir = "iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
model = Paraformer(model_dir, batch_size=1) # cpu
diff --git a/runtime/python/libtorch/funasr_torch/paraformer_bin.py b/runtime/python/libtorch/funasr_torch/paraformer_bin.py
index e9642c7..b0cd871 100644
--- a/runtime/python/libtorch/funasr_torch/paraformer_bin.py
+++ b/runtime/python/libtorch/funasr_torch/paraformer_bin.py
@@ -50,7 +50,7 @@
if quantize:
model_file = os.path.join(model_dir, "model_quant.torchscripts")
if not os.path.exists(model_file):
- print(".onnx is not exist, begin to export onnx")
+ print(".torchscripts does not exist, begin to export torchscripts")
try:
from funasr import AutoModel
except:
diff --git a/runtime/python/onnxruntime/funasr_onnx/paraformer_bin.py b/runtime/python/onnxruntime/funasr_onnx/paraformer_bin.py
index 2269273..8194283 100644
--- a/runtime/python/onnxruntime/funasr_onnx/paraformer_bin.py
+++ b/runtime/python/onnxruntime/funasr_onnx/paraformer_bin.py
@@ -62,7 +62,7 @@
if quantize:
model_file = os.path.join(model_dir, "model_quant.onnx")
if not os.path.exists(model_file):
- print(".onnx is not exist, begin to export onnx")
+ print(".onnx does not exist, begin to export onnx")
try:
from funasr import AutoModel
except:
--
Gitblit v1.9.1