From 0f1247d7a8d481a7b5825c903e819dc543224740 Mon Sep 17 00:00:00 2001
From: R1ckShi <shixian.shi@alibaba-inc.com>
Date: Tue, 11 Jun 2024 14:41:21 +0800
Subject: [PATCH] update scripts
---
funasr/utils/export_utils.py | 1 +
runtime/python/libtorch/funasr_torch/utils/timestamp_utils.py | 2 +-
examples/industrial_data_pretraining/bicif_paraformer/export.py | 18 +++++++++---------
3 files changed, 11 insertions(+), 10 deletions(-)
diff --git a/examples/industrial_data_pretraining/bicif_paraformer/export.py b/examples/industrial_data_pretraining/bicif_paraformer/export.py
index 31098d2..44849b0 100644
--- a/examples/industrial_data_pretraining/bicif_paraformer/export.py
+++ b/examples/industrial_data_pretraining/bicif_paraformer/export.py
@@ -12,17 +12,17 @@
device="cpu",
)
-res = model.export(type="onnx", quantize=False)
+res = model.export(type="torchscripts", quantize=False)
print(res)
-# method2, inference from local path
-from funasr import AutoModel
+# # method2, inference from local path
+# from funasr import AutoModel
-model = AutoModel(
- model="/Users/zhifu/.cache/modelscope/hub/iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch",
- device="cpu",
-)
+# model = AutoModel(
+# model="/Users/zhifu/.cache/modelscope/hub/iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch",
+# device="cpu",
+# )
-res = model.export(type="onnx", quantize=False)
-print(res)
+# res = model.export(type="onnx", quantize=False)
+# print(res)
diff --git a/funasr/utils/export_utils.py b/funasr/utils/export_utils.py
index 8f1aa53..d205400 100644
--- a/funasr/utils/export_utils.py
+++ b/funasr/utils/export_utils.py
@@ -22,6 +22,7 @@
)
elif type == 'torchscripts':
device = 'cuda' if torch.cuda.is_available() else 'cpu'
+ print("Exporting torchscripts on device {}".format(device))
_torchscripts(
m,
path=export_dir,
diff --git a/runtime/python/libtorch/funasr_torch/utils/timestamp_utils.py b/runtime/python/libtorch/funasr_torch/utils/timestamp_utils.py
index 5abbafe..a10d193 100644
--- a/runtime/python/libtorch/funasr_torch/utils/timestamp_utils.py
+++ b/runtime/python/libtorch/funasr_torch/utils/timestamp_utils.py
@@ -7,7 +7,7 @@
START_END_THRESHOLD = 5
MAX_TOKEN_DURATION = 30
TIME_RATE = 10.0 * 6 / 1000 / 3 # 3 times upsampled
- cif_peak = us_cif_peak.reshape(-1)
+ cif_peak = us_cif_peak.reshape(-1).cpu()
num_frames = cif_peak.shape[-1]
if char_list[-1] == "</s>":
char_list = char_list[:-1]
--
Gitblit v1.9.1