From b7060884fa4b8b85f79462644a5c99062d223da0 Mon Sep 17 00:00:00 2001
From: Yabin Li <wucong.lyb@alibaba-inc.com>
Date: Tue, 25 Jun 2024 17:38:04 +0800
Subject: [PATCH] Merge Dev tclas (#1847)
---
funasr/utils/export_utils.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/funasr/utils/export_utils.py b/funasr/utils/export_utils.py
index 72b150f..a6d0798 100644
--- a/funasr/utils/export_utils.py
+++ b/funasr/utils/export_utils.py
@@ -23,7 +23,7 @@
export_dir=export_dir,
**kwargs,
)
- elif type == "torchscripts":
+ elif type == "torchscript":
device = "cuda" if torch.cuda.is_available() else "cpu"
print("Exporting torchscripts on device {}".format(device))
_torchscripts(m, path=export_dir, device=device)
@@ -100,7 +100,7 @@
dummy_input = tuple([i.cuda() for i in dummy_input])
model_script = torch.jit.trace(model, dummy_input)
- model_script.save(os.path.join(path, f"{model.export_name}.torchscripts"))
+ model_script.save(os.path.join(path, f"{model.export_name}.torchscript"))
def _bladedisc_opt(model, model_inputs, enable_fp16=True):
@@ -193,4 +193,4 @@
model.encoder = _bladedisc_opt(model.encoder, input_data[:2])
model.decoder = _bladedisc_opt(model.decoder, tuple(decoder_inputs))
model_script = torch.jit.trace(model, input_data)
- model_script.save(os.path.join(path, f"{model.export_name}_blade.torchscripts"))
+ model_script.save(os.path.join(path, f"{model.export_name}_blade.torchscript"))
--
Gitblit v1.9.1