From 930fe72f43524b4e355ef671c7180cc6cf9eefb5 Mon Sep 17 00:00:00 2001
From: 雾聪 <wucong.lyb@alibaba-inc.com>
Date: Mon, 1 Apr 2024 15:27:20 +0800
Subject: [PATCH] set batch default value

---
 funasr/auto/auto_model.py |   22 ++++++++++++++++------
 1 file changed, 16 insertions(+), 6 deletions(-)

diff --git a/funasr/auto/auto_model.py b/funasr/auto/auto_model.py
index c4bab03..47456a3 100644
--- a/funasr/auto/auto_model.py
+++ b/funasr/auto/auto_model.py
@@ -155,15 +155,15 @@
             device = "cpu"
             kwargs["batch_size"] = 1
         kwargs["device"] = device
-        
-        if kwargs.get("ncpu", None):
-            torch.set_num_threads(kwargs.get("ncpu"))
+
+        torch.set_num_threads(kwargs.get("ncpu", 4))
         
         # build tokenizer
         tokenizer = kwargs.get("tokenizer", None)
         if tokenizer is not None:
             tokenizer_class = tables.tokenizer_classes.get(tokenizer)
-            tokenizer = tokenizer_class(**kwargs["tokenizer_conf"])
+            tokenizer_conf = kwargs.get("tokenizer_conf", {})
+            tokenizer = tokenizer_class(**tokenizer_conf)
             kwargs["tokenizer"] = tokenizer
 
             kwargs["token_list"] = tokenizer.token_list if hasattr(tokenizer, "token_list") else None
@@ -476,11 +476,13 @@
                calib_num: int = 100,
                opset_version: int = 14,
                **cfg):
-        os.environ['EXPORTING_MODEL'] = 'TRUE'
+    
+        device = cfg.get("device", "cpu")
+        model = self.model.to(device=device)
         kwargs = self.kwargs
         deep_update(kwargs, cfg)
+        kwargs["device"] = device
         del kwargs["model"]
-        model = self.model
         model.eval()
 
         batch_size = 1
@@ -493,11 +495,19 @@
                 export_dir = export_utils.export_onnx(
                                         model=model,
                                         data_in=data_list,
+                                        quantize=quantize,
+                                        fallback_num=fallback_num,
+                                        calib_num=calib_num,
+                                        opset_version=opset_version,
                                         **kwargs)
             else:
                 export_dir = export_utils.export_torchscripts(
                                         model=model,
                                         data_in=data_list,
+                                        quantize=quantize,
+                                        fallback_num=fallback_num,
+                                        calib_num=calib_num,
+                                        opset_version=opset_version,
                                         **kwargs)
 
         return export_dir
\ No newline at end of file

--
Gitblit v1.9.1