From 585a4d3e5ff8b77ee89c2ec2a9ff1e7cacd79319 Mon Sep 17 00:00:00 2001
From: 雾聪 <wucong.lyb@alibaba-inc.com>
Date: 星期二, 05 三月 2024 18:37:58 +0800
Subject: [PATCH] update docs

---
 funasr/auto/auto_model.py |    9 ++++++---
 1 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/funasr/auto/auto_model.py b/funasr/auto/auto_model.py
index ec3c3f3..9ae9f18 100644
--- a/funasr/auto/auto_model.py
+++ b/funasr/auto/auto_model.py
@@ -143,7 +143,7 @@
     def build_model(self, **kwargs):
         assert "model" in kwargs
         if "model_conf" not in kwargs:
-            logging.info("download models from model hub: {}".format(kwargs.get("model_hub", "ms")))
+            logging.info("download models from model hub: {}".format(kwargs.get("hub", "ms")))
             kwargs = download_model(**kwargs)
         
         set_all_random_seed(kwargs.get("seed", 0))
@@ -180,7 +180,7 @@
         
         # build model
         model_class = tables.model_classes.get(kwargs["model"])
-        model = model_class(**kwargs, **kwargs["model_conf"], vocab_size=vocab_size)
+        model = model_class(**kwargs, **kwargs.get("model_conf", {}), vocab_size=vocab_size)
         model.to(device)
         
         # init_param
@@ -245,7 +245,10 @@
 
             time1 = time.perf_counter()
             with torch.no_grad():
-                results, meta_data = model.inference(**batch, **kwargs)
+                res = model.inference(**batch, **kwargs)
+                if isinstance(res, (list, tuple)):
+                    results = res[0]
+                    meta_data = res[1] if len(res) > 1 else {}
             time2 = time.perf_counter()
 
             asr_result_list.extend(results)

--
Gitblit v1.9.1