From 899c7056e8eba1deb561729b5f85ea450dc540d5 Mon Sep 17 00:00:00 2001
From: hnluo <42902471+hnluo@users.noreply.github.com>
Date: Wed, 28 Dec 2022 12:28:31 +0800
Subject: [PATCH] Merge pull request #20 from alibaba-damo-academy/dev

---
 funasr/bin/asr_inference_uniasr.py |    6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/funasr/bin/asr_inference_uniasr.py b/funasr/bin/asr_inference_uniasr.py
index a1a23ba..9aea1a3 100755
--- a/funasr/bin/asr_inference_uniasr.py
+++ b/funasr/bin/asr_inference_uniasr.py
@@ -148,8 +148,8 @@
         for scorer in scorers.values():
             if isinstance(scorer, torch.nn.Module):
                 scorer.to(device=device, dtype=getattr(torch, dtype)).eval()
-        logging.info(f"Beam_search: {beam_search}")
-        logging.info(f"Decoding device={device}, dtype={dtype}")
+        # logging.info(f"Beam_search: {beam_search}")
+        # logging.info(f"Decoding device={device}, dtype={dtype}")
 
         # 5. [Optional] Build Text converter: e.g. bpe-sym -> Text
         if token_type is None:
@@ -167,7 +167,7 @@
         else:
             tokenizer = build_tokenizer(token_type=token_type)
         converter = TokenIDConverter(token_list=token_list)
-        logging.info(f"Text tokenizer: {tokenizer}")
+        # logging.info(f"Text tokenizer: {tokenizer}")
 
         self.asr_model = asr_model
         self.asr_train_args = asr_train_args

--
Gitblit v1.9.1