From 4bfcfd7f13e34da6e25a38c77f1c3de7b138696a Mon Sep 17 00:00:00 2001
From: zhifu gao <zhifu.gzf@alibaba-inc.com>
Date: Tue, 22 Apr 2025 09:53:18 +0800
Subject: [PATCH] Update README_zh.md
---
funasr/tokenizer/char_tokenizer.py | 2 --
 1 file changed, 0 insertions(+), 2 deletions(-)
diff --git a/funasr/tokenizer/char_tokenizer.py b/funasr/tokenizer/char_tokenizer.py
index 805ecd0..7b517da 100644
--- a/funasr/tokenizer/char_tokenizer.py
+++ b/funasr/tokenizer/char_tokenizer.py
@@ -50,9 +50,7 @@
)
def text2tokens(self, line: Union[str, list]) -> List[str]:
-
# if self.split_with_space:
-
if self.seg_dict is not None:
tokens = line.strip().split(" ")
tokens = seg_tokenize(tokens, self.seg_dict)
--
Gitblit v1.9.1