From 57e023e5cfa522007e24d87c8e3d82bf7c4a19cd Mon Sep 17 00:00:00 2001
From: Shi Xian <40013335+R1ckShi@users.noreply.github.com>
Date: Tue, 18 Feb 2025 15:15:40 +0800
Subject: [PATCH] Merge pull request #2382 from msgk239/dev_clean

---
 funasr/tokenizer/char_tokenizer.py |    2 --
 1 file changed, 0 insertions(+), 2 deletions(-)

diff --git a/funasr/tokenizer/char_tokenizer.py b/funasr/tokenizer/char_tokenizer.py
index 805ecd0..7b517da 100644
--- a/funasr/tokenizer/char_tokenizer.py
+++ b/funasr/tokenizer/char_tokenizer.py
@@ -50,9 +50,7 @@
         )
 
     def text2tokens(self, line: Union[str, list]) -> List[str]:
-
         # if self.split_with_space:
-
         if self.seg_dict is not None:
             tokens = line.strip().split(" ")
             tokens = seg_tokenize(tokens, self.seg_dict)

--
Gitblit v1.9.1