From c1e365fea09aafda387cac12fdff43d28c598979 Mon Sep 17 00:00:00 2001
From: BienBoy <92378515+BienBoy@users.noreply.github.com>
Date: Sat, 01 Feb 2025 23:29:34 +0800
Subject: [PATCH] fix: resolve unexpected 'out of memory' issue in multi-GPU setup (#2373)
---
funasr/tokenizer/word_tokenizer.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/funasr/tokenizer/word_tokenizer.py b/funasr/tokenizer/word_tokenizer.py
index d7bbaf9..c1fd6e0 100644
--- a/funasr/tokenizer/word_tokenizer.py
+++ b/funasr/tokenizer/word_tokenizer.py
@@ -14,14 +14,12 @@
delimiter: str = None,
non_linguistic_symbols: Union[Path, str, Iterable[str]] = None,
remove_non_linguistic_symbols: bool = False,
- **kwargs,
):
self.delimiter = delimiter
if not remove_non_linguistic_symbols and non_linguistic_symbols is not None:
warnings.warn(
- "non_linguistic_symbols is only used "
- "when remove_non_linguistic_symbols = True"
+ "non_linguistic_symbols is only used " "when remove_non_linguistic_symbols = True"
)
if non_linguistic_symbols is None:
--
Gitblit v1.9.1