From c1e365fea09aafda387cac12fdff43d28c598979 Mon Sep 17 00:00:00 2001
From: BienBoy <92378515+BienBoy@users.noreply.github.com>
Date: Sat, 01 Feb 2025 23:29:34 +0800
Subject: [PATCH] fix: resolve unexpected 'out of memory' issue in multi-GPU setup (#2373)

---
 funasr/tokenizer/hf_tokenizer.py |   18 ++++++++----------
 1 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/funasr/tokenizer/hf_tokenizer.py b/funasr/tokenizer/hf_tokenizer.py
index c856b3d..1ac7ae2 100644
--- a/funasr/tokenizer/hf_tokenizer.py
+++ b/funasr/tokenizer/hf_tokenizer.py
@@ -1,15 +1,18 @@
-
-try:
-	from transformers import AutoTokenizer
-except:
-	print("If you want to use hugging, please `pip install -U transformers`")
-
 from funasr.register import tables
+
 
 @tables.register("tokenizer_classes", "HuggingfaceTokenizer")
 def HuggingfaceTokenizer(init_param_path, **kwargs):
+    """Build a Hugging Face tokenizer from ``init_param_path``.
+
+    transformers is imported lazily, inside the factory, so the library is
+    only loaded in the process that actually constructs a tokenizer.
+    """
+    try:
+        from transformers import AutoTokenizer
+    except ImportError as e:
+        # Swallowing this (except: pass) would surface as a confusing
+        # NameError on the next line; fail loudly with the install hint.
+        raise ImportError(
+            "HuggingfaceTokenizer requires transformers; "
+            "install it with `pip install -U transformers`"
+        ) from e
+    tokenizer = AutoTokenizer.from_pretrained(init_param_path)
 
-	tokenizer = AutoTokenizer.from_pretrained(init_param_path)
-	
-	return tokenizer
-
+    return tokenizer

--
Gitblit v1.9.1