From 3a4281f4959534b1bf5d01acf0085f4f8e6f2ec8 Mon Sep 17 00:00:00 2001
From: wuhongsheng <664116298@qq.com>
Date: Fri, 5 Jul 2024 00:55:32 +0800
Subject: [PATCH] 优化speakid和语句匹配逻辑,部分解决speakid不从0递增问题 (#1870)

---
 fun_text_processing/text_normalization/zh/verbalizers/whitelist.py |   12 +++++-------
 1 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/fun_text_processing/text_normalization/zh/verbalizers/whitelist.py b/fun_text_processing/text_normalization/zh/verbalizers/whitelist.py
index 212eb6b..4d04066 100644
--- a/fun_text_processing/text_normalization/zh/verbalizers/whitelist.py
+++ b/fun_text_processing/text_normalization/zh/verbalizers/whitelist.py
@@ -1,18 +1,16 @@
-
-
 import pynini
 from fun_text_processing.text_normalization.zh.graph_utils import FUN_NOT_QUOTE, GraphFst
 from pynini.lib import pynutil
 
 
 class Whitelist(GraphFst):
-    '''
-        tokens { whitelist: "ATM" } -> A T M
-    '''
+    """
+    tokens { whitelist: "ATM" } -> A T M
+    """
 
     def __init__(self, deterministic: bool = True, lm: bool = False):
         super().__init__(name="whitelist", kind="verbalize", deterministic=deterministic)
-        remove_erhua = pynutil.delete("erhua: \"") + pynutil.delete("儿") + pynutil.delete("\"")
-        whitelist = pynutil.delete("name: \"") + pynini.closure(FUN_NOT_QUOTE) + pynutil.delete("\"")
+        remove_erhua = pynutil.delete('erhua: "') + pynutil.delete("儿") + pynutil.delete('"')
+        whitelist = pynutil.delete('name: "') + pynini.closure(FUN_NOT_QUOTE) + pynutil.delete('"')
         graph = remove_erhua | whitelist
         self.fst = graph.optimize()

--
Gitblit v1.9.1