From 3a4281f4959534b1bf5d01acf0085f4f8e6f2ec8 Mon Sep 17 00:00:00 2001
From: wuhongsheng <664116298@qq.com>
Date: Fri, 05 Jul 2024 00:55:32 +0800
Subject: [PATCH] 优化speakid和语句匹配逻辑,部分解决speakid不从0递增问题 (#1870)

---
 fun_text_processing/inverse_text_normalization/pt/verbalizers/money.py |   14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/fun_text_processing/inverse_text_normalization/pt/verbalizers/money.py b/fun_text_processing/inverse_text_normalization/pt/verbalizers/money.py
index 9cde5f4..ae8baec 100644
--- a/fun_text_processing/inverse_text_normalization/pt/verbalizers/money.py
+++ b/fun_text_processing/inverse_text_normalization/pt/verbalizers/money.py
@@ -1,8 +1,10 @@
-
-
-
 import pynini
-from fun_text_processing.text_normalization.en.graph_utils import DAMO_CHAR, GraphFst, delete_space, insert_space
+from fun_text_processing.text_normalization.en.graph_utils import (
+    DAMO_CHAR,
+    GraphFst,
+    delete_space,
+    insert_space,
+)
 from pynini.lib import pynutil
 
 
@@ -20,9 +22,9 @@
         unit = (
             pynutil.delete("currency:")
             + delete_space
-            + pynutil.delete("\"")
+            + pynutil.delete('"')
             + pynini.closure(DAMO_CHAR - " ", 1)
-            + pynutil.delete("\"")
+            + pynutil.delete('"')
         )
         graph = unit + delete_space + insert_space + decimal.numbers
         delete_tokens = self.delete_tokens(graph)

--
Gitblit v1.9.1