From 0efc87352ce7d3903dbdedbfa5d01ca5e1cb19e7 Mon Sep 17 00:00:00 2001
From: Shi Xian <40013335+R1ckShi@users.noreply.github.com>
Date: Thu, 5 Dec 2024 15:15:38 +0800
Subject: [PATCH] Merge pull request #2267 from modelscope/dev_sx2

---
 fun_text_processing/inverse_text_normalization/de/verbalizers/cardinal.py |    5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/fun_text_processing/inverse_text_normalization/de/verbalizers/cardinal.py b/fun_text_processing/inverse_text_normalization/de/verbalizers/cardinal.py
index b98fd48..a9f8426 100644
--- a/fun_text_processing/inverse_text_normalization/de/verbalizers/cardinal.py
+++ b/fun_text_processing/inverse_text_normalization/de/verbalizers/cardinal.py
@@ -1,4 +1,3 @@
-
 import pynini
 from fun_text_processing.text_normalization.en.graph_utils import DAMO_NOT_QUOTE, GraphFst
 from pynini.lib import pynutil
@@ -16,7 +15,9 @@
     def __init__(self, tn_cardinal_verbalizer: GraphFst, deterministic: bool = True):
         super().__init__(name="cardinal", kind="verbalize", deterministic=deterministic)
         self.numbers = tn_cardinal_verbalizer.numbers
-        optional_sign = pynini.closure(pynutil.delete("negative: \"") + DAMO_NOT_QUOTE + pynutil.delete("\" "), 0, 1)
+        optional_sign = pynini.closure(
+            pynutil.delete('negative: "') + DAMO_NOT_QUOTE + pynutil.delete('" '), 0, 1
+        )
         graph = optional_sign + self.numbers
         delete_tokens = self.delete_tokens(graph)
         self.fst = delete_tokens.optimize()

--
Gitblit v1.9.1