From 807a3acdbb6511dab3b5af5e952ef5a8fe231c99 Mon Sep 17 00:00:00 2001
From: 雾聪 <wucong.lyb@alibaba-inc.com>
Date: Sun, 03 Mar 2024 20:37:29 +0800
Subject: [PATCH] contextual_paraformer: remove verbose logging and reuse DatadirWriter

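Drop the noisy logging in the contextual paraformer model: the bias-encoder
setup no longer emits a warning on every construction, and the sampler no
longer logs its enable/disable state during the first training steps. Also
cache the DatadirWriter on the model instance during inference, so the
output-directory writer is created once and reused across hypotheses instead
of being re-instantiated on every loop iteration.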
---
 funasr/models/contextual_paraformer/model.py |   15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)

diff --git a/funasr/models/contextual_paraformer/model.py b/funasr/models/contextual_paraformer/model.py
index abbac8c..49868a8 100644
--- a/funasr/models/contextual_paraformer/model.py
+++ b/funasr/models/contextual_paraformer/model.py
@@ -65,11 +65,9 @@
 
 
         if bias_encoder_type == 'lstm':
-            logging.warning("enable bias encoder sampling and contextual training")
             self.bias_encoder = torch.nn.LSTM(inner_dim, inner_dim, 1, batch_first=True, dropout=bias_encoder_dropout_rate)
             self.bias_embed = torch.nn.Embedding(self.vocab_size, inner_dim)
         elif bias_encoder_type == 'mean':
-            logging.warning("enable bias encoder sampling and contextual training")
             self.bias_embed = torch.nn.Embedding(self.vocab_size, inner_dim)
         else:
             logging.error("Unsupport bias encoder type: {}".format(bias_encoder_type))
@@ -192,13 +190,10 @@
         # 0. sampler
         decoder_out_1st = None
         if self.sampling_ratio > 0.0:
-            if self.step_cur < 2:
-                logging.info("enable sampler in paraformer, sampling_ratio: {}".format(self.sampling_ratio))
+
             sematic_embeds, decoder_out_1st = self.sampler(encoder_out, encoder_out_lens, ys_pad, ys_pad_lens,
                                                            pre_acoustic_embeds, contextual_info)
         else:
-            if self.step_cur < 2:
-                logging.info("disable sampler in paraformer, sampling_ratio: {}".format(self.sampling_ratio))
             sematic_embeds = pre_acoustic_embeds
         
         # 1. Forward decoder
@@ -382,9 +377,11 @@
                 nbest_hyps = [Hypothesis(yseq=yseq, score=score)]
             for nbest_idx, hyp in enumerate(nbest_hyps):
                 ibest_writer = None
-                if ibest_writer is None and kwargs.get("output_dir") is not None:
-                    writer = DatadirWriter(kwargs.get("output_dir"))
-                    ibest_writer = writer[f"{nbest_idx + 1}best_recog"]
+                if kwargs.get("output_dir") is not None:
+                    if not hasattr(self, "writer"):
+                        self.writer = DatadirWriter(kwargs.get("output_dir"))
+                    ibest_writer = self.writer[f"{nbest_idx + 1}best_recog"]
+
                 # remove sos/eos and get results
                 last_pos = -1
                 if isinstance(hyp.yseq, list):

--
Gitblit v1.9.1