From 9b4e9cc8a0311e5243d69b73ed073e7ea441982e Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 27 Mar 2024 16:05:29 +0800
Subject: [PATCH] paraformer: add batch_size to training stats and an export() method

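Record batch_size in the training stats, add an export() method that
rebuilds the model via export_meta.export_rebuild_model (with
max_seq_len defaulting to 512), add the to_device import, drop the
unused pad_list import, and remove stale commented-out code.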
---
 funasr/models/paraformer/model.py |   16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

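Patch note (text between "---" and the diff is ignored by `git am`): a
minimal, hypothetical sketch of calling the new export() entry point;
constructing or loading the Paraformer instance is elided here and
assumed to have been done elsewhere.

    # `model` is assumed to be an already-constructed Paraformer instance.
    exported = model.export()                 # 'max_seq_len' defaults to 512
    exported = model.export(max_seq_len=256)  # or override it explicitly
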
diff --git a/funasr/models/paraformer/model.py b/funasr/models/paraformer/model.py
index 90ce162..bd85df0 100644
--- a/funasr/models/paraformer/model.py
+++ b/funasr/models/paraformer/model.py
@@ -13,13 +13,14 @@
 from funasr.models.ctc.ctc import CTC
 from funasr.utils import postprocess_utils
 from funasr.metrics.compute_acc import th_accuracy
+from funasr.train_utils.device_funcs import to_device
 from funasr.utils.datadir_writer import DatadirWriter
 from funasr.models.paraformer.search import Hypothesis
 from funasr.models.paraformer.cif_predictor import mae_loss
 from funasr.train_utils.device_funcs import force_gatherable
 from funasr.losses.label_smoothing_loss import LabelSmoothingLoss
 from funasr.models.transformer.utils.add_sos_eos import add_sos_eos
-from funasr.models.transformer.utils.nets_utils import make_pad_mask, pad_list
+from funasr.models.transformer.utils.nets_utils import make_pad_mask
 from funasr.utils.load_utils import load_audio_text_image_video, extract_fbank
 
 
@@ -154,8 +155,8 @@
         self.predictor_bias = predictor_bias
         self.sampling_ratio = sampling_ratio
         self.criterion_pre = mae_loss(normalize_length=length_normalized_loss)
-        # self.step_cur = 0
-        #
+
+
         self.share_embedding = share_embedding
         if self.share_embedding:
             self.decoder.embed = None
@@ -230,6 +231,7 @@
         stats["loss_pre"] = loss_pre.detach().cpu() if loss_pre is not None else None
         
         stats["loss"] = torch.clone(loss.detach())
+        stats["batch_size"] = batch_size
         
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
         if self.length_normalized_loss:
@@ -549,3 +551,11 @@
                 
         return results, meta_data
 
+    def export(self, **kwargs):
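+        """Rebuild the model for export; 'max_seq_len' defaults to 512."""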
+        from .export_meta import export_rebuild_model
+        if 'max_seq_len' not in kwargs:
+            kwargs['max_seq_len'] = 512
+        models = export_rebuild_model(model=self, **kwargs)
+        return models
+

--
Gitblit v1.9.1