From 73959c6e8e80a6e735bbb7d63acf942a6e2f652d Mon Sep 17 00:00:00 2001
From: hnluo <haoneng.lhn@alibaba-inc.com>
Date: Thu, 07 Dec 2023 14:54:25 +0800
Subject: [PATCH] Merge pull request #1157 from alibaba-damo-academy/dev_lhn

---
 funasr/models/e2e_asr_contextual_paraformer.py |    3 +++
 1 file changed, 3 insertions(+), 0 deletions(-)

diff --git a/funasr/models/e2e_asr_contextual_paraformer.py b/funasr/models/e2e_asr_contextual_paraformer.py
index a2f7078..d4dc784 100644
--- a/funasr/models/e2e_asr_contextual_paraformer.py
+++ b/funasr/models/e2e_asr_contextual_paraformer.py
@@ -125,6 +125,7 @@
         if self.crit_attn_weight > 0:
             self.attn_loss = torch.nn.L1Loss()
         self.crit_attn_smooth = crit_attn_smooth
+        self.length_normalized_loss = length_normalized_loss
 
     def forward(
             self,
@@ -231,6 +232,8 @@
 
         stats["loss"] = torch.clone(loss.detach())
         # force_gatherable: to-device and to-tensor if scalar for DataParallel
+        if self.length_normalized_loss:
+            batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
         loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
         return loss, stats, weight
     

--
Gitblit v1.9.1