From acb9a0fec8d8a4dabeedcbb8e08c26f66d7083f0 Mon Sep 17 00:00:00 2001
From: haoneng.lhn <haoneng.lhn@alibaba-inc.com>
Date: Fri, 8 Dec 2023 16:19:00 +0800
Subject: [PATCH] fix loss normalization for ddp training
---
funasr/models/e2e_asr.py | 2 +-
 funasr/models/e2e_asr_contextual_paraformer.py | 2 +-
 funasr/models/e2e_asr_paraformer.py | 12 ++++++------
3 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/funasr/models/e2e_asr.py b/funasr/models/e2e_asr.py
index 162bfba..050847e 100644
--- a/funasr/models/e2e_asr.py
+++ b/funasr/models/e2e_asr.py
@@ -222,7 +222,7 @@
# force_gatherable: to-device and to-tensor if scalar for DataParallel
if self.length_normalized_loss:
- batch_size = (text_lengths + 1).sum().type_as(batch_size)
+ batch_size = int((text_lengths + 1).sum())
loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
return loss, stats, weight
diff --git a/funasr/models/e2e_asr_contextual_paraformer.py b/funasr/models/e2e_asr_contextual_paraformer.py
index d4dc784..b474dbc 100644
--- a/funasr/models/e2e_asr_contextual_paraformer.py
+++ b/funasr/models/e2e_asr_contextual_paraformer.py
@@ -233,7 +233,7 @@
stats["loss"] = torch.clone(loss.detach())
# force_gatherable: to-device and to-tensor if scalar for DataParallel
if self.length_normalized_loss:
- batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+ batch_size = int((text_lengths + self.predictor_bias).sum())
loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
return loss, stats, weight
diff --git a/funasr/models/e2e_asr_paraformer.py b/funasr/models/e2e_asr_paraformer.py
index b793d52..0e0b95b 100644
--- a/funasr/models/e2e_asr_paraformer.py
+++ b/funasr/models/e2e_asr_paraformer.py
@@ -255,7 +255,7 @@
# force_gatherable: to-device and to-tensor if scalar for DataParallel
if self.length_normalized_loss:
- batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+ batch_size = int((text_lengths + self.predictor_bias).sum())
loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
return loss, stats, weight
@@ -867,7 +867,7 @@
# force_gatherable: to-device and to-tensor if scalar for DataParallel
if self.length_normalized_loss:
- batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+ batch_size = int((text_lengths + self.predictor_bias).sum())
loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
return loss, stats, weight
@@ -1494,7 +1494,7 @@
# force_gatherable: to-device and to-tensor if scalar for DataParallel
if self.length_normalized_loss:
- batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+ batch_size = int((text_lengths + self.predictor_bias).sum())
loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
return loss, stats, weight
@@ -1765,7 +1765,7 @@
# force_gatherable: to-device and to-tensor if scalar for DataParallel
if self.length_normalized_loss:
- batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+ batch_size = int((text_lengths + self.predictor_bias).sum())
loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
return loss, stats, weight
@@ -1967,7 +1967,7 @@
# force_gatherable: to-device and to-tensor if scalar for DataParallel
if self.length_normalized_loss:
- batch_size = (text_lengths + self.predictor_bias).sum().type_as(batch_size)
+ batch_size = int((text_lengths + self.predictor_bias).sum())
loss, stats, weight = force_gatherable((loss, stats, batch_size), loss.device)
return loss, stats, weight
@@ -2262,4 +2262,4 @@
"torch tensor: {}, {}, loading from tf tensor: {}, {}".format(name, data_tf.size(), name_tf,
var_dict_tf[name_tf].shape))
- return var_dict_torch_update
\ No newline at end of file
+ return var_dict_torch_update
--
Gitblit v1.9.1