From 28ccfbfc51068a663a80764e14074df5edf2b5ba Mon Sep 17 00:00:00 2001
From: kongdeqiang <kongdeqiang960204@163.com>
Date: Fri, 13 Mar 2026 17:41:41 +0800
Subject: [PATCH] label_smoothing_loss: fix make_pad_mask import path and use contiguous views
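
Move the make_pad_mask import to its new location under
funasr.models.transformer.utils.nets_utils, call .contiguous() before
.view() in LabelSmoothingLoss.forward so the flatten also works on
non-contiguous decoder outputs, and apply minor formatting cleanups
(single-line __init__ for SequenceBinaryCrossEntropy, consistent double
quotes, stray space removed in the NllLoss criterion call).

Below is a minimal, illustrative sketch (not part of the patched file;
the shapes are assumed) of why the .contiguous() call matters: .view()
only works on tensors whose memory layout is already contiguous, which
is not guaranteed after operations such as transpose().

    import torch

    # (B, T, V) logits transposed to (T, B, V): same data, new strides,
    # so the tensor is no longer contiguous in memory.
    x = torch.randn(4, 6, 10).transpose(0, 1)

    try:
        x.view(-1, 10)                    # raises RuntimeError on this layout
    except RuntimeError as err:
        print("view() failed:", err)

    flat = x.contiguous().view(-1, 10)    # copies to a dense layout first
    print(flat.shape)                     # torch.Size([24, 10])

torch.Tensor.reshape() would also handle both layouts; this change keeps
the explicit .contiguous().view() form.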
---
funasr/losses/label_smoothing_loss.py | 16 ++++++----------
1 file changed, 6 insertions(+), 10 deletions(-)
diff --git a/funasr/losses/label_smoothing_loss.py b/funasr/losses/label_smoothing_loss.py
index c272ea8..ffc38da 100644
--- a/funasr/losses/label_smoothing_loss.py
+++ b/funasr/losses/label_smoothing_loss.py
@@ -8,7 +8,7 @@
import torch
from torch import nn
-from funasr.modules.nets_utils import make_pad_mask
+from funasr.models.transformer.utils.nets_utils import make_pad_mask
class LabelSmoothingLoss(nn.Module):
@@ -50,8 +50,8 @@
"""
assert x.size(2) == self.size
batch_size = x.size(0)
- x = x.view(-1, self.size)
- target = target.view(-1)
+ x = x.contiguous().view(-1, self.size)
+ target = target.contiguous().view(-1)
with torch.no_grad():
true_dist = x.clone()
true_dist.fill_(self.smoothing / (self.size - 1))
@@ -65,11 +65,7 @@
class SequenceBinaryCrossEntropy(nn.Module):
- def __init__(
- self,
- normalize_length=False,
- criterion=nn.BCEWithLogitsLoss(reduction="none")
- ):
+ def __init__(self, normalize_length=False, criterion=nn.BCEWithLogitsLoss(reduction="none")):
super().__init__()
self.normalize_length = normalize_length
self.criterion = criterion
@@ -95,7 +91,7 @@
size,
padding_idx,
normalize_length=False,
- criterion=nn.NLLLoss(reduction='none'),
+ criterion=nn.NLLLoss(reduction="none"),
):
"""Construct an NllLoss object."""
super(NllLoss, self).__init__()
@@ -122,6 +118,6 @@
ignore = target == self.padding_idx # (B,)
total = len(target) - ignore.sum().item()
target = target.masked_fill(ignore, 0) # avoid -1 index
- kl = self.criterion(x , target)
+ kl = self.criterion(x, target)
denom = total if self.normalize_length else batch_size
return kl.masked_fill(ignore, 0).sum() / denom
--
Gitblit v1.9.1