From a6441441cb25d26e544bcfe76b2da8d19048d929 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 05 Jun 2024 16:54:33 +0800
Subject: [PATCH] auto frontend
---
funasr/models/llm_asr/adaptor.py | 7 ++++---
funasr/train_utils/trainer_ds.py | 1 -
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/funasr/models/llm_asr/adaptor.py b/funasr/models/llm_asr/adaptor.py
index 82edce3..9b79ed2 100644
--- a/funasr/models/llm_asr/adaptor.py
+++ b/funasr/models/llm_asr/adaptor.py
@@ -1,5 +1,7 @@
import torch
import torch.nn as nn
+import torch.nn.functional as F
+from funasr.models.transformer.utils.nets_utils import make_pad_mask
from funasr.register import tables
@@ -119,9 +121,8 @@
x = self.linear2(x)
olens = None
- if ilens is not None:
- olens = (ilens - 1) // self.k + 1
- mask = (~make_pad_mask(olens)[:, None, :]).to(x.device)
+ olens = (ilens - 1) // self.k + 1
+ masks = (~make_pad_mask(olens)[:, None, :]).to(x.device)
for layer, block in enumerate(self.blocks):
x, masks = block(x, masks)
return x, olens
diff --git a/funasr/train_utils/trainer_ds.py b/funasr/train_utils/trainer_ds.py
index 1a553f8..ec887cc 100644
--- a/funasr/train_utils/trainer_ds.py
+++ b/funasr/train_utils/trainer_ds.py
@@ -621,7 +621,6 @@
self.train_acc_avg = train_acc_avg.detach().cpu().item() / self.world_size
def forward_step(self, model, batch, loss_dict={}):
- dtype = torch.bfloat16
with maybe_autocast(dtype=self.dtype, use_deepspeed=self.use_deepspeed):
retval = model(**batch)
--
Gitblit v1.9.1