From a6441441cb25d26e544bcfe76b2da8d19048d929 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 05 Jun 2024 16:54:33 +0800
Subject: [PATCH] auto frontend

---
 funasr/models/llm_asr/adaptor.py |    7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/funasr/models/llm_asr/adaptor.py b/funasr/models/llm_asr/adaptor.py
index 82edce3..9b79ed2 100644
--- a/funasr/models/llm_asr/adaptor.py
+++ b/funasr/models/llm_asr/adaptor.py
@@ -1,5 +1,7 @@
 import torch
 import torch.nn as nn
+import torch.nn.functional as F
+from funasr.models.transformer.utils.nets_utils import make_pad_mask
 
 from funasr.register import tables
 
@@ -119,9 +121,8 @@
         x = self.linear2(x)
 
         olens = None
-        if ilens is not None:
-            olens = (ilens - 1) // self.k + 1
-            mask = (~make_pad_mask(olens)[:, None, :]).to(x.device)
+        olens = (ilens - 1) // self.k + 1
+        masks = (~make_pad_mask(olens)[:, None, :]).to(x.device)
         for layer, block in enumerate(self.blocks):
             x, masks = block(x, masks)
         return x, olens

--
Gitblit v1.9.1