From f57b68121a526baea43b2e93f4540d8a2995f633 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Mon, 29 Apr 2024 15:15:24 +0800
Subject: [PATCH] batch
---
funasr/models/lcbnet/attention.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/funasr/models/lcbnet/attention.py b/funasr/models/lcbnet/attention.py
index 8e8c594..05a5041 100644
--- a/funasr/models/lcbnet/attention.py
+++ b/funasr/models/lcbnet/attention.py
@@ -11,6 +11,7 @@
 import torch
 from torch import nn
+
 class MultiHeadedAttentionReturnWeight(nn.Module):
     """Multi-Head Attention layer.
@@ -108,5 +109,3 @@
         q, k, v = self.forward_qkv(query, key, value)
         scores = torch.matmul(q, k.transpose(-2, -1)) / math.sqrt(self.d_k)
         return self.forward_attention(v, scores, mask)
-
-
--
Gitblit v1.9.1
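
For context, below is a minimal, self-contained sketch of the class this patch
touches, reconstructed from the ESPnet-style API visible in the hunks
(forward_qkv, scaled dot-product scores, forward_attention). It is not the
actual funasr/models/lcbnet/attention.py source: the constructor arguments, the
four linear projections, the mask convention, and the body of forward_attention
are all assumptions; only forward() mirrors the context lines in the second hunk.

import math

import torch
from torch import nn


class MultiHeadedAttentionReturnWeight(nn.Module):
    """Multi-head attention that returns the attention weights with the output (sketch)."""

    def __init__(self, n_head, n_feat, dropout_rate):
        super().__init__()
        assert n_feat % n_head == 0, "feature dim must be divisible by head count"
        self.d_k = n_feat // n_head  # per-head dimension
        self.h = n_head
        self.linear_q = nn.Linear(n_feat, n_feat)
        self.linear_k = nn.Linear(n_feat, n_feat)
        self.linear_v = nn.Linear(n_feat, n_feat)
        self.linear_out = nn.Linear(n_feat, n_feat)
        self.dropout = nn.Dropout(p=dropout_rate)

    def forward_qkv(self, query, key, value):
        # Project inputs and reshape to (batch, head, time, d_k).
        n_batch = query.size(0)
        q = self.linear_q(query).view(n_batch, -1, self.h, self.d_k).transpose(1, 2)
        k = self.linear_k(key).view(n_batch, -1, self.h, self.d_k).transpose(1, 2)
        v = self.linear_v(value).view(n_batch, -1, self.h, self.d_k).transpose(1, 2)
        return q, k, v

    def forward_attention(self, value, scores, mask):
        # Mask padded positions with a large negative value before the softmax.
        if mask is not None:
            scores = scores.masked_fill(mask.unsqueeze(1).eq(0), -1e9)
        attn = torch.softmax(scores, dim=-1)  # (batch, head, time1, time2)
        x = torch.matmul(self.dropout(attn), value)
        n_batch = x.size(0)
        x = x.transpose(1, 2).contiguous().view(n_batch, -1, self.h * self.d_k)
        # Unlike a plain MultiHeadedAttention, also return the weights.
        return self.linear_out(x), attn

    def forward(self, query, key, value, mask):
        # These three lines match the context shown in the second hunk.
        q, k, v = self.forward_qkv(query, key, value)
        scores = torch.matmul(q, k.transpose(-2, -1)) / math.sqrt(self.d_k)
        return self.forward_attention(v, scores, mask)


# Hypothetical usage: self-attention over an 8-frame sequence with 4 heads.
mha = MultiHeadedAttentionReturnWeight(n_head=4, n_feat=256, dropout_rate=0.1)
x = torch.randn(2, 8, 256)
mask = torch.ones(2, 1, 8, dtype=torch.bool)  # all positions valid
out, weights = mha(x, x, x, mask)  # out: (2, 8, 256), weights: (2, 4, 8, 8)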