雾聪
2024-03-21 d4aaa84ad16c2c862ffcb5d73bf7852c8ee90d24
funasr/models/sanm/attention.py
@@ -831,6 +831,3 @@
        scores = torch.matmul(q_h, k_h.transpose(-2, -1))
        att_outs = self.forward_attention(v_h, scores, mask, mask_att_chunk_encoder)
        return att_outs
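
Note: the hunk above shows the tail of a SANM multi-head attention forward pass: attention scores are formed as Q·Kᵀ via torch.matmul, and forward_attention then presumably applies the masks, the softmax, and the weighted sum over the value heads (mask_att_chunk_encoder appears to be an additional chunk-level mask). The following is a minimal, self-contained sketch of that pattern under those assumptions; the function name, mask handling, and explicit 1/sqrt(d_k) scaling here are illustrative and not the repository's exact API (in the file above, the scaling may already be folded into q_h before this point).

    import math
    import torch

    def scaled_dot_product_attention(q_h, k_h, v_h, mask=None):
        # q_h, k_h, v_h: (batch, heads, time, d_k)
        d_k = q_h.size(-1)
        # attention scores, as in the hunk: Q @ K^T (scaled here for numerical stability)
        scores = torch.matmul(q_h, k_h.transpose(-2, -1)) / math.sqrt(d_k)
        if mask is not None:
            # zero entries in the mask are excluded from attention before the softmax
            scores = scores.masked_fill(mask == 0, float("-inf"))
        attn = torch.softmax(scores, dim=-1)
        # weighted sum over values, analogous to what forward_attention returns
        return torch.matmul(attn, v_h)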