From 2ac38adbe5f4e1374a079e032ed4b504351a207c Mon Sep 17 00:00:00 2001
From: zhifu gao <zhifu.gzf@alibaba-inc.com>
Date: Tue, 23 Apr 2024 18:08:57 +0800
Subject: [PATCH] Dev gzf exp (#1647)

---
 funasr/models/ct_transformer_streaming/attention.py | 18 +++---------------
 1 file changed, 3 insertions(+), 15 deletions(-)

diff --git a/funasr/models/ct_transformer_streaming/attention.py b/funasr/models/ct_transformer_streaming/attention.py
index 382334e..3177eca 100644
--- a/funasr/models/ct_transformer_streaming/attention.py
+++ b/funasr/models/ct_transformer_streaming/attention.py
@@ -1,22 +1,10 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
+# -*- encoding: utf-8 -*-
+# Copyright FunASR (https://github.com/alibaba-damo-academy/FunASR). All Rights Reserved.
+# MIT License (https://opensource.org/licenses/MIT)
-
-# Copyright 2019 Shigeki Karita
-# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
-
-"""Multi-Head Attention layer definition."""
-
-import math
-
-import numpy
 import torch
-from torch import nn
-import torch.nn.functional as F
-from typing import Optional, Tuple
-
 from funasr.models.sanm.attention import MultiHeadedAttentionSANM
-
-
 class MultiHeadedAttentionSANMwithMask(MultiHeadedAttentionSANM):
-- 
Gitblit v1.9.1