From 1cdb3cc28d4d89a576cc06e5cd8eb80da1f3a3aa Mon Sep 17 00:00:00 2001
From: zhifu gao <zhifu.gzf@alibaba-inc.com>
Date: Fri, 26 Apr 2024 11:27:39 +0800
Subject: [PATCH] Dev gzf exp (#1665)
---
funasr/models/emotion2vec/modules.py | 22 +++++++---------------
 1 file changed, 7 insertions(+), 15 deletions(-)
diff --git a/funasr/models/emotion2vec/modules.py b/funasr/models/emotion2vec/modules.py
index 33947f2..6e36409 100644
--- a/funasr/models/emotion2vec/modules.py
+++ b/funasr/models/emotion2vec/modules.py
@@ -4,9 +4,10 @@
# LICENSE file in the root directory of this source tree.
import torch
-import torch.nn as nn
-import torch.nn.functional as F
import numpy as np
+import torch.nn as nn
+from enum import Enum, auto
+import torch.nn.functional as F
from dataclasses import dataclass
from funasr.models.emotion2vec.fairseq_modules import (
LayerNorm,
@@ -14,12 +15,11 @@
TransposeLast,
)
-from enum import Enum, auto
+
class Modality(Enum):
AUDIO = auto()
-
@dataclass
class D2vDecoderConfig:
decoder_dim: int = 384
@@ -77,18 +77,10 @@
x = self.dropout(x)
for i, blk in enumerate(self.blocks):
- if (
- not self.training
- or self.layerdrop == 0
- or (np.random.random() > self.layerdrop)
- ):
+ if not self.training or self.layerdrop == 0 or (np.random.random() > self.layerdrop):
ab = alibi_bias
if ab is not None and alibi_scale is not None:
- scale = (
- alibi_scale[i]
- if alibi_scale.size(0) > 1
- else alibi_scale.squeeze(0)
- )
+ scale = alibi_scale[i] if alibi_scale.size(0) > 1 else alibi_scale.squeeze(0)
ab = ab * scale.type_as(ab)
x, _ = blk(x, padding_mask, ab)
@@ -264,7 +256,7 @@
super().__init__()
self.num_heads = num_heads
head_dim = dim // num_heads
- self.scale = qk_scale or head_dim ** -0.5
+ self.scale = qk_scale or head_dim**-0.5
self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
self.attn_drop = nn.Dropout(attn_drop)
--
Gitblit v1.9.1