From 6427c834dfd97b1f05c6659cdc7ccf010bf82fe1 Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Mon, 24 Apr 2023 19:50:07 +0800
Subject: [PATCH] embedding: add forward_chunk for chunked/streaming positional encoding
---
funasr/modules/embedding.py | 22 ++++++++++++++++++++--
 1 file changed, 20 insertions(+), 2 deletions(-)
diff --git a/funasr/modules/embedding.py b/funasr/modules/embedding.py
index b61a61a..79ca0b2 100644
--- a/funasr/modules/embedding.py
+++ b/funasr/modules/embedding.py
@@ -8,7 +8,7 @@
import math
import torch
-
+import torch.nn.functional as F
def _pre_hook(
state_dict,
@@ -405,4 +405,22 @@
positions = torch.arange(1, timesteps+1)[None, :]
position_encoding = self.encode(positions, input_dim, x.dtype).to(x.device)
- return x + position_encoding
\ No newline at end of file
+ return x + position_encoding
+
+ def forward_chunk(self, x, cache=None):
+ start_idx = 0
+ pad_left = 0
+ pad_right = 0
+ batch_size, timesteps, input_dim = x.size()
+ if cache is not None:
+ start_idx = cache["start_idx"]
+ pad_left = cache["left"]
+ pad_right = cache["right"]
+ positions = torch.arange(1, timesteps+start_idx+1)[None, :]
+ position_encoding = self.encode(positions, input_dim, x.dtype).to(x.device)
+ outputs = x + position_encoding[:, start_idx: start_idx + timesteps]
+ outputs = outputs.transpose(1,2)
+ outputs = F.pad(outputs, (pad_left, pad_right))
+ outputs = outputs.transpose(1,2)
+ return outputs
+
--
Gitblit v1.9.1