From 4137f5cf26e7c4b40853959cd2574edfde03aa60 Mon Sep 17 00:00:00 2001
From: 志浩 <neo.dzh@alibaba-inc.com>
Date: Fri, 07 Apr 2023 21:03:34 +0800
Subject: [PATCH] Merge branch 'main' of github.com:alibaba-damo-academy/FunASR into dev_dzh

---
 funasr/modules/embedding.py |   22 ++++++++++++++++++++--
 1 file changed, 20 insertions(+), 2 deletions(-)

diff --git a/funasr/modules/embedding.py b/funasr/modules/embedding.py
index b61a61a..79ca0b2 100644
--- a/funasr/modules/embedding.py
+++ b/funasr/modules/embedding.py
@@ -8,7 +8,7 @@
 
 import math
 import torch
-
+import torch.nn.functional as F
 
 def _pre_hook(
     state_dict,
@@ -405,4 +405,22 @@
         positions = torch.arange(1, timesteps+1)[None, :]
         position_encoding = self.encode(positions, input_dim, x.dtype).to(x.device)
 
-        return x + position_encoding
\ No newline at end of file
+        return x + position_encoding
+
+    def forward_chunk(self, x, cache=None):
+        start_idx = 0
+        pad_left = 0
+        pad_right = 0
+        batch_size, timesteps, input_dim = x.size()
+        if cache is not None:
+            start_idx = cache["start_idx"]
+            pad_left = cache["left"]
+            pad_right = cache["right"]
+        positions = torch.arange(1, timesteps+start_idx+1)[None, :]
+        position_encoding = self.encode(positions, input_dim, x.dtype).to(x.device)
+        outputs = x + position_encoding[:, start_idx: start_idx + timesteps]
+        outputs = outputs.transpose(1,2)
+        outputs = F.pad(outputs, (pad_left, pad_right))
+        outputs = outputs.transpose(1,2)
+        return outputs
+       

--
Gitblit v1.9.1