From d80ac2fd2df4e7fb8a28acfa512bb11472b5cc99 Mon Sep 17 00:00:00 2001
From: liugz18 <57401541+liugz18@users.noreply.github.com>
Date: Thu, 18 Jul 2024 21:34:55 +0800
Subject: [PATCH] Rename 'res' in line 514 to avoid naming conflict with line 365

---
 funasr/models/rwkv_bat/rwkv_encoder.py |   18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/funasr/models/rwkv_bat/rwkv_encoder.py b/funasr/models/rwkv_bat/rwkv_encoder.py
index c0e5f42..a27088d 100644
--- a/funasr/models/rwkv_bat/rwkv_encoder.py
+++ b/funasr/models/rwkv_bat/rwkv_encoder.py
@@ -44,7 +44,7 @@
         att_dropout_rate: float = 0.0,
         ffn_dropout_rate: float = 0.0,
         dropout_rate: float = 0.0,
-        subsampling_factor: int =4,
+        subsampling_factor: int = 4,
         time_reduction_factor: int = 1,
         kernel: int = 3,
         **kwargs,
@@ -54,7 +54,7 @@
 
         self.embed = RWKVConvInput(
             input_size,
-            [output_size//4, output_size//2, output_size],
+            [output_size // 4, output_size // 2, output_size],
             subsampling_factor,
             conv_kernel_size=kernel,
             output_size=output_size,
@@ -64,7 +64,7 @@
 
         linear_size = output_size * 4 if linear_size is None else linear_size
         attention_size = output_size if attention_size is None else attention_size
-        
+
         self.rwkv_blocks = torch.nn.ModuleList(
             [
                 RWKV(
@@ -122,12 +122,12 @@
                 x, _ = block(x)
         else:
             x = self.rwkv_infer(x)
-            
+
         x = self.final_norm(x)
 
         if self.time_reduction_factor > 1:
-            x = x[:,::self.time_reduction_factor,:]
-            olens = torch.floor_divide(olens-1, self.time_reduction_factor) + 1
+            x = x[:, :: self.time_reduction_factor, :]
+            olens = torch.floor_divide(olens - 1, self.time_reduction_factor) + 1
 
         return x, olens, None
 
@@ -135,9 +135,7 @@
 
         batch_size = xs_pad.shape[0]
 
-        hidden_sizes = [
-            self._output_size for i in range(5)
-        ]
+        hidden_sizes = [self._output_size for i in range(5)]
 
         state = [
             torch.zeros(
@@ -152,7 +150,7 @@
 
         xs_out = []
         for t in range(xs_pad.shape[1]):
-            x_t = xs_pad[:,t,:]
+            x_t = xs_pad[:, t, :]
             for idx, block in enumerate(self.rwkv_blocks):
                 x_t, state = block(x_t, state=state)
             xs_out.append(x_t)

--
Gitblit v1.9.1