From 2779602177ae5374547c7a7e17de0b11a166326d Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Mon, 29 Apr 2024 15:08:46 +0800
Subject: [PATCH] Merge branch 'dev_gzf_exp' of github.com:alibaba-damo-academy/FunASR into dev_gzf_exp merge

---
 funasr/models/rwkv_bat/rwkv.py |   18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/funasr/models/rwkv_bat/rwkv.py b/funasr/models/rwkv_bat/rwkv.py
index 422e1c8..c777cf2 100644
--- a/funasr/models/rwkv_bat/rwkv.py
+++ b/funasr/models/rwkv_bat/rwkv.py
@@ -1,16 +1,15 @@
-"""Receptance Weighted Key Value (RWKV) block definition.
-
-Based/modified from https://github.com/BlinkDL/RWKV-LM/blob/main/RWKV-v4/src/model.py
-
-"""
-
-from typing import Dict, Optional, Tuple
+#!/usr/bin/env python3
+# -*- encoding: utf-8 -*-
+# Copyright FunASR (https://github.com/alibaba-damo-academy/FunASR). All Rights Reserved.
+#  MIT License  (https://opensource.org/licenses/MIT)
 
 import torch
+from typing import Dict, Optional, Tuple
 
-from funasr.models.rwkv_bat.rwkv_attention import EncoderSelfAttention, DecoderSelfAttention
-from funasr.models.rwkv_bat.rwkv_feed_forward import FeedForward
 from funasr.models.transformer.layer_norm import LayerNorm
+from funasr.models.rwkv_bat.rwkv_feed_forward import FeedForward
+from funasr.models.rwkv_bat.rwkv_attention import EncoderSelfAttention, DecoderSelfAttention
+
 
 class RWKV(torch.nn.Module):
     """RWKV module.
@@ -77,6 +76,7 @@
         x = x + self.dropout_ffn(ffn)
         return x, state
 
+
 class RWKVDecoderLayer(torch.nn.Module):
     """RWKV module.
 

--
Gitblit v1.9.1