游雁
2023-11-16 4ace5a95b052d338947fc88809a440ccd55cf6b4
funasr/modules/repeat.py
@@ -7,32 +7,45 @@
"""Repeat the same layer definition."""
from typing import Dict, List, Optional
from funasr.modules.layer_norm import LayerNorm
import torch
class MultiSequential(torch.nn.Sequential):
    """Multi-input multi-output torch.nn.Sequential with LayerDrop."""

    def __init__(self, *args, layer_drop_rate=0.0):
        """Initialize MultiSequential with layer_drop.

        Args:
            layer_drop_rate (float): Probability of dropping out each fn (layer).
        """
        super().__init__(*args)
        self.layer_drop_rate = layer_drop_rate

    def forward(self, *args):
        """Apply each contained module once, optionally skipping layers.

        Each module receives the (possibly multiple) outputs of the previous
        one as positional arguments. During training, each layer is skipped
        independently with probability ``layer_drop_rate`` (LayerDrop);
        in eval mode every layer always runs.

        Returns:
            The outputs of the last executed module.
        """
        # One uniform sample per layer decides whether that layer is skipped.
        _probs = torch.empty(len(self)).uniform_()
        for idx, m in enumerate(self):
            if not self.training or (_probs[idx] >= self.layer_drop_rate):
                args = m(*args)
        return args
def repeat(N, fn, layer_drop_rate=0.0):
    """Repeat module N times.

    Args:
        N (int): Number of repeat time.
        fn (Callable): Function to generate module; called as ``fn(n)`` with
            the layer index ``n`` in ``range(N)``.
        layer_drop_rate (float): Probability of dropping out each fn (layer).

    Returns:
        MultiSequential: Repeated model instance.
    """
    return MultiSequential(*[fn(n) for n in range(N)], layer_drop_rate=layer_drop_rate)
class MultiBlocks(torch.nn.Module):
    """Hold a list of blocks plus a final normalization layer.

    NOTE(review): only the constructor is visible in this chunk; the rest of
    the class (e.g. ``forward``) is defined further down the file.
    """

    def __init__(
        self,
        block_list: List[torch.nn.Module],
        output_size: int,
        norm_class: torch.nn.Module = LayerNorm,
    ) -> None:
        """Construct a MultiBlocks object.

        Args:
            block_list: Modules to apply, stored as an ``nn.ModuleList``.
            output_size: Feature dimension normalized by the final norm layer.
            norm_class: Normalization layer class, instantiated as
                ``norm_class(output_size)``.
        """
        super().__init__()
        self.blocks = torch.nn.ModuleList(block_list)
        self.norm_blocks = norm_class(output_size)
        self.num_blocks = len(block_list)