From d80ac2fd2df4e7fb8a28acfa512bb11472b5cc99 Mon Sep 17 00:00:00 2001
From: liugz18 <57401541+liugz18@users.noreply.github.com>
Date: Thu, 18 Jul 2024 21:34:55 +0800
Subject: [PATCH] Rename 'res' in line 514 to avoid a naming conflict with line 365

---
 funasr/datasets/audio_datasets/espnet_samplers.py |   17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/funasr/datasets/audio_datasets/espnet_samplers.py b/funasr/datasets/audio_datasets/espnet_samplers.py
index 3f14d09..004201e 100644
--- a/funasr/datasets/audio_datasets/espnet_samplers.py
+++ b/funasr/datasets/audio_datasets/espnet_samplers.py
@@ -41,6 +41,7 @@
         drop_last=False,
         is_training: bool = True,
         sort_size: int = 1024,
+        start_step: int = 0,
         **kwargs,
     ):
 
@@ -70,7 +71,10 @@
         self.max_token_length = kwargs.get("max_token_length", 2048)
         self.min_token_length = kwargs.get("min_token_length", 0)
         self.length_scale_source = kwargs.get("length_scale_source", 1.0)
-
+        self.start_step = start_step
+        self.batch_num = 1
+        if self.start_step > 0:
+            logging.info(f"Warning, start_step > 0, dataloader start from step: {self.start_step}")
         # super().__init__(dataset, num_replicas=num_replicas, rank=rank,
         #                  shuffle=shuffle, drop_last=drop_last)
 
@@ -92,7 +96,7 @@
         max_len_in_batch = 0  # Tracks the max sample length within the current batch
 
         for idx in sorted_indices:
-          
+
             # original_sample_length = self.dataset.get_source_len(idx)
             # if (
             #     original_sample_length < self.min_token_length
@@ -142,14 +146,19 @@
         # Allocate the batches to the current rank
         start_idx = self.rank * batches_per_rank
         end_idx = start_idx + batches_per_rank
-        rank_batches = buffer_batches[start_idx:end_idx]
+        rank_batches = buffer_batches[start_idx + self.start_step : end_idx]
 
+        self.batch_num = len(rank_batches)
+
+        logging.info(
+            f"rank: {self.rank}, dataloader start from step: {self.start_step}, batch_num: {end_idx-start_idx}, batch_num_after_step: {len(rank_batches)}"
+        )
         # Return an iterator over the batches for the current rank
         return iter(rank_batches)
 
     def __len__(self):
         # Calculate the number of batches per epoch for the current rank
-        return 1
+        return self.batch_num
 
     def set_epoch(self, epoch):
         # Set the epoch for shuffling

--
Gitblit v1.9.1