游雁
2024-03-24 873cfae5c347b940e38e853d8579a6b4e85ada05
funasr/datasets/dataloader_entry.py
@@ -25,6 +25,37 @@
   
   return dataloader_tr, dataloader_val
# @tables.register("dataloader_classes", "DataloaderMapStyle")
class DataloaderMapStyle:
    """Map-style dataloader builder: constructs train/val datasets once, then
    builds fresh epoch-aware DataLoaders via :meth:`build_iter`.
    """

    def __init__(self, frontend=None, tokenizer=None, **kwargs):
        """Build the train and validation datasets.

        Args:
            frontend: optional feature-extraction frontend passed to the dataset.
            tokenizer: optional tokenizer passed to the dataset.
            **kwargs: full config; must contain "train_data_set_list",
                "valid_data_set_list" and "dataset_conf"; "dataset" selects the
                dataset class from the registry (default "AudioDataset").
        """
        logging.info("Build dataloader")
        dataset_class = tables.dataset_classes.get(kwargs.get("dataset", "AudioDataset"))
        self.dataset_tr = dataset_class(
            kwargs.get("train_data_set_list"),
            frontend=frontend,
            tokenizer=tokenizer,
            is_training=True,
            **kwargs.get("dataset_conf"),
        )
        self.dataset_val = dataset_class(
            kwargs.get("valid_data_set_list"),
            frontend=frontend,
            tokenizer=tokenizer,
            is_training=False,
            **kwargs.get("dataset_conf"),
        )
        self.kwargs = kwargs

    def build_iter(self, epoch=0):
        """Build (train, val) DataLoaders for the given epoch.

        Args:
            epoch: current epoch, forwarded to the samplers' set_epoch() so
                shuffling differs per epoch.

        Returns:
            Tuple of (dataloader_tr, dataloader_val).
        """
        dataset_conf = self.kwargs["dataset_conf"]
        sampler_name = dataset_conf.get("batch_sampler", "BatchSampler")
        batch_sampler = None
        batch_sampler_val = None
        if sampler_name is not None:
            batch_sampler_class = tables.batch_sampler_classes.get(sampler_name)
            # The sampler factory returns a dict of DataLoader kwargs; the
            # actual sampler object lives under the "batch_sampler" key
            # (see the ** unpacking into DataLoader below).
            batch_sampler = batch_sampler_class(self.dataset_tr, **dataset_conf)
            batch_sampler_val = batch_sampler_class(self.dataset_val, is_training=False, **dataset_conf)
            batch_sampler["batch_sampler"].set_epoch(epoch)
            # Bug fixes vs. previous revision: `epohc` typo (NameError) and the
            # val sampler dict was called directly instead of being indexed.
            batch_sampler_val["batch_sampler"].set_epoch(epoch)
        # Guard against a disabled sampler (sampler_name is None): fall back to
        # no extra kwargs instead of unpacking None.
        dataloader_tr = torch.utils.data.DataLoader(
            self.dataset_tr, collate_fn=self.dataset_tr.collator, **(batch_sampler or {})
        )
        dataloader_val = torch.utils.data.DataLoader(
            self.dataset_val, collate_fn=self.dataset_val.collator, **(batch_sampler_val or {})
        )
        return dataloader_tr, dataloader_val
@tables.register("dataloader_classes", "DataloaderIterable")
def DataloaderIterable(frontend=None, tokenizer=None, **kwargs):