From 9b4e9cc8a0311e5243d69b73ed073e7ea441982e Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 27 Mar 2024 16:05:29 +0800
Subject: [PATCH] train: make DataloaderMapStyle a class with per-epoch build_iter
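
Convert DataloaderMapStyle from a registered factory function into a
registered class: the constructor builds the train/valid datasets once,
and a new build_iter(epoch) method rebuilds the batch samplers and
dataloaders for each epoch, calling set_epoch(epoch) on both samplers so
batch shuffling varies across epochs. The default batch sampler changes
from DynamicBatchLocalShuffleSampler to BatchSampler, and the old
function-based DataloaderMapStyle stays in place but is unregistered.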
---
funasr/datasets/dataloader_entry.py | 35 +++++++++++++++++++++++++++++++++--
1 file changed, 33 insertions(+), 2 deletions(-)
diff --git a/funasr/datasets/dataloader_entry.py b/funasr/datasets/dataloader_entry.py
index 21e3834..abb2828 100644
--- a/funasr/datasets/dataloader_entry.py
+++ b/funasr/datasets/dataloader_entry.py
@@ -4,7 +4,7 @@
 from funasr.register import tables
 
 
-@tables.register("dataloader_classes", "DataloaderMapStyle")
+# @tables.register("dataloader_classes", "DataloaderMapStyle")
 def DataloaderMapStyle(frontend=None, tokenizer=None, **kwargs):
     # dataset
     logging.info("Build dataloader")
@@ -13,7 +13,7 @@
     dataset_val = dataset_class(kwargs.get("valid_data_set_list"), frontend=frontend, tokenizer=tokenizer, is_training=False, **kwargs.get("dataset_conf"))
 
     # dataloader
-    batch_sampler = kwargs["dataset_conf"].get("batch_sampler", "DynamicBatchLocalShuffleSampler")
+    batch_sampler = kwargs["dataset_conf"].get("batch_sampler", "BatchSampler")
     batch_sampler_val = None
     if batch_sampler is not None:
         batch_sampler_class = tables.batch_sampler_classes.get(batch_sampler)
@@ -25,6 +25,37 @@
     return dataloader_tr, dataloader_val
 
 
+@tables.register("dataloader_classes", "DataloaderMapStyle")
+class DataloaderMapStyle:
+    def __init__(self, frontend=None, tokenizer=None, **kwargs):
+        # dataset
+        logging.info("Build dataloader")
+        dataset_class = tables.dataset_classes.get(kwargs.get("dataset", "AudioDataset"))
+        dataset_tr = dataset_class(kwargs.get("train_data_set_list"), frontend=frontend, tokenizer=tokenizer,
+                                   is_training=True, **kwargs.get("dataset_conf"))
+        dataset_val = dataset_class(kwargs.get("valid_data_set_list"), frontend=frontend, tokenizer=tokenizer,
+                                    is_training=False, **kwargs.get("dataset_conf"))
+
+        self.dataset_tr = dataset_tr
+        self.dataset_val = dataset_val
+        self.kwargs = kwargs
+
+    def build_iter(self, epoch=0):
+        # dataloader
+        batch_sampler = self.kwargs["dataset_conf"].get("batch_sampler", "BatchSampler")
+        batch_sampler_val = None
+        if batch_sampler is not None:
+            batch_sampler_class = tables.batch_sampler_classes.get(batch_sampler)
+            batch_sampler = batch_sampler_class(self.dataset_tr, **self.kwargs.get("dataset_conf"))
+            batch_sampler_val = batch_sampler_class(self.dataset_val, is_training=False, **self.kwargs.get("dataset_conf"))
+
+        batch_sampler["batch_sampler"].set_epoch(epoch)
+        batch_sampler_val["batch_sampler"].set_epoch(epoch)
+        dataloader_tr = torch.utils.data.DataLoader(self.dataset_tr, collate_fn=self.dataset_tr.collator, **batch_sampler)
+        dataloader_val = torch.utils.data.DataLoader(self.dataset_val, collate_fn=self.dataset_val.collator, **batch_sampler_val)
+
+        return dataloader_tr, dataloader_val
+
 @tables.register("dataloader_classes", "DataloaderIterable")
 def DataloaderIterable(frontend=None, tokenizer=None, **kwargs):
--
Gitblit v1.9.1
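
A minimal sketch of how a training loop might consume the class-based
DataloaderMapStyle after this patch. The config values below (file names,
batch size) and the bare training loop are illustrative assumptions, not
part of the patch; only the registry lookup and the build_iter(epoch)
call mirror the code above.

    from funasr.register import tables

    # hypothetical config; the keys mirror what DataloaderMapStyle reads
    kwargs = {
        "dataset": "AudioDataset",
        "train_data_set_list": "train.jsonl",   # assumed path
        "valid_data_set_list": "valid.jsonl",   # assumed path
        "dataset_conf": {"batch_sampler": "BatchSampler", "batch_size": 32},
    }

    dataloader_class = tables.dataloader_classes.get("DataloaderMapStyle")
    # frontend/tokenizer elided for brevity; real training passes both
    dataloader = dataloader_class(frontend=None, tokenizer=None, **kwargs)

    for epoch in range(10):
        # rebuilding per epoch lets set_epoch(epoch) reshuffle the batches
        dataloader_tr, dataloader_val = dataloader.build_iter(epoch)
        for batch in dataloader_tr:
            ...  # forward/backward step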