From 873cfae5c347b940e38e853d8579a6b4e85ada05 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Sun, 24 Mar 2024 00:45:45 +0800
Subject: [PATCH] Rebuild train/val dataloaders per epoch and fix the distributed dynamic batch sampler
---
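Note: this patch routes dataloader construction in funasr/bin/train.py through
a dataloader class so the train/val iterators can be rebuilt once per epoch
(see the commented-out lines below). A minimal sketch of the intended call
pattern, reconstructed from those comments; the train_epoch keyword names are
assumptions, not the final wiring:

    dataloader = dataloader_class(**kwargs)  # e.g. the new DataloaderMapStyle
    for epoch in range(trainer.start_epoch, trainer.max_epoch + 1):
        # rebuilding per epoch lets the batch sampler reshuffle with the epoch seed
        dataloader_tr, dataloader_val = dataloader.build_iter(epoch)
        trainer.train_epoch(model=model, optim=optim,  # assumed kwarg names
                            dataloader_train=dataloader_tr,
                            dataloader_val=dataloader_val,
                            epoch=epoch)
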
 funasr/train_utils/trainer.py              |  7 +++++++
 funasr/bin/train.py                        | 12 ++++++------
 funasr/datasets/audio_datasets/samplers.py |  2 +-
 funasr/datasets/dataloader_entry.py        | 31 +++++++++++++++++++++++++++++++
4 files changed, 45 insertions(+), 7 deletions(-)
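The new DataloaderMapStyle.build_iter assumes the registered batch sampler
factory returns a dict of torch DataLoader keyword arguments, with the actual
sampler stored under the "batch_sampler" key; that is why the dict is splatted
into the DataLoader call and indexed before set_epoch. A sketch of that
assumed contract (the extra keys are illustrative):

    batch_sampler = {
        "batch_sampler": sampler,  # must expose set_epoch(epoch)
        "num_workers": 4,          # illustrative extra DataLoader kwargs
        "pin_memory": True,
    }
    batch_sampler["batch_sampler"].set_epoch(epoch)  # reshuffle for this epoch
    dataloader_tr = torch.utils.data.DataLoader(
        dataset_tr, collate_fn=dataset_tr.collator, **batch_sampler
    )
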
diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index 1db37dd..0ff4ba1 100644
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -149,8 +149,8 @@
     # dataset
     logging.info("Build dataloader")
     dataloader_class = tables.dataloader_classes.get(kwargs["dataset_conf"].get("dataloader", "DataloaderMapStyle"))
+    # dataloader = dataloader_class(**kwargs)
     dataloader_tr, dataloader_val = dataloader_class(**kwargs)
-
     trainer = Trainer(local_rank=local_rank,
                       use_ddp=use_ddp,
                       use_fsdp=use_fsdp,
@@ -172,15 +172,15 @@
     except:
         writer = None

-    if use_ddp or use_fsdp:
-        context = Join([model])
-    else:
-        context = nullcontext()
+    # if use_ddp or use_fsdp:
+    #     context = Join([model])
+    # else:
+    context = nullcontext()

     for epoch in range(trainer.start_epoch, trainer.max_epoch + 1):
         time1 = time.perf_counter()
         with context:
-
+            # dataloader_tr, dataloader_val = dataloader.build_iter(epoch)
             trainer.train_epoch(
                 model=model,
                 optim=optim,
diff --git a/funasr/datasets/audio_datasets/samplers.py b/funasr/datasets/audio_datasets/samplers.py
index a0ff4b6..4d78d52 100644
--- a/funasr/datasets/audio_datasets/samplers.py
+++ b/funasr/datasets/audio_datasets/samplers.py
@@ -212,7 +212,8 @@
     def set_epoch(self, epoch):
         self.epoch = epoch

-class CustomDistributedDynamicBatchSampler(Sampler):
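+# subclassing torch's DistributedSampler provides the standard num_replicas/rank handling and the set_epoch contract the trainer relies on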
+class CustomDistributedDynamicBatchSampler(DistributedSampler):
     def __init__(self, dataset,
                  batch_size,
                  num_replicas=None,
diff --git a/funasr/datasets/dataloader_entry.py b/funasr/datasets/dataloader_entry.py
index a1e4da2..0de7e40 100644
--- a/funasr/datasets/dataloader_entry.py
+++ b/funasr/datasets/dataloader_entry.py
@@ -25,6 +25,37 @@
     return dataloader_tr, dataloader_val


+# @tables.register("dataloader_classes", "DataloaderMapStyle")
+class DataloaderMapStyle:
+    def __init__(self, frontend=None, tokenizer=None, **kwargs):
+        # dataset
+        logging.info("Build dataloader")
+        dataset_class = tables.dataset_classes.get(kwargs.get("dataset", "AudioDataset"))
+        dataset_tr = dataset_class(kwargs.get("train_data_set_list"), frontend=frontend, tokenizer=tokenizer,
+                                   is_training=True, **kwargs.get("dataset_conf"))
+        dataset_val = dataset_class(kwargs.get("valid_data_set_list"), frontend=frontend, tokenizer=tokenizer,
+                                    is_training=False, **kwargs.get("dataset_conf"))
+
+        self.dataset_tr = dataset_tr
+        self.dataset_val = dataset_val
+        self.kwargs = kwargs
+
+    def build_iter(self, epoch=0):
+        # dataloader
+        batch_sampler = self.kwargs["dataset_conf"].get("batch_sampler", "BatchSampler")
+        batch_sampler_val = None
+        if batch_sampler is not None:
+            batch_sampler_class = tables.batch_sampler_classes.get(batch_sampler)
+            batch_sampler = batch_sampler_class(self.dataset_tr, **self.kwargs.get("dataset_conf"))
+            batch_sampler_val = batch_sampler_class(self.dataset_val, is_training=False, **self.kwargs.get("dataset_conf"))
+
+        batch_sampler["batch_sampler"].set_epoch(epoch)
+        batch_sampler_val["batch_sampler"].set_epoch(epoch)
+        dataloader_tr = torch.utils.data.DataLoader(self.dataset_tr, collate_fn=self.dataset_tr.collator, **batch_sampler)
+        dataloader_val = torch.utils.data.DataLoader(self.dataset_val, collate_fn=self.dataset_val.collator, **batch_sampler_val)
+
+        return dataloader_tr, dataloader_val
+
 @tables.register("dataloader_classes", "DataloaderIterable")
 def DataloaderIterable(frontend=None, tokenizer=None, **kwargs):
diff --git a/funasr/train_utils/trainer.py b/funasr/train_utils/trainer.py
index 77eee60..d0023fd 100644
--- a/funasr/train_utils/trainer.py
+++ b/funasr/train_utils/trainer.py
@@ -249,6 +249,11 @@
         speed_stats = {}
         time5 = time.perf_counter()
         iterator_stop = torch.tensor(0).to(self.device)
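+        # note: assumes torch.distributed is initialized; sync all ranks before entering the loop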
+        dist.barrier()
+        print(f"before iter, iterator_stop: {iterator_stop}\n")
+        dataloader_train.batch_sampler.set_epoch(epoch)
         for batch_idx, batch in enumerate(dataloader_train):
             if self.use_ddp or self.use_fsdp:
                 dist.all_reduce(iterator_stop, dist.ReduceOp.SUM)
@@ -392,9 +397,14 @@
         speed_stats = {}
         time5 = time.perf_counter()
         iterator_stop = torch.tensor(0).to(self.device)
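+        # note: assumes torch.distributed is initialized; sync all ranks before validation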
+        dist.barrier()
+        print(f"before iter, iterator_stop: {iterator_stop}\n")
         for batch_idx, batch in enumerate(dataloader_val):
             if self.use_ddp or self.use_fsdp:
                 dist.all_reduce(iterator_stop, dist.ReduceOp.SUM)
+                if epoch >= 1:
+                    print(f"iterator_stop: {iterator_stop}\n")
                 if iterator_stop > 0:
                     break
             time1 = time.perf_counter()
--
Gitblit v1.9.1