From 5d74e107fc5696b70e75003c278f8babd17161e8 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Sun, 24 Mar 2024 00:58:56 +0800
Subject: [PATCH] trainer: barrier before train/val loops, set sampler epoch, trace iterator_stop
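
Synchronize ranks at the start of the training and validation loops and
reshuffle the training sampler every epoch:

* add dist.barrier() before iterating so all ranks enter the loop together;
* call dataloader_train.batch_sampler.set_epoch(epoch) so the distributed
  batch sampler draws a fresh, rank-consistent shuffle each epoch;
* add temporary debug prints tracing the cross-rank iterator_stop signal;
* comment out the recursive_average() call on validation stats, leaving only
  the explicit all_reduce on weight below it.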
---
funasr/train_utils/trainer.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)
diff --git a/funasr/train_utils/trainer.py b/funasr/train_utils/trainer.py
index 77eee60..23c18d9 100644
--- a/funasr/train_utils/trainer.py
+++ b/funasr/train_utils/trainer.py
@@ -249,6 +249,9 @@
speed_stats = {}
time5 = time.perf_counter()
iterator_stop = torch.tensor(0).to(self.device)
+ dist.barrier()
+ print(f"before iter, iterator_stop: {iterator_stop}\n")
+ dataloader_train.batch_sampler.set_epoch(epoch)
for batch_idx, batch in enumerate(dataloader_train):
if self.use_ddp or self.use_fsdp:
dist.all_reduce(iterator_stop, dist.ReduceOp.SUM)
@@ -392,9 +395,13 @@
speed_stats = {}
time5 = time.perf_counter()
iterator_stop = torch.tensor(0).to(self.device)
+ dist.barrier()
+ print(f"before iter, iterator_stop: {iterator_stop}\n")
for batch_idx, batch in enumerate(dataloader_val):
if self.use_ddp or self.use_fsdp:
dist.all_reduce(iterator_stop, dist.ReduceOp.SUM)
+ if epoch >= 1:
+ print(f"iterator_stop: {iterator_stop}\n")
if iterator_stop > 0:
break
time1 = time.perf_counter()
@@ -410,7 +417,7 @@
# Apply weighted averaging for loss and stats
loss = (loss * weight.type(loss.dtype)).sum()
# if distributed, this method can also apply all_reduce()
- stats, weight = recursive_average(stats, weight, distributed=True)
+ # stats, weight = recursive_average(stats, weight, distributed=True)
if self.use_ddp or self.use_fsdp:
dist.all_reduce(weight, op=dist.ReduceOp.SUM)
# Now weight is summation over all workers
--
Gitblit v1.9.1
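
The hunks above touch a common DDP cooperative early-stop loop: every rank
all-reduces an iterator_stop flag at the top of each iteration, and a rank
that exhausts its data shard raises the flag so the other ranks break out
instead of blocking forever in a later collective. Below is a minimal sketch
of that pattern with the patch's added barrier and set_epoch calls; names
such as run_epoch are illustrative and this is not FunASR's actual trainer
code:

    import torch
    import torch.distributed as dist

    def run_epoch(dataloader, device, epoch):
        # Rank-consistent reshuffle: every rank must pass the same epoch.
        dataloader.batch_sampler.set_epoch(epoch)
        iterator_stop = torch.tensor(0, device=device)
        dist.barrier()  # all ranks enter the loop together
        for batch in dataloader:
            # SUM over ranks: becomes > 0 once any rank flags exhaustion.
            dist.all_reduce(iterator_stop, op=dist.ReduceOp.SUM)
            if iterator_stop > 0:
                break
            ...  # forward / backward / optimizer step
        else:
            # This rank ran out of batches first; signal through the
            # all_reduce the surviving ranks issue at the top of their next
            # iteration, so collectives stay matched and nothing deadlocks.
            iterator_stop.fill_(1)
            dist.all_reduce(iterator_stop, op=dist.ReduceOp.SUM)

Calling set_epoch() before iterating matters because DistributedSampler
seeds its shuffle with the epoch number; without it, every epoch replays the
epoch-0 batch order.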