From ed952ff630a1ad89df488d3c657ef736129e6c10 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Sun, 24 Mar 2024 15:11:02 +0800
Subject: [PATCH] finetune
---
funasr/train_utils/trainer.py | 15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)
diff --git a/funasr/train_utils/trainer.py b/funasr/train_utils/trainer.py
index c665394..2d47fc1 100644
--- a/funasr/train_utils/trainer.py
+++ b/funasr/train_utils/trainer.py
@@ -371,8 +371,7 @@
if self.use_ddp or self.use_fsdp:
dist.barrier()
-
- iterator_stop = torch.tensor(0).to(self.device)
+ iterator_stop = torch.tensor(0).to(self.device)
@@ -402,12 +401,10 @@
iterator_stop = torch.tensor(0).to(self.device)
dataloader_val.batch_sampler.set_epoch(epoch)
for batch_idx, batch in enumerate(dataloader_val):
- # if self.use_ddp or self.use_fsdp:
- # dist.all_reduce(iterator_stop, dist.ReduceOp.SUM)
- # if epoch >= 1:
- # print(f"iterator_stop: {iterator_stop}\n")
- # if iterator_stop > 0:
- # break
+ if self.use_ddp or self.use_fsdp:
+ dist.all_reduce(iterator_stop, dist.ReduceOp.SUM)
+ if iterator_stop > 0:
+ break
time1 = time.perf_counter()
speed_stats["data_load"] = f"{time1 - time5:0.3f}"
batch = to_device(batch, self.device)
@@ -467,7 +464,7 @@
if self.use_ddp or self.use_fsdp:
dist.barrier()
- iterator_stop = torch.tensor(0).to(self.device)
+ iterator_stop = torch.tensor(0).to(self.device)
def log(self,
--
Gitblit v1.9.1