From 8dab6d184a034ca86eafa644ea0d2100aadfe27d Mon Sep 17 00:00:00 2001
From: jmwang66 <wangjiaming.wjm@alibaba-inc.com>
Date: Tue, 09 May 2023 10:58:33 +0800
Subject: [PATCH] Merge pull request #473 from alibaba-damo-academy/dev_smohan
---
funasr/train/distributed_utils.py | 4 ++--
 funasr/train/distributed_utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/funasr/train/distributed_utils.py b/funasr/train/distributed_utils.py
index c897930..13f5744 100644
--- a/funasr/train/distributed_utils.py
+++ b/funasr/train/distributed_utils.py
@@ -53,7 +53,7 @@
# https://pytorch.org/docs/stable/distributed.html#torch.distributed.init_process_group
os.environ.setdefault("NCCL_BLOCKING_WAIT", "1")
- torch.distributed.init_process_group(backend='nccl',
+ torch.distributed.init_process_group(backend=self.dist_backend,
init_method=self.dist_init_method,
world_size=args.dist_world_size,
rank=args.dist_rank)
@@ -113,7 +113,7 @@
# https://pytorch.org/docs/stable/distributed.html#torch.distributed.init_process_group
os.environ.setdefault("NCCL_BLOCKING_WAIT", "1")
- torch.distributed.init_process_group(backend='nccl', init_method='env://')
+ torch.distributed.init_process_group(backend=self.dist_backend, init_method='env://')
self.dist_rank = torch.distributed.get_rank()
self.dist_world_size = torch.distributed.get_world_size()
self.local_rank = args.local_rank
--
Gitblit v1.9.1