From 33d3d2084403fd34b79c835d2f2fe04f6cd8f738 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 13 Sep 2023 09:33:54 +0800
Subject: [PATCH] Merge branch 'main' of github.com:alibaba-damo-academy/FunASR add
---
funasr/bin/build_trainer.py | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)
diff --git a/funasr/bin/build_trainer.py b/funasr/bin/build_trainer.py
index 7979d25..52aa509 100644
--- a/funasr/bin/build_trainer.py
+++ b/funasr/bin/build_trainer.py
@@ -532,11 +532,9 @@
args = build_args(args, parser, extra_task_params)
if args.local_rank is not None:
- args.distributed = True
- args.simple_ddp = True
+ distributed = True
else:
- args.distributed = False
- args.ngpu = 1
+ distributed = False
args.local_rank = args.local_rank if args.local_rank is not None else 0
local_rank = args.local_rank
if "CUDA_VISIBLE_DEVICES" in os.environ.keys():
@@ -593,6 +591,12 @@
args.batch_type = "length"
args.oss_bucket = None
args.input_size = None
+ if distributed:
+ args.distributed = True
+ args.simple_ddp = True
+ else:
+ args.distributed = False
+ args.ngpu = 1
if optim is not None:
args.optim = optim
if lr is not None:
@@ -610,6 +614,7 @@
if batch_bins is not None:
if args.dataset_type == "small":
args.batch_bins = batch_bins
+ args.dataset_conf["batch_conf"]["batch_size"] = batch_bins
elif args.dataset_type == "large":
args.dataset_conf["batch_conf"]["batch_size"] = batch_bins
else:
@@ -627,8 +632,6 @@
torch.backends.cudnn.deterministic = args.cudnn_deterministic
# ddp init
- os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu_id)
- args.distributed = args.ngpu > 1 or args.dist_world_size > 1
distributed_option = build_distributed(args)
# for logging
--
Gitblit v1.9.1