From fc246ab820cf57ba08afbe3cbeb4d471036eb83c Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Thu, 07 Dec 2023 00:27:00 +0800
Subject: [PATCH] train_cli: disable pdb breakpoint, tighten DDP detection, pass find_unused_parameters
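
Comment out the interactive pdb breakpoint left in main(), treat the run as
distributed only when WORLD_SIZE is set and greater than 1, forward
train_conf.find_unused_parameters (default False) to the DDP wrapper, and
remove the unused train()/val() stubs.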
---
funasr/cli/train_cli.py | 14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)
diff --git a/funasr/cli/train_cli.py b/funasr/cli/train_cli.py
index 7e15875..54cd2e8 100644
--- a/funasr/cli/train_cli.py
+++ b/funasr/cli/train_cli.py
@@ -37,7 +37,7 @@
@hydra.main()
def main(kwargs: DictConfig):
# preprocess_config(kwargs)
- import pdb; pdb.set_trace()
+ # import pdb; pdb.set_trace()
# set random seed
set_all_random_seed(kwargs.get("seed", 0))
torch.backends.cudnn.enabled = kwargs.get("cudnn_enabled", torch.backends.cudnn.enabled)
@@ -46,7 +46,7 @@
local_rank = int(os.environ.get('LOCAL_RANK', 0))
# Check if we are using DDP or FSDP
- use_ddp = 'WORLD_SIZE' in os.environ
+ use_ddp = 'WORLD_SIZE' in os.environ and int(os.environ["WORLD_SIZE"]) > 1
use_fsdp = kwargs.get("use_fsdp", None)
if use_ddp or use_fsdp:
dist.init_process_group(backend=kwargs.get("backend", "nccl"), init_method='env://')
@@ -109,7 +109,8 @@
if use_ddp:
model = model.cuda(local_rank)
- model = DDP(model, device_ids=[local_rank])
+ model = DDP(model, device_ids=[local_rank],
+ find_unused_parameters=kwargs.get("train_conf", {}).get("find_unused_parameters", False))
elif use_fsdp:
model = FSDP(model).cuda(local_rank)
else:
@@ -157,13 +158,6 @@
torch.distributed.destroy_process_group()
-
-def train(epoch, model, op):
- pass
-
-def val():
- pass
-
if __name__ == "__main__":
main()
\ No newline at end of file
--
Gitblit v1.9.1