From b9d1425028e480aa2c8dbd3502207e443dcd2060 Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Tue, 25 Apr 2023 01:09:03 +0800
Subject: [PATCH] train.py: add distributed training options and dataset_conf argument

---
 funasr/bin/train.py |   51 +++++++++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 47 insertions(+), 4 deletions(-)

diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index 9c8f672..d3ebaac 100755
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -21,7 +21,9 @@
 from funasr.torch_utils.set_all_random_seed import set_all_random_seed
 from funasr.utils.nested_dict_action import NestedDictAction
 from funasr.utils.prepare_data import prepare_data
+from funasr.utils.types import int_or_none
 from funasr.utils.types import str2bool
+from funasr.utils.types import str2triple_str
 from funasr.utils.types import str_or_none
 from funasr.utils.yaml_no_alias_safe_dump import yaml_no_alias_safe_dump
 
@@ -58,18 +60,51 @@
     )
     parser.add_argument(
         "--dist_world_size",
-        default=None,
+        type=int,
+        default=1,
         help="number of nodes for distributed training",
     )
     parser.add_argument(
         "--dist_rank",
+        type=int,
         default=None,
         help="node rank for distributed training",
     )
     parser.add_argument(
         "--local_rank",
+        type=int,
         default=None,
         help="local rank for distributed training",
+    )
+    parser.add_argument(
+        "--dist_master_addr",
+        default=None,
+        type=str_or_none,
+        help="The master address for distributed training. "
+             "This value is used when dist_init_method == 'env://'",
+    )
+    parser.add_argument(
+        "--dist_master_port",
+        default=None,
+        type=int_or_none,
+        help="The master port for distributed training"
+             "This value is used when dist_init_method == 'env://'",
+    )
+    parser.add_argument(
+        "--dist_launcher",
+        default=None,
+        type=str_or_none,
+        choices=["slurm", "mpi", None],
+        help="The launcher type for distributed training",
+    )
+    parser.add_argument(
+        "--multiprocessing_distributed",
+        default=True,
+        type=str2bool,
+        help="Use multi-processing distributed training to launch "
+             "N processes per node, which has N GPUs. This is the "
+             "fastest way to use PyTorch for either single node or "
+             "multi node data parallel training",
     )
     parser.add_argument(
         "--unused_parameters",
@@ -263,6 +298,12 @@
         help="whether to use dataloader for large dataset",
     )
     parser.add_argument(
+        "--dataset_conf",
+        action=NestedDictAction,
+        default=dict(),
+        help=f"The keyword arguments for dataset",
+    )
+    parser.add_argument(
         "--train_data_file",
         type=str,
         default=None,
@@ -276,18 +317,21 @@
     )
     parser.add_argument(
         "--train_data_path_and_name_and_type",
+        type=str2triple_str,
         action="append",
         default=[],
         help="e.g. '--train_data_path_and_name_and_type some/path/a.scp,foo,sound'. ",
     )
     parser.add_argument(
         "--valid_data_path_and_name_and_type",
+        type=str2triple_str,
         action="append",
         default=[],
     )
     parser.add_argument(
         "--train_shape_file",
-        type=str, action="append",
+        type=str,
+        action="append",
         default=[],
     )
     parser.add_argument(
@@ -434,7 +478,6 @@
     args, extra_task_params = parser.parse_known_args()
     if extra_task_params:
         args = build_args(args, parser, extra_task_params)
-        # args = argparse.Namespace(**vars(args), **vars(task_args))
 
     # set random seed
     set_all_random_seed(args.seed)
@@ -444,7 +487,7 @@
 
     # ddp init
     os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu_id)
-    args.distributed = args.dist_world_size > 1
+    args.distributed = args.ngpu > 1 or args.dist_world_size > 1
     distributed_option = build_distributed(args)
 
     # for logging

--
Gitblit v1.9.1