From 420849cfb3113c9e7475e5e72b3f8d7912fffe08 Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: 星期一, 24 四月 2023 19:42:51 +0800
Subject: [PATCH] update
---
funasr/bin/train.py | 25 +++++++++++++++++++++----
 1 file changed, 21 insertions(+), 4 deletions(-)
diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index f056b08..26e0e6a 100755
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -19,7 +19,6 @@
from funasr.torch_utils.model_summary import model_summary
from funasr.torch_utils.pytorch_version import pytorch_cudnn_version
from funasr.torch_utils.set_all_random_seed import set_all_random_seed
-from funasr.utils import config_argparse
from funasr.utils.prepare_data import prepare_data
from funasr.utils.types import str2bool
from funasr.utils.types import str_or_none
@@ -27,7 +26,7 @@
def get_parser():
- parser = config_argparse.ArgumentParser(
+ parser = argparse.ArgumentParser(
description="FunASR Common Training Parser",
)
@@ -77,6 +76,12 @@
default=False,
help="Whether to use the find_unused_parameters in "
"torch.nn.parallel.DistributedDataParallel ",
+ )
+ parser.add_argument(
+ "--gpu_id",
+ type=int,
+ default=0,
+ help="local gpu id.",
)
# cudnn related
@@ -280,6 +285,17 @@
default=[],
)
parser.add_argument(
+ "--train_shape_file",
+ type=str, action="append",
+ default=[],
+ )
+ parser.add_argument(
+ "--valid_shape_file",
+ type=str,
+ action="append",
+ default=[],
+ )
+ parser.add_argument(
"--use_preprocessor",
type=str2bool,
default=True,
@@ -390,8 +406,8 @@
parser = get_parser()
args, extra_task_params = parser.parse_known_args()
if extra_task_params:
- task_args = build_args(args, extra_task_params)
- args = argparse.Namespace(**vars(args), **vars(task_args))
+ args = build_args(args, parser, extra_task_params)
+ # args = argparse.Namespace(**vars(args), **vars(task_args))
# set random seed
set_all_random_seed(args.seed)
@@ -400,6 +416,7 @@
torch.backends.cudnn.deterministic = args.cudnn_deterministic
# ddp init
+ os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu_id)
args.distributed = args.dist_world_size > 1
distributed_option = build_distributed(args)
--
Gitblit v1.9.1