From 520bbc3d5cd9e8039b3287a5a5eea28d2976f26f Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Wed, 14 Jun 2023 19:32:49 +0800
Subject: [PATCH] update repo
---
funasr/bin/train.py | 20 +++++++++++++-------
1 file changed, 13 insertions(+), 7 deletions(-)
diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index 53e5bde..21e1943 100755
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -272,8 +272,8 @@
parser.add_argument(
"--init_param",
type=str,
+ action="append",
default=[],
- nargs="*",
help="Specify the file path used for initialization of parameters. "
"The format is '<file_path>:<src_key>:<dst_key>:<exclude_keys>', "
"where file_path is the model file path, "
@@ -335,6 +335,12 @@
help="dev dataset",
)
parser.add_argument(
+ "--data_file_names",
+ type=str,
+ default="wav.scp,text",
+ help="input data files",
+ )
+ parser.add_argument(
"--speed_perturb",
type=float,
nargs="+",
@@ -346,12 +352,6 @@
type=str2bool,
default=True,
help="Apply preprocessing to data or not",
- )
- parser.add_argument(
- "--embed_path",
- type=str,
- default=None,
- help="for model which requires embeds",
)
# optimization related
@@ -519,6 +519,12 @@
dtype=getattr(torch, args.train_dtype),
device="cuda" if args.ngpu > 0 else "cpu",
)
+ for t in args.freeze_param:
+ for k, p in model.named_parameters():
+ if k.startswith(t + ".") or k == t:
+ logging.info(f"Setting {k}.requires_grad = False")
+ p.requires_grad = False
+
optimizers = build_optimizer(args, model=model)
schedulers = build_scheduler(args, optimizers)
--
Gitblit v1.9.1