From 54931dd4e1a099d7d6f144c4e12e5453deb3aa26 Mon Sep 17 00:00:00 2001
From: 雾聪 <wucong.lyb@alibaba-inc.com>
Date: Wed, 28 Jun 2023 10:41:57 +0800
Subject: [PATCH] Merge branch 'main' of https://github.com/alibaba-damo-academy/FunASR into main
---
funasr/bin/train.py | 39 ++++++++++++++++++++++++++++++++++++---
 1 file changed, 36 insertions(+), 3 deletions(-)
diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index 56a9269..1dc3fb5 100755
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -1,4 +1,6 @@
#!/usr/bin/env python3
+# Copyright FunASR (https://github.com/alibaba-damo-academy/FunASR). All Rights Reserved.
+# MIT License (https://opensource.org/licenses/MIT)
import argparse
import logging
@@ -16,6 +18,7 @@
from funasr.build_utils.build_scheduler import build_scheduler
from funasr.build_utils.build_trainer import build_trainer
from funasr.text.phoneme_tokenizer import g2p_choices
+from funasr.torch_utils.load_pretrained_model import load_pretrained_model
from funasr.torch_utils.model_summary import model_summary
from funasr.torch_utils.pytorch_version import pytorch_cudnn_version
from funasr.torch_utils.set_all_random_seed import set_all_random_seed
@@ -271,8 +274,8 @@
parser.add_argument(
"--init_param",
type=str,
+ action="append",
default=[],
- nargs="*",
help="Specify the file path used for initialization of parameters. "
"The format is '<file_path>:<src_key>:<dst_key>:<exclude_keys>', "
"where file_path is the model file path, "
@@ -298,7 +301,7 @@
"--freeze_param",
type=str,
default=[],
- nargs="*",
+ action="append",
help="Freeze parameters",
)
@@ -333,7 +336,19 @@
default="validation",
help="dev dataset",
)
-
+ parser.add_argument(
+ "--data_file_names",
+ type=str,
+ default="wav.scp,text",
+ help="input data files",
+ )
+ parser.add_argument(
+ "--speed_perturb",
+ type=float,
+ nargs="+",
+ default=None,
+ help="speed perturb",
+ )
parser.add_argument(
"--use_preprocessor",
type=str2bool,
@@ -506,6 +521,12 @@
dtype=getattr(torch, args.train_dtype),
device="cuda" if args.ngpu > 0 else "cpu",
)
+ for t in args.freeze_param:
+ for k, p in model.named_parameters():
+ if k.startswith(t + ".") or k == t:
+ logging.info(f"Setting {k}.requires_grad = False")
+ p.requires_grad = False
+
optimizers = build_optimizer(args, model=model)
schedulers = build_scheduler(args, optimizers)
@@ -530,6 +551,18 @@
else:
yaml_no_alias_safe_dump(vars(args), f, indent=4, sort_keys=False)
+ for p in args.init_param:
+ logging.info(f"Loading pretrained params from {p}")
+ load_pretrained_model(
+ model=model,
+ init_param=p,
+ ignore_init_mismatch=args.ignore_init_mismatch,
+ map_location=f"cuda:{torch.cuda.current_device()}"
+ if args.ngpu > 0
+ else "cpu",
+ oss_bucket=args.oss_bucket,
+ )
+
# dataloader for training/validation
train_dataloader, valid_dataloader = build_dataloader(args)
--
Gitblit v1.9.1