From fae856e23d45fd27d5fd55fd036e8e3fc7b24915 Mon Sep 17 00:00:00 2001
From: 雾聪 <wucong.lyb@alibaba-inc.com>
Date: Fri, 02 Jun 2023 23:00:08 +0800
Subject: [PATCH] update funasr-onnx-offline

---
 funasr/bin/train.py |   14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index 0e95d77..21e1943 100755
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -272,8 +272,8 @@
     parser.add_argument(
         "--init_param",
         type=str,
+        action="append",
         default=[],
-        nargs="*",
         help="Specify the file path used for initialization of parameters. "
              "The format is '<file_path>:<src_key>:<dst_key>:<exclude_keys>', "
              "where file_path is the model file path, "
@@ -352,12 +352,6 @@
         type=str2bool,
         default=True,
         help="Apply preprocessing to data or not",
-    )
-    parser.add_argument(
-        "--embed_path",
-        type=str,
-        default=None,
-        help="for model which requires embeds",
     )
 
     # optimization related
@@ -525,6 +519,12 @@
         dtype=getattr(torch, args.train_dtype),
         device="cuda" if args.ngpu > 0 else "cpu",
     )
+    for t in args.freeze_param:
+        for k, p in model.named_parameters():
+            if k.startswith(t + ".") or k == t:
+                logging.info(f"Setting {k}.requires_grad = False")
+                p.requires_grad = False
+
     optimizers = build_optimizer(args, model=model)
     schedulers = build_scheduler(args, optimizers)
 

--
Gitblit v1.9.1