From 0063e4356a8e8e7bb984a6d847afbb11d5fbaa4a Mon Sep 17 00:00:00 2001
From: hnluo <haoneng.lhn@alibaba-inc.com>
Date: Fri, 21 Jul 2023 15:07:34 +0800
Subject: [PATCH] Merge pull request #768 from alibaba-damo-academy/dev_lhn

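Add optional LoRA fine-tuning to the training entry point: when
--enable_lora is set, base model weights are frozen right after model
construction and only the injected LoRA parameters (plus bias terms,
depending on --lora_bias) remain trainable.

A minimal sketch of the semantics assumed here for
mark_only_lora_as_trainable, modeled on the reference loralib
implementation; the actual funasr.modules.lora.utils code may differ:

    import torch.nn as nn

    def mark_only_lora_as_trainable(model: nn.Module, bias: str = "none") -> None:
        # Freeze everything except the injected low-rank adapter weights,
        # whose parameter names contain "lora_" (e.g. lora_A / lora_B).
        for n, p in model.named_parameters():
            p.requires_grad = "lora_" in n
        if bias == "none":
            return
        if bias == "all":
            # Additionally keep every bias term trainable.
            for n, p in model.named_parameters():
                if "bias" in n:
                    p.requires_grad = True
        elif bias == "lora_only":
            # Keep biases trainable only in modules carrying LoRA weights.
            for m in model.modules():
                if hasattr(m, "lora_A") and hasattr(m, "bias") and m.bias is not None:
                    m.bias.requires_grad = True
        else:
            raise NotImplementedError(f"Unknown lora_bias mode: {bias}")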
---
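Example invocation with the new flags (all other training arguments are
elided; everything besides --enable_lora/--lora_bias is illustrative only):

    python funasr/bin/train.py --enable_lora true --lora_bias none ...

Following the loralib convention assumed above, 'none' trains only the
LoRA matrices, 'all' additionally trains every bias term, and
'lora_only' trains biases only in LoRA-injected modules.
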
 funasr/bin/train.py |   15 +++++++++++++++
 1 file changed, 15 insertions(+), 0 deletions(-)

diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index 1dc3fb5..f5d10c4 100755
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -28,6 +28,7 @@
 from funasr.utils.types import str2bool
 from funasr.utils.types import str_or_none
 from funasr.utils.yaml_no_alias_safe_dump import yaml_no_alias_safe_dump
+from funasr.modules.lora.utils import mark_only_lora_as_trainable
 
 
 def get_parser():
@@ -478,6 +479,18 @@
         default=None,
         help="oss bucket.",
     )
+    parser.add_argument(
+        "--enable_lora",
+        type=str2bool,
+        default=False,
+        help="Apply lora for finetuning.",
+    )
+    parser.add_argument(
+        "--lora_bias",
+        type=str,
+        default="none",
+        help="lora bias.",
+    )
 
     return parser
 
@@ -521,6 +534,8 @@
         dtype=getattr(torch, args.train_dtype),
         device="cuda" if args.ngpu > 0 else "cpu",
     )
+    if args.enable_lora:
+        mark_only_lora_as_trainable(model, args.lora_bias)
     for t in args.freeze_param:
         for k, p in model.named_parameters():
             if k.startswith(t + ".") or k == t:

--
Gitblit v1.9.1