From 7498bd7388afdde8d5e6f8a4cb6aeb8be8ac60fa Mon Sep 17 00:00:00 2001
From: shixian.shi <shixian.shi@alibaba-inc.com>
Date: Fri, 08 Mar 2024 11:37:46 +0800
Subject: [PATCH] train.py: fix hub log key, print tables on rank 0 only, derive vocab_size from tokenizer, skip missing init checkpoints
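
Four changes in funasr/bin/train.py:

* read the "hub" key instead of the stale "model_hub" key when logging
  which model hub is used for downloads
* print the registered tables on local rank 0 only, so DDP/FSDP runs do
  not emit one copy of the output per process
* derive vocab_size from the tokenizer: prefer get_vocab() when the
  tokenizer provides it, otherwise fall back to token_list
* skip init_param checkpoints that do not exist on disk and log that the
  model keeps its random initialization

A minimal standalone sketch of the vocab_size fallback, for review
convenience; the attribute names (token_list, get_vocab) are the ones
the patch itself checks, while the helper name is illustrative only:

    def infer_vocab_size(tokenizer):
        # fall back to token_list when present, but let get_vocab()
        # take precedence, mirroring the order of checks in the patch
        vocab_size = len(tokenizer.token_list) if hasattr(tokenizer, "token_list") else None
        if hasattr(tokenizer, "get_vocab"):
            vocab_size = len(tokenizer.get_vocab())
        return vocab_size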
---
funasr/bin/train.py | 35 ++++++++++++++++++++++-------------
 1 file changed, 22 insertions(+), 13 deletions(-)
diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index d916509..3c93371 100644
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -35,7 +35,7 @@
assert "model" in kwargs
if "model_conf" not in kwargs:
- logging.info("download models from model hub: {}".format(kwargs.get("model_hub", "ms")))
+ logging.info("download models from model hub: {}".format(kwargs.get("hub", "ms")))
kwargs = download_model(is_training=kwargs.get("is_training", True), **kwargs)
@@ -44,14 +44,16 @@
def main(**kwargs):
print(kwargs)
+
# set random seed
- tables.print()
set_all_random_seed(kwargs.get("seed", 0))
torch.backends.cudnn.enabled = kwargs.get("cudnn_enabled", torch.backends.cudnn.enabled)
torch.backends.cudnn.benchmark = kwargs.get("cudnn_benchmark", torch.backends.cudnn.benchmark)
torch.backends.cudnn.deterministic = kwargs.get("cudnn_deterministic", True)
local_rank = int(os.environ.get('LOCAL_RANK', 0))
+ if local_rank == 0:
+ tables.print()
# Check if we are using DDP or FSDP
use_ddp = 'WORLD_SIZE' in os.environ and int(os.environ["WORLD_SIZE"]) > 1
use_fsdp = kwargs.get("use_fsdp", None)
@@ -83,7 +85,9 @@
# build model
model_class = tables.model_classes.get(kwargs["model"])
- model = model_class(**kwargs, **kwargs["model_conf"], vocab_size=len(tokenizer.token_list))
+ vocab_size = len(tokenizer.token_list) if hasattr(tokenizer, "token_list") else None
+ vocab_size = len(tokenizer.get_vocab()) if hasattr(tokenizer, "get_vocab") else vocab_size
+ model = model_class(**kwargs, **kwargs["model_conf"], vocab_size=vocab_size)
@@ -94,17 +98,22 @@
init_param = (init_param,)
logging.info("init_param is not None: %s", init_param)
for p in init_param:
- logging.info(f"Loading pretrained params from {p}")
- load_pretrained_model(
- model=model,
- path=p,
- ignore_init_mismatch=kwargs.get("ignore_init_mismatch", True),
- oss_bucket=kwargs.get("oss_bucket", None),
- scope_map=kwargs.get("scope_map", None),
- excludes=kwargs.get("excludes", None),
- )
- else:
+ if os.path.exists(p):
+ logging.info(f"Loading pretrained params from {p}")
+ load_pretrained_model(
+ model=model,
+ path=p,
+ ignore_init_mismatch=kwargs.get("ignore_init_mismatch", True),
+ oss_bucket=kwargs.get("oss_bucket", None),
+ scope_map=kwargs.get("scope_map", []),
+ excludes=kwargs.get("excludes", None),
+ )
+ else:
+ logging.info(f"Checkpoint does not exist, init randomly: {p}")
+ elif kwargs.get("init", None):
initialize(model, kwargs.get("init", "kaiming_normal"))
+ else:
+ print("No initialize method")
# freeze_param
--
Gitblit v1.9.1