From a3bb4013c39faa1d006dcb4d6d87ec9a6bb3770c Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Tue, 27 Feb 2024 10:06:22 +0800
Subject: [PATCH] vad: make vocab_size optional and skip missing init_param checkpoints in train.py
---
funasr/bin/train.py | 29 ++++++++++++++++++-----------
 1 file changed, 18 insertions(+), 11 deletions(-)
diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index 0661452..44d84e7 100644
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -85,7 +85,9 @@
# build model
model_class = tables.model_classes.get(kwargs["model"])
- model = model_class(**kwargs, **kwargs["model_conf"], vocab_size=len(tokenizer.token_list))
+ vocab_size = len(tokenizer.token_list) if hasattr(tokenizer, "token_list") else None
+ vocab_size = len(tokenizer.get_vocab()) if hasattr(tokenizer, "get_vocab") else vocab_size
+ model = model_class(**kwargs, **kwargs["model_conf"], vocab_size=vocab_size)
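For reference, a minimal standalone sketch of the vocab-size fallback introduced above; CharTokenizer and resolve_vocab_size are hypothetical stand-ins, not names from the repo. The intent is that a tokenizer exposing neither token_list nor get_vocab() (presumably the case for VAD-style models with no vocabulary) yields vocab_size=None instead of raising:

    def resolve_vocab_size(tokenizer):
        # Prefer an explicit token_list, then a get_vocab() mapping, else None.
        vocab_size = len(tokenizer.token_list) if hasattr(tokenizer, "token_list") else None
        vocab_size = len(tokenizer.get_vocab()) if hasattr(tokenizer, "get_vocab") else vocab_size
        return vocab_size

    class CharTokenizer:                             # hypothetical stand-in
        token_list = ["<blank>", "a", "b"]

    assert resolve_vocab_size(CharTokenizer()) == 3  # token_list path
    assert resolve_vocab_size(object()) is None      # no vocabulary at all
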
@@ -96,17 +98,22 @@
init_param = (init_param,)
logging.info("init_param is not None: %s", init_param)
for p in init_param:
- logging.info(f"Loading pretrained params from {p}")
- load_pretrained_model(
- model=model,
- path=p,
- ignore_init_mismatch=kwargs.get("ignore_init_mismatch", True),
- oss_bucket=kwargs.get("oss_bucket", None),
- scope_map=kwargs.get("scope_map", None),
- excludes=kwargs.get("excludes", None),
- )
- else:
+ if os.path.exists(p):
+ logging.info(f"Loading pretrained params from {p}")
+ load_pretrained_model(
+ model=model,
+ path=p,
+ ignore_init_mismatch=kwargs.get("ignore_init_mismatch", True),
+ oss_bucket=kwargs.get("oss_bucket", None),
+ scope_map=kwargs.get("scope_map", None),
+ excludes=kwargs.get("excludes", None),
+ )
+ else:
+ logging.info(f"Checkpoint does not exist, init randomly: {p}")
+ elif kwargs.get("init", None):
initialize(model, kwargs.get("init", "kaiming_normal"))
+ else:
+ print("No initialize method")
# freeze_param
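For reference, a minimal runnable sketch of the initialization order produced by the hunk above; load_checkpoint and random_init are hypothetical stand-ins for load_pretrained_model and initialize, and the model is a plain dict. The behavioral point of the change: a missing checkpoint path is now logged and skipped instead of failing, and the explicit "init" method is applied only when no init_param was given at all:

    import os
    import logging

    def load_checkpoint(model, path):      # hypothetical stand-in for load_pretrained_model
        model["init"] = f"pretrained:{path}"

    def random_init(model, method):        # hypothetical stand-in for initialize()
        model["init"] = f"random:{method}"

    def init_model(model, init_param=(), init_method=None):
        # Mirrors the branch order after the patch.
        if init_param:
            for p in init_param:
                if os.path.exists(p):
                    logging.info("Loading pretrained params from %s", p)
                    load_checkpoint(model, p)
                else:
                    logging.info("Checkpoint does not exist, init randomly: %s", p)
        elif init_method:
            random_init(model, init_method)
        else:
            print("No initialize method")

    model = {}
    init_model(model, init_param=("missing.ckpt",), init_method="kaiming_normal")
    # "missing.ckpt" is absent, so the model keeps its default (random) weights;
    # init_method is NOT applied here, because init_param was provided.
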
--
Gitblit v1.9.1