From 497edf4c9d6c1565a4bcf1a3edfcd47ffec8c10d Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Wed, 21 Feb 2024 11:30:59 +0800
Subject: [PATCH] bugfix
---
funasr/train_utils/load_pretrained_model.py | 79 +++++++++++++++++++++++++++++++--------
1 file changed, 62 insertions(+), 17 deletions(-)
diff --git a/funasr/train_utils/load_pretrained_model.py b/funasr/train_utils/load_pretrained_model.py
index ceab4ee..ff96ebb 100644
--- a/funasr/train_utils/load_pretrained_model.py
+++ b/funasr/train_utils/load_pretrained_model.py
@@ -75,6 +75,7 @@
return assignment_map
+
def load_pretrained_model(
path: str,
model: torch.nn.Module,
@@ -94,25 +95,69 @@
"""
obj = model
-
+ dst_state = obj.state_dict()
+ # import pdb;
+ # pdb.set_trace()
+ print(f"ckpt: {path}")
if oss_bucket is None:
src_state = torch.load(path, map_location=map_location)
else:
buffer = BytesIO(oss_bucket.get_object(path).read())
src_state = torch.load(buffer, map_location=map_location)
- src_state = src_state["model"] if "model" in src_state else src_state
-
- if excludes is not None:
- for e in excludes.split(","):
- src_state = {k: v for k, v in src_state.items() if not k.startswith(e)}
-
- dst_state = obj.state_dict()
- src_state = assigment_scope_map(dst_state, src_state, scope_map)
-
- if ignore_init_mismatch:
- src_state = filter_state_dict(dst_state, src_state)
-
- logging.debug("Loaded src_state keys: {}".format(src_state.keys()))
- logging.debug("Loaded dst_state keys: {}".format(dst_state.keys()))
- dst_state.update(src_state)
- obj.load_state_dict(dst_state, strict=True)
+ if "state_dict" in src_state:
+ src_state = src_state["state_dict"]
+
+ for k in dst_state.keys():
+ if not k.startswith("module.") and "module." + k in src_state.keys():
+ k_ddp = "module." + k
+ else:
+ k_ddp = k
+ if k_ddp in src_state:
+ dst_state[k] = src_state[k_ddp]
+ else:
+ print(f"Miss key in ckpt: model: {k}, ckpt: {k_ddp}")
+
+ flag = obj.load_state_dict(dst_state, strict=True)
+ print(flag)
+
+# def load_pretrained_model(
+# path: str,
+# model: torch.nn.Module,
+# ignore_init_mismatch: bool,
+# map_location: str = "cpu",
+# oss_bucket=None,
+# scope_map=None,
+# excludes=None,
+# ):
+# """Load a model state and set it to the model.
+#
+# Args:
+# init_param: <file_path>:<src_key>:<dst_key>:<exclude_Keys>
+#
+# Examples:
+#
+# """
+#
+# obj = model
+#
+# if oss_bucket is None:
+# src_state = torch.load(path, map_location=map_location)
+# else:
+# buffer = BytesIO(oss_bucket.get_object(path).read())
+# src_state = torch.load(buffer, map_location=map_location)
+# src_state = src_state["model"] if "model" in src_state else src_state
+#
+# if excludes is not None:
+# for e in excludes.split(","):
+# src_state = {k: v for k, v in src_state.items() if not k.startswith(e)}
+#
+# dst_state = obj.state_dict()
+# src_state = assigment_scope_map(dst_state, src_state, scope_map)
+#
+# if ignore_init_mismatch:
+# src_state = filter_state_dict(dst_state, src_state)
+#
+# logging.debug("Loaded src_state keys: {}".format(src_state.keys()))
+# logging.debug("Loaded dst_state keys: {}".format(dst_state.keys()))
+# dst_state.update(src_state)
+# obj.load_state_dict(dst_state, strict=True)
--
Gitblit v1.9.1