From d80ac2fd2df4e7fb8a28acfa512bb11472b5cc99 Mon Sep 17 00:00:00 2001
From: liugz18 <57401541+liugz18@users.noreply.github.com>
Date: Thu, 18 Jul 2024 21:34:55 +0800
Subject: [PATCH] Rename 'res' in line 514 to avoid a naming conflict with line 365

---
 funasr/train_utils/load_pretrained_model.py |  159 ++++++++++++++++++++++------------------------------
 1 file changed, 67 insertions(+), 92 deletions(-)

diff --git a/funasr/train_utils/load_pretrained_model.py b/funasr/train_utils/load_pretrained_model.py
index a6596a0..8ed613c 100644
--- a/funasr/train_utils/load_pretrained_model.py
+++ b/funasr/train_utils/load_pretrained_model.py
@@ -7,121 +7,96 @@
 import torch
 import torch.nn
 import torch.optim
-
-
-def filter_state_dict(
-    dst_state: Dict[str, Union[float, torch.Tensor]],
-    src_state: Dict[str, Union[float, torch.Tensor]],
-):
-    """Filter name, size mismatch instances between dicts.
-
-    Args:
-        dst_state: reference state dict for filtering
-        src_state: target state dict for filtering
-
-    """
-    match_state = {}
-    for key, value in src_state.items():
-        if key in dst_state and (dst_state[key].size() == src_state[key].size()):
-            match_state[key] = value
-        else:
-            if key not in dst_state:
-                logging.warning(
-                    f"Filter out {key} from pretrained dict"
-                    + " because of name not found in target dict"
-                )
-            else:
-                logging.warning(
-                    f"Filter out {key} from pretrained dict"
-                    + " because of size mismatch"
-                    + f"({dst_state[key].size()}-{src_state[key].size()})"
-                )
-    return match_state
 
 
 def load_pretrained_model(
-    init_param: str,
+    path: str,
     model: torch.nn.Module,
-    ignore_init_mismatch: bool,
+    ignore_init_mismatch: bool = True,
     map_location: str = "cpu",
     oss_bucket=None,
+    scope_map=None,
+    excludes=None,
+    **kwargs,
 ):
     """Load a model state and set it to the model.
 
     Args:
-        init_param: <file_path>:<src_key>:<dst_key>:<exclude_Keys>
+        path: path to the checkpoint (a local file, or an object key when
+            oss_bucket is given)
+        ignore_init_mismatch: skip tensors whose shapes differ instead of failing
+        scope_map: flat list of (src_prefix, dst_prefix) pairs, or a
+            comma-separated string, used to rewrite key prefixes
+        excludes: key prefixes (list or comma-separated string) to skip
 
     Examples:
-        >>> load_pretrained_model("somewhere/model.pb", model)
-        >>> load_pretrained_model("somewhere/model.pb:decoder:decoder", model)
-        >>> load_pretrained_model("somewhere/model.pb:decoder:decoder:", model)
-        >>> load_pretrained_model(
-        ...     "somewhere/model.pb:decoder:decoder:decoder.embed", model
-        ... )
-        >>> load_pretrained_model("somewhere/decoder.pb::decoder", model)
+
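+        >>> load_pretrained_model("somewhere/model.pb", model)
+        >>> load_pretrained_model("somewhere/model.pb", model, excludes="decoder.embed")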
     """
-    sps = init_param.split(":", 4)
-    if len(sps) == 4:
-        path, src_key, dst_key, excludes = sps
-    elif len(sps) == 3:
-        path, src_key, dst_key = sps
-        excludes = None
-    elif len(sps) == 2:
-        path, src_key = sps
-        dst_key, excludes = None, None
-    else:
-        (path,) = sps
-        src_key, dst_key, excludes = None, None, None
-    if src_key == "":
-        src_key = None
-    if dst_key == "":
-        dst_key = None
 
-    if dst_key is None:
-        obj = model
-    else:
+    obj = model
+    dst_state = obj.state_dict()
 
-        def get_attr(obj: Any, key: str):
-            """Get an nested attribute.
-
-            >>> class A(torch.nn.Module):
-            ...     def __init__(self):
-            ...         super().__init__()
-            ...         self.linear = torch.nn.Linear(10, 10)
-            >>> a = A()
-            >>> assert A.linear.weight is get_attr(A, 'linear.weight')
-
-            """
-            if key.strip() == "":
-                return obj
-            for k in key.split("."):
-                obj = getattr(obj, k)
-            return obj
-
-        obj = get_attr(model, dst_key)
+    logging.info(f"ckpt: {path}")
 
     if oss_bucket is None:
         src_state = torch.load(path, map_location=map_location)
     else:
         buffer = BytesIO(oss_bucket.get_object(path).read())
         src_state = torch.load(buffer, map_location=map_location)
+
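+    # Checkpoints may nest the weights under "state_dict", "model_state_dict",
+    # or "model"; unwrap whichever wrapper is present.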
+    src_state = src_state["state_dict"] if "state_dict" in src_state else src_state
+    src_state = src_state["model_state_dict"] if "model_state_dict" in src_state else src_state
     src_state = src_state["model"] if "model" in src_state else src_state
+
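+    # scope_map is a flat list of (src_prefix, dst_prefix) pairs; the string
+    # "None" stands for an empty prefix. A ("module.", "") pair is always
+    # appended so checkpoints saved from DataParallel/DDP wrappers also load.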
+    if scope_map is None:
+        scope_map = []
+    elif isinstance(scope_map, str):
+        scope_map = scope_map.split(",")
+    # Copy rather than +=, so a caller-supplied list is not mutated in place.
+    scope_map = scope_map + ["module.", "None"]
+    logging.info(f"scope_map: {scope_map}")
+
     if excludes is not None:
-        for e in excludes.split(","):
-            src_state = {k: v for k, v in src_state.items() if not k.startswith(e)}
+        if isinstance(excludes, str):
+            excludes = excludes.split(",")
 
-    if src_key is not None:
-        src_state = {
-            k[len(src_key) + 1 :]: v
-            for k, v in src_state.items()
-            if k.startswith(src_key)
-        }
+    logging.info(f"excludes: {excludes}")
 
-    dst_state = obj.state_dict()
-    if ignore_init_mismatch:
-        src_state = filter_state_dict(dst_state, src_state)
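+    # Walk every key the model expects and resolve a source key for it in
+    # the checkpoint, honoring exclusions and prefix rewrites.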
+    for k in dst_state.keys():
+        excludes_flag = False
+        if excludes is not None:
+            for k_ex in excludes:
+                if k.startswith(k_ex):
+                    logging.info(f"key: {k} matching: {k_ex}, excluded")
+                    excludes_flag = True
+                    break
+        if excludes_flag:
+            continue
 
-    logging.debug("Loaded src_state keys: {}".format(src_state.keys()))
-    logging.debug("Loaded dst_state keys: {}".format(dst_state.keys()))
-    dst_state.update(src_state)
-    obj.load_state_dict(dst_state)
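+        # Assume the checkpoint uses the same key by default; the scope_map
+        # pass below may rewrite the prefix.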
+        k_src = k
+
+        if scope_map is not None:
+            src_prefix = ""
+            dst_prefix = ""
+            for i in range(0, len(scope_map), 2):
+                src_prefix = scope_map[i] if scope_map[i].lower() != "none" else ""
+                dst_prefix = scope_map[i + 1] if scope_map[i + 1].lower() != "none" else ""
+
+                if dst_prefix == "" and (src_prefix + k) in src_state.keys():
+                    k_src = src_prefix + k
+                    if not k_src.startswith("module."):
+                        logging.info(f"init param: {k} mapped from {k_src} in ckpt")
+                elif (
+                    k.startswith(dst_prefix)
+                    and k.replace(dst_prefix, src_prefix, 1) in src_state.keys()
+                ):
+                    k_src = k.replace(dst_prefix, src_prefix, 1)
+                    if not k_src.startswith("module."):
+                        logging.info(f"init param: {k} mapped from {k_src} in ckpt")
+
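+        # Copy the tensor when the checkpoint key resolves; when shapes
+        # disagree and ignore_init_mismatch is set, log the mismatch and keep
+        # the model's randomly initialized tensor instead.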
+        if k_src in src_state.keys():
+            if ignore_init_mismatch and dst_state[k].shape != src_state[k_src].shape:
+                logging.info(
+                    f"ignore_init_mismatch={ignore_init_mismatch}, "
+                    f"dst: {k} {dst_state[k].shape}, src: {k_src} {src_state[k_src].shape}"
+                )
+            else:
+                dst_state[k] = src_state[k_src]
+
+        else:
+            print(f"Warning, miss key in ckpt: {k}, {path}")
+
+    flag = obj.load_state_dict(dst_state, strict=True)
+    logging.info(f"Loading ckpt: {path}, status: {flag}")

--
Gitblit v1.9.1