From b349739f5d6302048c179eeaadb4432acc541cd5 Mon Sep 17 00:00:00 2001
From: 语帆 <yf352572@alibaba-inc.com>
Date: Thu, 22 Feb 2024 17:27:02 +0800
Subject: [PATCH] Add pdb breakpoints for debugging model build and checkpoint loading

Insert temporary pdb.set_trace() calls around model construction in
funasr/auto/auto_model.py and around pretrained-checkpoint loading in
funasr/train_utils/load_pretrained_model.py to support interactive debugging.
---
funasr/auto/auto_model.py | 4 +++-
funasr/train_utils/load_pretrained_model.py | 8 +++++---
2 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/funasr/auto/auto_model.py b/funasr/auto/auto_model.py
index a5341ea..23b80d7 100644
--- a/funasr/auto/auto_model.py
+++ b/funasr/auto/auto_model.py
@@ -172,12 +172,14 @@
# build model
model_class = tables.model_classes.get(kwargs["model"])
+ import pdb; pdb.set_trace()  # break before constructing the model; inspect kwargs (inline import: pdb is not imported in this module)
model = model_class(**kwargs, **kwargs["model_conf"], vocab_size=vocab_size)
-
+ import pdb; pdb.set_trace()  # break after construction; inspect the built model before moving it to the device
model.to(device)
# init_param
init_param = kwargs.get("init_param", None)
+ import pdb; pdb.set_trace()  # break here to inspect init_param before loading pretrained weights
if init_param is not None:
logging.info(f"Loading pretrained params from {init_param}")
load_pretrained_model(
diff --git a/funasr/train_utils/load_pretrained_model.py b/funasr/train_utils/load_pretrained_model.py
index 5ba9bb7..aec31e3 100644
--- a/funasr/train_utils/load_pretrained_model.py
+++ b/funasr/train_utils/load_pretrained_model.py
@@ -7,7 +7,7 @@
import torch
import torch.nn
import torch.optim
-
+import pdb  # temporary import for the debug breakpoints below
def filter_state_dict(
dst_state: Dict[str, Union[float, torch.Tensor]],
@@ -99,14 +99,16 @@
# import pdb;
# pdb.set_trace()
print(f"ckpt: {path}")
+ pdb.set_trace()  # break before loading; confirm the checkpoint path
if oss_bucket is None:
src_state = torch.load(path, map_location=map_location)
else:
buffer = BytesIO(oss_bucket.get_object(path).read())
src_state = torch.load(buffer, map_location=map_location)
+ pdb.set_trace()  # break after torch.load; inspect the raw src_state
if "state_dict" in src_state:
src_state = src_state["state_dict"]
-
+ pdb.set_trace()  # break after unwrapping "state_dict"; inspect src_state keys
for k in dst_state.keys():
if not k.startswith("module.") and "module." + k in src_state.keys():
k_ddp = "module." + k
@@ -116,7 +118,7 @@
dst_state[k] = src_state[k_ddp]
else:
print(f"Miss key in ckpt: model: {k}, ckpt: {k_ddp}")
-
+ pdb.set_trace()  # break before load_state_dict; inspect the filtered dst_state
flag = obj.load_state_dict(dst_state, strict=True)
# print(flag)
--
Gitblit v1.9.1