import copy
import logging
import pdb
from io import BytesIO

import torch
import torch.nn
import torch.optim
| | | |
def load_pretrained_model(
    path,
    obj,
    map_location="cpu",
    oss_bucket=None,
    excludes=None,
):
    """Load a pretrained checkpoint from ``path`` into ``obj``.

    NOTE(review): the original signature was truncated in this chunk; the
    parameter list here is reconstructed from the names used in the body —
    confirm against callers before merging.

    Parameters
    ----------
    path : str
        Checkpoint location — a local file path, or an OSS object key when
        ``oss_bucket`` is provided.
    obj : torch.nn.Module
        Target module; must provide ``state_dict()`` and ``load_state_dict()``.
    map_location : str
        Passed straight through to ``torch.load``.
    oss_bucket : object or None
        When not None, the checkpoint bytes are fetched via
        ``oss_bucket.get_object(path).read()`` instead of the filesystem.
    excludes : str | list[str] | None
        Key prefixes (comma-separated string or list) whose parameters are
        NOT overwritten from the checkpoint — the module's own values are
        kept for those keys.
    """
    logging.info(f"ckpt: {path}")

    # Load the checkpoint exactly once.  The original code called torch.load
    # twice per branch; in the OSS branch the second load ran on an already
    # exhausted BytesIO and failed, and the first result was immediately
    # clobbered by a deepcopy of the second anyway.
    if oss_bucket is None:
        ori_state = torch.load(path, map_location=map_location)
    else:
        buffer = BytesIO(oss_bucket.get_object(path).read())
        ori_state = torch.load(buffer, map_location=map_location)

    # Work on a copy so the caller's view of the raw checkpoint (if any) is
    # untouched, then unwrap the common container keys to reach the bare
    # parameter dict.
    src_state = copy.deepcopy(ori_state)
    for wrapper in ("state_dict", "model_state_dict", "model"):
        if wrapper in src_state:
            src_state = src_state[wrapper]

    # Normalize ``excludes`` to a list of key prefixes.
    if excludes is not None:
        if isinstance(excludes, str):
            excludes = excludes.split(",")
        logging.info(f"excludes: {excludes}")

    # NOTE(review): the line defining ``dst_state`` was lost in this chunk;
    # it must be the module's own state dict for the strict load below to
    # succeed — confirm against the original file.
    dst_state = obj.state_dict()

    for k in dst_state.keys():
        # Skip keys matching an excluded prefix, keeping the module's own
        # value.  (The original had a second, broken copy of this loop whose
        # ``continue`` only continued the inner ``for`` and whose ``{{k}}``
        # f-string logged the literal text "{k}"; this is the working form.)
        excludes_flag = False
        if excludes is not None:
            for k_ex in excludes:
                if k.startswith(k_ex):
                    logging.info(f"key: {k} matching: {k_ex}, excluded")
                    excludes_flag = True
                    break
        if excludes_flag:
            continue

        # NOTE(review): the original chunk had an unreadable key-mapping step
        # here (a scope_map remap, judging by the dangling fragments); with it
        # missing, destination keys are matched verbatim against the source.
        k_src = k
        if k_src in src_state:
            dst_state[k] = src_state[k_src]
        else:
            logging.info(f"Warning, miss key in ckpt: {k}, mapped: {k_src}")
            print(f"Warning, miss key in ckpt: {k}, {path}")

    # Excluded / missing keys still carry the module's own values, so every
    # key is present and a strict load is safe.
    flag = obj.load_state_dict(dst_state, strict=True)
    logging.info(f"Loading ckpt: {path}, status: {flag}")