游雁
2023-10-10 580b11b57ac4b62f7e2acda73813a4e10e8e4cd3
funasr/torch_utils/load_pretrained_model.py
@@ -52,13 +52,13 @@
        init_param: <file_path>:<src_key>:<dst_key>:<exclude_keys>
    Examples:
        >>> load_pretrained_model("somewhere/model.pth", model)
        >>> load_pretrained_model("somewhere/model.pth:decoder:decoder", model)
        >>> load_pretrained_model("somewhere/model.pth:decoder:decoder:", model)
        >>> load_pretrained_model("somewhere/model.pb", model)
        >>> load_pretrained_model("somewhere/model.pb:decoder:decoder", model)
        >>> load_pretrained_model("somewhere/model.pb:decoder:decoder:", model)
        >>> load_pretrained_model(
        ...     "somewhere/model.pth:decoder:decoder:decoder.embed", model
        ...     "somewhere/model.pb:decoder:decoder:decoder.embed", model
        ... )
        >>> load_pretrained_model("somewhere/decoder.pth::decoder", model)
        >>> load_pretrained_model("somewhere/decoder.pb::decoder", model)
    """
    sps = init_param.split(":", 4)
    if len(sps) == 4:
@@ -120,6 +120,6 @@
    if ignore_init_mismatch:
        src_state = filter_state_dict(dst_state, src_state)
    logging.info("Loaded src_state keys: {}".format(src_state.keys()))
    # logging.info("Loaded src_state keys: {}".format(src_state.keys()))
    dst_state.update(src_state)
    obj.load_state_dict(dst_state)