ds
游雁
2024-05-20 a19ee46d2c9d4a98951cbc9f592ff459d75b9f69
ds
2个文件已修改
13 ■■■■ 已修改文件
funasr/train_utils/average_nbest_models.py 3 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
funasr/train_utils/trainer_ds.py 10 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
funasr/train_utils/average_nbest_models.py
@@ -62,7 +62,8 @@
    # Check if we have any state_dicts to average
    if len(state_dicts) < 1:
-        raise RuntimeError("No checkpoints found for averaging.")
+        print("No checkpoints found for averaging.")
+        return
    # Average or sum weights
    avg_state_dict = OrderedDict()
funasr/train_utils/trainer_ds.py
@@ -168,8 +168,7 @@
        """
        step_in_epoch = None if step is None else step_in_epoch
        if self.use_deepspeed:
-            with torch.no_grad():
-                model.save_checkpoint(save_dir=model_dir, tag=tag, client_state=info_dict)
+            model.save_checkpoint(save_dir=model_dir, tag=tag, client_state=info_dict)
            logging.info(f"Save checkpoint: {epoch}, rank: {self.local_rank}\n")
            # self.step_or_epoch += 1
            state = {
@@ -273,8 +272,7 @@
        elif self.use_fsdp:
            pass
        step_in_epoch = None if step is None else step_in_epoch
-        if self.rank == 0:
+        elif self.rank == 0:
            logging.info(f"Save checkpoint: {epoch}, rank: {self.local_rank}\n")
            # self.step_or_epoch += 1
            state = {
@@ -385,7 +383,7 @@
            if self.use_deepspeed:
                ckpt = os.path.join(self.output_dir, "model.pt")
-                if os.path.isfile(ckpt):
+                if os.path.exists(ckpt):
                    _, checkpoint = model_engine.load_checkpoint(self.output_dir, "model.pt")
                    self.saved_ckpts = checkpoint["saved_ckpts"]
@@ -712,7 +710,7 @@
                    "data_split_num": kwargs.get("data_split_num", 1),
                    "log_step": batch_idx + kwargs.get("start_step", 0),
                    "batch_total": batch_idx,
-                    "step_in_epoch": step_in_epoch,
+                    "step_in_epoch": batch_idx,
                    "lr": 0.0,
                }