from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
from torch.distributed.algorithms.join import Join
from torch.distributed.fsdp.sharded_grad_scaler import ShardedGradScaler
from funasr.train_utils.average_nbest_models import average_checkpoints

from funasr.register import tables

from funasr.train_utils.trainer import Trainer
from funasr.schedulers import scheduler_classes
from funasr.train_utils.initialize import initialize
from funasr.download.download_from_hub import download_model
from funasr.models.lora.utils import mark_only_lora_as_trainable
from funasr.train_utils.set_all_random_seed import set_all_random_seed
from funasr.train_utils.load_pretrained_model import load_pretrained_model
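# Note: Join is the context manager for training with uneven per-rank inputs,
# and ShardedGradScaler is the AMP grad scaler compatible with FSDP-sharded
# parameters; both are imported for the distributed training path below.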

tensorboard_dir = os.path.join(kwargs.get("output_dir"), "tensorboard")
os.makedirs(tensorboard_dir, exist_ok=True)
try:
    # tensorboardX is an optional dependency; disable logging gracefully if absent
    from tensorboardX import SummaryWriter

    writer = SummaryWriter(tensorboard_dir)  # if trainer.rank == 0 else None
except ImportError:
    writer = None
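# Downstream logging should guard on the writer, e.g. (illustrative only;
# the tag name and the loss/step values are assumptions, not from the source):
#     if writer is not None:
#         writer.add_scalar("train/loss", loss.item(), trainer.start_step)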

)
trainer.start_step = 0

# Free cached blocks held by the CUDA caching allocator; the explicit
# device context ensures the call runs against the training device.
with torch.cuda.device(kwargs["device"]):
    torch.cuda.empty_cache()

time_elapsed = (time.perf_counter() - time_slice_i) / 3600.0
logging.info(
    f"rank: {local_rank}, "
    f"time_elapsed_epoch: {time_elapsed:.3f} hours, "
    f"estimated to finish {dataloader.data_split_num} data_slices, "
    f"remaining: {dataloader.data_split_num - data_split_i} slices, "
    f"{(dataloader.data_split_num - data_split_i) * time_elapsed:.3f} hours, "
    f"epoch: {trainer.max_epoch - epoch} epochs, "
    f"{((trainer.max_epoch - epoch - 1) * dataloader.data_split_num + dataloader.data_split_num - data_split_i) * time_elapsed:.3f} hours\n"
)

trainer.start_data_split_i = 0
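# Reset the slice pointer so the next epoch iterates data slices from the
# beginning (data_split_i indexes slices within an epoch; see the ETA log above).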