嘉渊
2023-04-24 7e25315a4f8e9dc5aa3c8162fa176fb2d2be19cc
funasr/bin/train.py
@@ -1,3 +1,5 @@
 #!/usr/bin/env python3
 import argparse
 import logging
 import os
@@ -12,6 +14,7 @@
 from funasr.build_utils.build_model import build_model
 from funasr.build_utils.build_optimizer import build_optimizer
 from funasr.build_utils.build_scheduler import build_scheduler
+from funasr.build_utils.build_trainer import build_trainer
 from funasr.text.phoneme_tokenizer import g2p_choices
 from funasr.torch_utils.model_summary import model_summary
 from funasr.torch_utils.pytorch_version import pytorch_cudnn_version
@@ -420,16 +423,16 @@
     prepare_data(args, distributed_option)
     model = build_model(args)
-    optimizer = build_optimizer(args, model=model)
-    scheduler = build_scheduler(args, optimizer)
+    optimizers = build_optimizer(args, model=model)
+    schedulers = build_scheduler(args, optimizers)
     logging.info("world size: {}, rank: {}, local_rank: {}".format(distributed_option.dist_world_size,
                                                                    distributed_option.dist_rank,
                                                                    distributed_option.local_rank))
     logging.info(pytorch_cudnn_version())
     logging.info(model_summary(model))
-    logging.info("Optimizer: {}".format(optimizer))
-    logging.info("Scheduler: {}".format(scheduler))
+    logging.info("Optimizer: {}".format(optimizers))
+    logging.info("Scheduler: {}".format(schedulers))
     # dump args to config.yaml
     if not distributed_option.distributed or distributed_option.dist_rank == 0:
@@ -443,4 +446,18 @@
             else:
                 yaml_no_alias_safe_dump(vars(args), f, indent=4, sort_keys=False)
     # dataloader for training/validation
     train_dataloader, valid_dataloader = build_dataloader(args)
+    # Trainer, including model, optimizers, etc.
+    trainer = build_trainer(
+        args=args,
+        model=model,
+        optimizers=optimizers,
+        schedulers=schedulers,
+        train_dataloader=train_dataloader,
+        valid_dataloader=valid_dataloader,
+        distributed_option=distributed_option
+    )
+    trainer.run()
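
The functional change is twofold: the singular optimizer/scheduler pair becomes plural (optimizers/schedulers), and trainer construction moves behind build_trainer, which receives the model, optimizers, schedulers, dataloaders, and distributed options and exposes run(). As an illustration of the plural pattern only (not FunASR's actual Trainer; SketchTrainer and train_step are hypothetical names), a trainer stepping several optimizer/scheduler pairs might look like:

import torch

class SketchTrainer:
    """Minimal sketch: drive a model with parallel lists of optimizers
    and schedulers, paired one-to-one. Illustrative, not FunASR code."""

    def __init__(self, model, optimizers, schedulers, train_dataloader):
        self.model = model
        self.optimizers = optimizers      # list of torch.optim.Optimizer
        self.schedulers = schedulers      # list, same length as optimizers
        self.train_dataloader = train_dataloader

    def train_step(self, batch):
        # Assumes the model's forward returns a scalar loss tensor.
        loss = self.model(batch)
        for opt in self.optimizers:
            opt.zero_grad()
        loss.backward()
        # zip pairs each optimizer with its scheduler one-to-one,
        # which is why the plural rename matters downstream.
        for opt, sch in zip(self.optimizers, self.schedulers):
            opt.step()
            sch.step()
        return loss.item()

    def run(self, epochs=1):
        for _ in range(epochs):
            for batch in self.train_dataloader:
                self.train_step(batch)

Keeping optimizers and schedulers as parallel lists lets a single training loop handle both the common one-optimizer case and multi-optimizer setups (e.g. separate parameter groups) without branching.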