嘉渊
2023-04-27 32feb7d2be2dd1994401b613e95d1c3c1fd8a0e0
funasr/datasets/small_datasets/sequence_iter_factory.py
@@ -83,7 +83,7 @@
            args.max_update = len(bs_list) * args.max_epoch
            logging.info("Max update: {}".format(args.max_update))
-        if args.distributed:
+        if args.distributed and mode=="train":
            world_size = torch.distributed.get_world_size()
            rank = torch.distributed.get_rank()
            for batch in batches:
@@ -103,7 +103,7 @@
        self.num_iters_per_epoch = None
        self.shuffle = mode == "train"
        self.seed = args.seed
-        self.num_workers = args.num_workers
+        self.num_workers = args.dataset_conf.get("num_workers", 8)
        self.collate_fn = collate_fn
        self.pin_memory = args.ngpu > 0