From 4e37a5fda20f0878b593b8ba2b9ea46db63743b5 Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Thu, 11 May 2023 14:16:28 +0800
Subject: [PATCH] sequence_iter_factory: fix config lookups and train-only distributed batching

Three fixes to the batch sampler setup in
funasr/datasets/small_datasets/sequence_iter_factory.py:

* read batch_size from the nested dataset_conf["batch_conf"] section
  rather than from the top level of dataset_conf;
* apply the distributed batch handling only when mode == "train", so
  evaluation batches are left untouched;
* take num_workers from dataset_conf (falling back to 8) instead of
  args.num_workers.

Illustrative sketches of the assumed config layout and of the
rank-sharding idea follow the diff.
---
 funasr/datasets/small_datasets/sequence_iter_factory.py |    6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/funasr/datasets/small_datasets/sequence_iter_factory.py b/funasr/datasets/small_datasets/sequence_iter_factory.py
index 810389e..c35314f 100644
--- a/funasr/datasets/small_datasets/sequence_iter_factory.py
+++ b/funasr/datasets/small_datasets/sequence_iter_factory.py
@@ -62,7 +62,7 @@
         # sampler
         dataset_conf = args.dataset_conf
         batch_sampler = LengthBatchSampler(
-            batch_bins=dataset_conf["batch_size"] * args.ngpu,
+            batch_bins=dataset_conf["batch_conf"]["batch_size"] * args.ngpu,
             shape_files=shape_files,
             sort_in_batch=dataset_conf["sort_in_batch"] if hasattr(dataset_conf, "sort_in_batch") else "descending",
             sort_batch=dataset_conf["sort_batch"] if hasattr(dataset_conf, "sort_batch") else "ascending",
@@ -83,7 +83,7 @@
             args.max_update = len(bs_list) * args.max_epoch
             logging.info("Max update: {}".format(args.max_update))
 
-        if args.distributed:
+        if args.distributed and mode == "train":
             world_size = torch.distributed.get_world_size()
             rank = torch.distributed.get_rank()
             for batch in batches:
@@ -103,7 +103,7 @@
         self.num_iters_per_epoch = None
         self.shuffle = mode == "train"
         self.seed = args.seed
-        self.num_workers = args.num_workers
+        self.num_workers = args.dataset_conf.get("num_workers", 8)
         self.collate_fn = collate_fn
         self.pin_memory = args.ngpu > 0
 

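For reference, the dataset_conf layout that the new lookups assume is
sketched below. Only the keys visible in the hunks come from the patch;
the concrete values and the ngpu figure are illustrative, not FunASR
defaults:

    # Illustrative config; values are made up.
    dataset_conf = {
        "batch_conf": {"batch_size": 6000},   # per-GPU batch bins
        "sort_in_batch": "descending",
        "sort_batch": "ascending",
        "num_workers": 4,                     # optional; falls back to 8
    }

    ngpu = 2
    batch_bins = dataset_conf["batch_conf"]["batch_size"] * ngpu  # hunk 1
    num_workers = dataset_conf.get("num_workers", 8)              # hunk 3

One caveat in the unchanged context lines: dataset_conf is a plain dict
(it is indexed with [] and read with .get), so the
hasattr(dataset_conf, "sort_in_batch") checks never see the keys and
the string defaults always apply; "sort_in_batch" in dataset_conf would
be the dict-aware test.
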
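The mode == "train" guard matters because the loop it wraps (its body
is elided in the hunk) does per-rank batch bookkeeping. A minimal
sketch of the train-only sharding idea, assuming a pad-and-stride
scheme; the function name and padding strategy are invented, not the
actual FunASR loop:

    import torch.distributed as dist

    def shard_batches_for_rank(batches, mode, distributed):
        """Give every rank the same number of batches during training."""
        if distributed and mode == "train":
            world_size = dist.get_world_size()
            rank = dist.get_rank()
            # Pad by repeating leading batches so the count divides evenly;
            # a rank that runs out of batches early would stall collectives.
            pad = (-len(batches)) % world_size
            batches = batches + batches[:pad]
            batches = batches[rank::world_size]
        return batches

Gating this on mode == "train" leaves validation batches unsharded, so
filler duplicates cannot distort evaluation metrics.
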
--
Gitblit v1.9.1