From da9ac240cb3298248acb3262ed96b87fa3c1fa56 Mon Sep 17 00:00:00 2001
From: speech_asr <wangjiaming.wjm@alibaba-inc.com>
Date: Tue, 18 Apr 2023 17:52:50 +0800
Subject: [PATCH] build_loader: fix sampler import, add tri_stage max_update and distributed batch sharding
---
funasr/datasets/small_datasets/build_loader.py | 28 ++++++++++++++++++++++------
1 file changed, 22 insertions(+), 6 deletions(-)
diff --git a/funasr/datasets/small_datasets/build_loader.py b/funasr/datasets/small_datasets/build_loader.py
index a96627a..a7181a4 100644
--- a/funasr/datasets/small_datasets/build_loader.py
+++ b/funasr/datasets/small_datasets/build_loader.py
@@ -1,15 +1,18 @@
import logging
import os
+import numpy as np
+import torch
+

from funasr.datasets.small_datasets.dataset import ESPnetDataset
from funasr.datasets.small_datasets.preprocessor import build_preprocess
-from funasr.samplers.length_batch_sampler import LengthBatchSampler
+from funasr.datasets.small_datasets.length_batch_sampler import LengthBatchSampler


def build_dataloader(args, mode="train"):
preprocess_fn = build_preprocess(args, train=mode == "train")
dest_sample_rate = args.frontend_conf["fs"] if (
- args.frontend_conf is not None and "fs" in args.frontend_conf) else 16000
+ args.frontend_conf is not None and "fs" in args.frontend_conf) else 16000
if mode == "train":
data_path_and_name_and_type = args.train_data_path_and_name_and_type
shape_files = args.train_shape_file
@@ -23,10 +26,6 @@
preprocess=preprocess_fn,
dest_sample_rate=dest_sample_rate,
)
-    if os.path.exists(os.path.join(data_path_and_name_and_type[0][0].parent, "utt2category")):
-        utt2category_file = os.path.join(data_path_and_name_and_type[0][0].parent, "utt2category")
-    else:
-        utt2category_file = None

dataset_conf = args.dataset_conf
batch_sampler = LengthBatchSampler(
@@ -46,3 +45,20 @@
f"[{mode}] mini-batch sizes summary: N-batch={len(bs_list)}, "
f"mean={np.mean(bs_list):.1f}, min={np.min(bs_list)}, max={np.max(bs_list)}"
)
+
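+    # The tri-stage LR scheduler needs the total update count up front: batches per epoch times epochs.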
+    if args.scheduler == "tri_stage" and mode == "train":
+        args.max_update = len(bs_list) * args.max_epoch
+        logging.info(f"Max update: {args.max_update}")
+
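+    # Shard each mini-batch across distributed ranks via a strided slice; every rank must get at least one sample.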
+    if args.distributed:
+        world_size = torch.distributed.get_world_size()
+        rank = torch.distributed.get_rank()
+        for batch in batches:
+            if len(batch) < world_size:
+                raise RuntimeError(
+                    "The batch size must be equal to or greater than world_size: "
+                    f"{len(batch)} < {world_size}"
+                )
+        batches = [batch[rank::world_size] for batch in batches]
--
Gitblit v1.9.1