From e9d2cfc3a134b00f4e98271fbee3838d1ccecbcc Mon Sep 17 00:00:00 2001
From: VirtuosoQ <2416050435@qq.com>
Date: Fri, 26 Apr 2024 14:59:30 +0800
Subject: [PATCH] FunASR java http client
---
funasr/datasets/dataloader_entry.py | 22 ++++++++++++++++++----
 1 file changed, 18 insertions(+), 4 deletions(-)
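Usage note (commentary only, not part of the applied diff): the hunks below switch registration of DataloaderMapStyle from the function to the class variant and add optional dataset splitting via dataset_conf["data_split_num"], so build_iter() can rebuild the training dataset from a single slice of train_data_set_list on each call. The sketch here shows how a training loop might drive that; max_epoch and the construction kwargs are placeholders, and the assumption that build_iter() returns (dataloader_tr, dataloader_val) mirrors the function variant in the second hunk rather than anything shown in the diff.

    # Sketch only: iterate one pass per data slice (assumed names noted below).
    dataloader = DataloaderMapStyle(frontend=frontend, tokenizer=tokenizer, **kwargs)
    for epoch in range(max_epoch):  # max_epoch: assumed training config value
        for split_i in range(dataloader.data_split_num):
            # When data_split_num > 1, this rebuilds self.dataset_tr from slice split_i.
            dataloader_tr, dataloader_val = dataloader.build_iter(epoch, data_split_i=split_i)
            for batch in dataloader_tr:
                ...  # forward/backward on this slice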
diff --git a/funasr/datasets/dataloader_entry.py b/funasr/datasets/dataloader_entry.py
index 0de7e40..70da722 100644
--- a/funasr/datasets/dataloader_entry.py
+++ b/funasr/datasets/dataloader_entry.py
@@ -4,7 +4,7 @@
 from funasr.register import tables
-@tables.register("dataloader_classes", "DataloaderMapStyle")
+# @tables.register("dataloader_classes", "DataloaderMapStyle")
 def DataloaderMapStyle(frontend=None, tokenizer=None, **kwargs):
     # dataset
     logging.info("Build dataloader")
@@ -25,7 +25,7 @@
     return dataloader_tr, dataloader_val
-# @tables.register("dataloader_classes", "DataloaderMapStyle")
+@tables.register("dataloader_classes", "DataloaderMapStyle")
 class DataloaderMapStyle:
     def __init__(self, frontend=None, tokenizer=None, **kwargs):
         # dataset
@@ -40,7 +40,21 @@
         self.dataset_val = dataset_val
         self.kwargs = kwargs
-    def build_iter(self, epoch=0):
+        # split dataset
+        self.data_split_num = kwargs["dataset_conf"].get("data_split_num", 1)
+        self.dataset_class = dataset_class
+        self.frontend = frontend
+        self.tokenizer = tokenizer
+        self.kwargs = kwargs
+
+    def build_iter(self, epoch=0, data_split_i=0, **kwargs):
+
+        # reload dataset slice
+        if self.data_split_num > 1:
+            del self.dataset_tr
+            self.dataset_tr = self.dataset_class(self.kwargs.get("train_data_set_list"), frontend=self.frontend, tokenizer=self.tokenizer,
+                                                 is_training=True, **self.kwargs.get("dataset_conf"), data_split_i=data_split_i)
+
         # dataloader
         batch_sampler = self.kwargs["dataset_conf"].get("batch_sampler", "BatchSampler")
         batch_sampler_val = None
@@ -50,7 +64,7 @@
             batch_sampler_val = batch_sampler_class(self.dataset_val, is_training=False, **self.kwargs.get("dataset_conf"))
             batch_sampler["batch_sampler"].set_epoch(epoch)
-            batch_sampler_val.set_epoch(epohc)
+            batch_sampler_val["batch_sampler"].set_epoch(epoch)
         dataloader_tr = torch.utils.data.DataLoader(self.dataset_tr, collate_fn=self.dataset_tr.collator, **batch_sampler)
         dataloader_val = torch.utils.data.DataLoader(self.dataset_val, collate_fn=self.dataset_val.collator, **batch_sampler_val)
--
Gitblit v1.9.1