From a85312c0ebdf0fbbda29868baddc10ef6e25760a Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Sun, 24 Mar 2024 15:29:14 +0800
Subject: [PATCH] finetune
---
funasr/bin/train.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/funasr/bin/train.py b/funasr/bin/train.py
index e446e54..d19b79a 100644
--- a/funasr/bin/train.py
+++ b/funasr/bin/train.py
@@ -150,8 +150,8 @@
# dataset
logging.info("Build dataloader")
dataloader_class = tables.dataloader_classes.get(kwargs["dataset_conf"].get("dataloader", "DataloaderMapStyle"))
- # dataloader = dataloader_class(**kwargs)
- dataloader_tr, dataloader_val = dataloader_class(**kwargs)
+ dataloader = dataloader_class(**kwargs)
+ # dataloader_tr, dataloader_val = dataloader_class(**kwargs)
trainer = Trainer(local_rank=local_rank,
use_ddp=use_ddp,
use_fsdp=use_fsdp,
@@ -173,15 +173,15 @@
except:
writer = None
- if use_ddp or use_fsdp:
- context = Join([model])
- else:
- context = nullcontext()
-
+ # if use_ddp or use_fsdp:
+ # context = Join([model])
+ # else:
+ # context = nullcontext()
+ context = nullcontext()
for epoch in range(trainer.start_epoch, trainer.max_epoch + 1):
time1 = time.perf_counter()
with context:
- # dataloader_tr, dataloader_val = dataloader.build_iter(epoch)
+ dataloader_tr, dataloader_val = dataloader.build_iter(epoch)
trainer.train_epoch(
model=model,
optim=optim,
--
Gitblit v1.9.1