From 92b14aaa2a98e2872f1c8b62551ecaf82bc5688a Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Mon, 01 Jul 2024 11:25:23 +0800
Subject: [PATCH] train_ds: fix download_model import path and enable GradScaler for bf16

---
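Note: this patch fixes the import path for download_model and enables the
AMP GradScaler whenever fp16 or bf16 training is requested, rather than
only for fp16. Below is a minimal sketch of how such a scaler typically
drives a training step; the use_fp16/use_bf16 flags mirror the Trainer
attributes touched by this patch, while the model, optimizer, and batches
are illustrative stand-ins, not FunASR's Trainer internals:

    import torch
    from torch.cuda.amp import GradScaler, autocast

    # Hypothetical stand-ins; train_ds.py reads these off its Trainer object.
    use_fp16, use_bf16 = True, False
    model = torch.nn.Linear(8, 1).cuda()
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    batches = [torch.randn(4, 8, device="cuda") for _ in range(3)]

    scaler = GradScaler(enabled=True) if use_fp16 or use_bf16 else None
    amp_dtype = torch.float16 if use_fp16 else torch.bfloat16

    for x in batches:
        optimizer.zero_grad()
        with autocast(dtype=amp_dtype, enabled=scaler is not None):
            loss = model(x).pow(2).mean()
        if scaler is not None:
            scaler.scale(loss).backward()  # scale loss to limit fp16 gradient underflow
            scaler.step(optimizer)         # unscales grads, then calls optimizer.step()
            scaler.update()                # adapts the scale factor for the next step
        else:
            loss.backward()
            optimizer.step()

Since bf16 shares fp32's exponent range, loss scaling is usually a no-op
safety net there; the scaler mainly matters for fp16.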
 funasr/bin/train_ds.py |    6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/funasr/bin/train_ds.py b/funasr/bin/train_ds.py
index 41ecbe4..415904e 100644
--- a/funasr/bin/train_ds.py
+++ b/funasr/bin/train_ds.py
@@ -27,7 +27,7 @@
 from funasr.train_utils.trainer_ds import Trainer
 from funasr.schedulers import scheduler_classes
 from funasr.train_utils.initialize import initialize
-from funasr.download.download_from_hub import download_model
+from funasr.download.download_model_from_hub import download_model
 from funasr.models.lora.utils import mark_only_lora_as_trainable
 from funasr.train_utils.set_all_random_seed import set_all_random_seed
 from funasr.train_utils.load_pretrained_model import load_pretrained_model
@@ -83,6 +83,8 @@
         logging.info(f"use_ddp: {use_ddp}, use_fsdp: {use_fsdp}")
         dist.init_process_group(backend=kwargs.get("backend", "nccl"), init_method="env://")
         torch.cuda.set_device(local_rank)
+
+    # rank = dist.get_rank()
 
     logging.info("Build model, frontend, tokenizer")
     device = kwargs.get("device", "cuda")
@@ -144,7 +146,7 @@
     dataloader = dataloader_class(**kwargs)
     # dataloader_tr, dataloader_val = dataloader_class(**kwargs)
 
-    scaler = GradScaler(enabled=trainer.use_fp16) if trainer.use_fp16 else None
+    scaler = GradScaler(enabled=True) if trainer.use_fp16 or trainer.use_bf16 else None
     scaler = ShardedGradScaler(enabled=trainer.use_fp16) if trainer.use_fsdp else scaler
 
     trainer.resume_checkpoint(

--
Gitblit v1.9.1