From c1e365fea09aafda387cac12fdff43d28c598979 Mon Sep 17 00:00:00 2001
From: BienBoy <92378515+BienBoy@users.noreply.github.com>
Date: Sat, 01 Feb 2025 23:29:34 +0800
Subject: [PATCH] fix: resolve unexpected 'out of memory' issue in multi-GPU setup (#2373)
---
examples/industrial_data_pretraining/contextual_paraformer/finetune.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/examples/industrial_data_pretraining/contextual_paraformer/finetune.sh b/examples/industrial_data_pretraining/contextual_paraformer/finetune.sh
index f82a67d..b8b649e 100644
--- a/examples/industrial_data_pretraining/contextual_paraformer/finetune.sh
+++ b/examples/industrial_data_pretraining/contextual_paraformer/finetune.sh
@@ -48,14 +48,14 @@
mkdir -p ${output_dir}
echo "log_file: ${log_file}"
-deepspeed_config=${workspace}../../ds_stage1.json
+deepspeed_config=${workspace}/../../ds_stage1.json
DISTRIBUTED_ARGS="
--nnodes ${WORLD_SIZE:-1} \
--nproc_per_node $gpu_num \
--node_rank ${RANK:-0} \
--master_addr ${MASTER_ADDR:-127.0.0.1} \
- --master_port ${MASTER_PORT: 26669}
+ --master_port ${MASTER_PORT:-26669}
"
echo $DISTRIBUTED_ARGS
--
Gitblit v1.9.1