From 7012ca2efc130103c4acd24e3678c7ae280f8db4 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: 星期三, 13 十二月 2023 20:08:55 +0800
Subject: [PATCH] funasr2: paraformer, bicif_paraformer, contextual_paraformer

---
 funasr/bin/train.py |    7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/funasr/cli/train_cli.py b/funasr/bin/train.py
similarity index 96%
rename from funasr/cli/train_cli.py
rename to funasr/bin/train.py
index a22d5d4..4187476 100644
--- a/funasr/cli/train_cli.py
+++ b/funasr/bin/train.py
@@ -19,18 +19,13 @@
 # from funasr.tokenizer.token_id_converter import TokenIDConverter
 from funasr.tokenizer.funtoken import build_tokenizer
 from funasr.datasets.dataset_jsonl import AudioDataset
-from funasr.cli.trainer import Trainer
+from funasr.utils.trainer import Trainer
 # from funasr.utils.load_fr_py import load_class_from_path
 from funasr.utils.dynamic_import import dynamic_import
 import torch.distributed as dist
 from torch.nn.parallel import DistributedDataParallel as DDP
 from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
 from funasr.utils.download_from_hub import download_model
-
-def preprocess_config(cfg: DictConfig):
-	for key, value in cfg.items():
-		if value == 'None':
-			cfg[key] = None
 
 @hydra.main(config_name=None, version_base=None)
 def main_hydra(kwargs: DictConfig):

--
Gitblit v1.9.1