From 23a749f956affdfe078f8b8eeaa59b830efb5ff7 Mon Sep 17 00:00:00 2001
From: 嘉渊 <wangjiaming.wjm@alibaba-inc.com>
Date: Thu, 11 May 2023 17:55:08 +0800
Subject: [PATCH] update repo
---
egs/aishell/data2vec_transformer_finetune/conf/train_asr_transformer_12e_6d_3072_768.yaml | 39 +++++++++++++++++++++++++++------------
 1 file changed, 27 insertions(+), 12 deletions(-)
diff --git a/egs/aishell/data2vec_transformer_finetune/conf/train_asr_transformer_12e_6d_3072_768.yaml b/egs/aishell/data2vec_transformer_finetune/conf/train_asr_transformer_12e_6d_3072_768.yaml
index 5bc5236..ad3ad2e 100644
--- a/egs/aishell/data2vec_transformer_finetune/conf/train_asr_transformer_12e_6d_3072_768.yaml
+++ b/egs/aishell/data2vec_transformer_finetune/conf/train_asr_transformer_12e_6d_3072_768.yaml
@@ -30,25 +30,28 @@
require_same_masks: true
mask_dropout: 0
+# frontend related
+frontend: wav_frontend
+frontend_conf:
+ fs: 16000
+ window: hamming
+ n_mels: 80
+ frame_length: 25
+ frame_shift: 10
+ lfr_m: 1
+ lfr_n: 1
+
# hybrid CTC/attention
model_conf:
ctc_weight: 1.0
lsm_weight: 0.1 # label smoothing option
length_normalized_loss: false
-# for logger
-log_interval: 50
-
-# minibatch related
-batch_type: length
-batch_bins: 16000
-num_workers: 16
-
# optimization related
accum_grad: 1
grad_clip: 5
patience: none
-max_epoch: 50
+max_epoch: 150
val_scheduler_criterion:
- valid
- acc
@@ -57,8 +60,6 @@
- cer_ctc
- min
keep_nbest_models: 10
-unused_parameters: true
-normalize: None
# NoamLR is deprecated. Use WarmupLR.
# The following is equivalent setting for NoamLR:
@@ -92,4 +93,18 @@
time_mask_width_range:
- 0
- 40
- num_time_mask: 2
\ No newline at end of file
+ num_time_mask: 2
+
+dataset_conf:
+ shuffle: true
+ shuffle_conf:
+ shuffle_size: 2048
+ sort_size: 500
+ batch_conf:
+ batch_type: token
+ batch_size: 25000
+ num_workers: 8
+
+log_interval: 50
+unused_parameters: true
+normalize: None
\ No newline at end of file
--
Gitblit v1.9.1