From 3eee773814c392e497557bbad501e0add4c8eca9 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Sun, 09 Jun 2024 02:11:42 +0800
Subject: [PATCH] fix attention_mask and over-sized batch handling in openai_datasets

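Changes to funasr/datasets/openai_datasets/datasets.py:

* Build attention_mask with one entry per token instead of a single
  length value, and log samples whose input_ids exceed max_token_length
  before they are skipped.
* Replace the hard-coded factor of 2 in the over-sized batch check with
  a configurable batch_size_scale_ratio_max (default 1.5).

A minimal sketch of the attention_mask fix, using made-up token values
rather than real funasr data:

    import torch

    input_ids = torch.tensor([11, 22, 33, 44], dtype=torch.int64)

    # old (buggy): a one-element tensor holding the sequence length
    attention_mask_old = torch.tensor([len(input_ids)], dtype=torch.int32)   # shape (1,)

    # new: one mask value per token
    attention_mask_new = torch.tensor([1] * len(input_ids), dtype=torch.int32)  # shape (4,)

    assert attention_mask_new.shape == input_ids.shape

When b * t exceeds batch_size * batch_size_scale_ratio_max, the loader
keeps every second sample: with beg drawn from {0, 1},
samples[beg : beg + b : 2] retains roughly half of the batch.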
---
 funasr/datasets/openai_datasets/datasets.py |   14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/funasr/datasets/openai_datasets/datasets.py b/funasr/datasets/openai_datasets/datasets.py
index 8cb0926..9c74ef1 100644
--- a/funasr/datasets/openai_datasets/datasets.py
+++ b/funasr/datasets/openai_datasets/datasets.py
@@ -62,6 +62,7 @@
         self.pattern = re.compile(r"(<\|startofspeech\|>.*?<\|endofspeech\|>)")
         # self.kwargs = kwargs
         self.max_token_length = kwargs.get("max_token_length", 1024)
+        self.batch_size_scale_ratio_max = kwargs.get("batch_size_scale_ratio_max", 1.5)
 
     def get_source_len(self, index):
         item = self.index_ds[index]
@@ -154,12 +155,15 @@
                 fbank_beg.append(fbank_beg_i)
 
             if len(input_ids) > self.max_token_length:
+                logging.info(
+                    f"input_ids > max_token_length: {len(input_ids)}>{self.max_token_length}, {item}"
+                )
                 badcase_flag = True
             if badcase_flag:
                 continue
-            input_ids = torch.tensor(input_ids, dtype=torch.int64)
-            attention_mask = torch.tensor([len(input_ids)], dtype=torch.int32)
-            labels = torch.tensor(labels, dtype=torch.int64)
+            input_ids = torch.tensor(input_ids, dtype=torch.int64)  # [: self.max_token_length]
+            attention_mask = torch.tensor([1] * len(input_ids), dtype=torch.int32)
+            labels = torch.tensor(labels, dtype=torch.int64)  # [: self.max_token_length]
 
             fbank = speech[0, :, :]
             fbank_lens = speech_lengths
@@ -207,12 +211,12 @@
 
             if self.batch_type != "example":
                 b, t = outputs["input_ids"].shape
-                if b * t > self.batch_size * 2:
+                if b * t > self.batch_size * self.batch_size_scale_ratio_max:
                     beg = torch.randint(0, 2, ()).item()
                     if b < 2:
                         beg = 0
                     logging.info(
-                        f"Warning, b * t: {b * t} > {self.batch_size}, b: {b}, t: {t}, drop half data {idx}th, beg:{beg}"
+                        f"Warning, b * t: {b * t} > {self.batch_size_scale_ratio_max} * {self.batch_size}, b: {b}, t: {t}, drop half data {idx}th, beg:{beg}"
                     )
                     samples = samples[beg : beg + b : 2]
                     continue

--
Gitblit v1.9.1