From 526c810bd7ab9fdab38d5ce279a064b8e5da8bdd Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Fri, 07 Jun 2024 18:44:14 +0800
Subject: [PATCH] auto frontend

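Log the offending sample when input_ids exceeds max_token_length before it is
skipped as a bad case, and guard the encoder-output splice in llm_asr: if
copying encoder_out into inputs_embeds using fbank_fake_lens fails, log the
shapes and retry with the measured encoder_out_lens.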
---
 funasr/models/llm_asr/model.py              |   21 ++++++++++++++++-----
 funasr/datasets/openai_datasets/datasets.py |    3 +++
 2 files changed, 19 insertions(+), 5 deletions(-)

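Note (below the --- line, so not part of the commit message): a minimal
standalone sketch of the fallback added to model.py, assuming toy tensor
shapes. The function name splice_fbank_embeds and the demo dimensions are
hypothetical and for illustration only, not FunASR API.

import logging
import torch

def splice_fbank_embeds(inputs_embeds, encoder_out, encoder_out_lens,
                        fbank_fake_lens, fbank_beg):
    """Copy encoder frames into the LLM input embeddings per batch item.

    If fbank_fake_lens is longer than the real encoder output, the slice
    assignment raises a shape-mismatch error; in that case retry with the
    measured encoder_out_lens, as in the patched model.py."""
    for batch_idx in range(inputs_embeds.shape[0]):
        fbank_fake_len = fbank_fake_lens[batch_idx].item()
        fbank_beg_idx = fbank_beg[batch_idx, 0].item()
        min_len = min(fbank_fake_len, inputs_embeds.shape[1] - fbank_beg_idx)
        try:
            inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = (
                encoder_out[batch_idx, :min_len, :]
            )
        except Exception:
            # Stale/oversized fbank_fake_len: log and fall back to encoder_out_lens.
            logging.exception("fbank splice failed, retrying with encoder_out_lens")
            min_len = min(encoder_out_lens[batch_idx].item(),
                          inputs_embeds.shape[1] - fbank_beg_idx)
            inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = (
                encoder_out[batch_idx, :min_len, :]
            )
    return inputs_embeds

if __name__ == "__main__":
    # Toy shapes: the second item has a stale fbank_fake_len (9 > 6 encoder frames),
    # so it exercises the fallback path.
    B, T_text, T_enc, D = 2, 16, 6, 8
    inputs_embeds = torch.zeros(B, T_text, D)
    encoder_out = torch.randn(B, T_enc, D)
    encoder_out_lens = torch.tensor([6, 4])
    fbank_fake_lens = torch.tensor([6, 9])
    fbank_beg = torch.tensor([[2], [3]])
    splice_fbank_embeds(inputs_embeds, encoder_out, encoder_out_lens,
                        fbank_fake_lens, fbank_beg)
    print("splice ok:", inputs_embeds.shape)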
diff --git a/funasr/datasets/openai_datasets/datasets.py b/funasr/datasets/openai_datasets/datasets.py
index 46735ba..0d12a1c 100644
--- a/funasr/datasets/openai_datasets/datasets.py
+++ b/funasr/datasets/openai_datasets/datasets.py
@@ -155,6 +155,9 @@
                 fbank_beg.append(fbank_beg_i)
 
             if len(input_ids) > self.max_token_length:
+                logging.info(
+                    f"input_ids > max_token_length: {len(input_ids)}>{self.max_token_length}, {item}"
+                )
                 badcase_flag = True
             if badcase_flag:
                 continue
diff --git a/funasr/models/llm_asr/model.py b/funasr/models/llm_asr/model.py
index d94058c..a6a7134 100644
--- a/funasr/models/llm_asr/model.py
+++ b/funasr/models/llm_asr/model.py
@@ -485,13 +485,24 @@
         # _, l, _ = encoder_out.shape
         for batch_idx in range(batch_size):
 
-            l = fbank_fake_lens[batch_idx].item()
+            fbank_fake_len = fbank_fake_lens[batch_idx].item()
             fbank_beg_idx = fbank_beg[batch_idx, 0].item()
-            min_len = min(l, inputs_embeds.shape[1] - fbank_beg_idx)
+            min_len = min(fbank_fake_len, inputs_embeds.shape[1] - fbank_beg_idx)
+            try:
+                inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = encoder_out[
+                    batch_idx, :min_len, :
+                ]
+            except Exception as e:
+                logging.error(f"{str(e)}, {traceback.format_exc()}")
+                logging.info(
+                    f"batch_idx: {batch_idx}, inputs_embeds: {inputs_embeds.shape}, fbank_beg_idx: {fbank_beg_idx}, min_len: {min_len}, fbank_fake_len: {fbank_fake_len}"
+                )
+                fbank_fake_len = encoder_out_lens[batch_idx].item()
+                min_len = min(fbank_fake_len, inputs_embeds.shape[1] - fbank_beg_idx)
+                inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = encoder_out[
+                    batch_idx, :min_len, :
+                ]
 
-            inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = encoder_out[
-                batch_idx, :min_len, :
-            ]
         labels_ids[labels_ids == -1] = -100
         model_outputs = self.llm(
             inputs_embeds=inputs_embeds, attention_mask=attention_mask, labels=labels_ids

--
Gitblit v1.9.1