From 834a8fd9e2e9d22034ee78ecb5a405c02a25b2eb Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Fri, 07 Jun 2024 19:27:08 +0800
Subject: [PATCH] auto frontend
---
funasr/models/llm_asr/model.py | 28 +++++++++++++++++++++++-----
 1 file changed, 23 insertions(+), 5 deletions(-)
diff --git a/funasr/models/llm_asr/model.py b/funasr/models/llm_asr/model.py
index 66a69eb..82ad134 100644
--- a/funasr/models/llm_asr/model.py
+++ b/funasr/models/llm_asr/model.py
@@ -480,13 +480,31 @@
inputs_embeds = self.llm.model.get_input_embeddings()(input_ids)
batch_size, token_num, dims = inputs_embeds.shape
- _, l, _ = encoder_out.shape
+ fbank_mask[fbank_mask < 0] = 0
+ fbank_fake_lens = fbank_mask.sum(-1).to(torch.int32)
+ # _, l, _ = encoder_out.shape
for batch_idx in range(batch_size):
+
+ fbank_fake_len = fbank_fake_lens[batch_idx].item()
fbank_beg_idx = fbank_beg[batch_idx, 0].item()
- min_len = min(l, inputs_embeds.shape[1] - fbank_beg_idx)
- inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = encoder_out[
- batch_idx, :min_len, :
- ]
+ min_len = min(fbank_fake_len, inputs_embeds.shape[1] - fbank_beg_idx)
+ fbank_fake_len = encoder_out_lens[batch_idx].item()
+ min_len = min(fbank_fake_len, inputs_embeds.shape[1] - fbank_beg_idx)
+ try:
+ inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = encoder_out[
+ batch_idx, :min_len, :
+ ]
+ except Exception as e:
+ logging.error(f"{str(e)}, {traceback.format_exc()}")
+ logging.info(
+ f"batch_idx: {batch_idx}, inputs_embeds: {inputs_embeds.shape}, fbank_beg_idx: {fbank_beg_idx}, min_len: {min_len}, fbank_fake_len: {fbank_fake_len}"
+ )
+ fbank_fake_len = encoder_out_lens[batch_idx].item()
+ min_len = min(fbank_fake_len, inputs_embeds.shape[1] - fbank_beg_idx)
+ inputs_embeds[batch_idx, fbank_beg_idx : fbank_beg_idx + min_len, :] = encoder_out[
+ batch_idx, :min_len, :
+ ]
+
labels_ids[labels_ids == -1] = -100
model_outputs = self.llm(
inputs_embeds=inputs_embeds, attention_mask=attention_mask, labels=labels_ids
--
Gitblit v1.9.1