Shi Xian
2024-06-18 6c467e6f0abfc6d20d0621fbbf67b4dbd81776cc
funasr/models/llm_asr/model.py
@@ -166,8 +166,6 @@
                text: (Batch, Length)
                text_lengths: (Batch,)
        """
        # import pdb;
        # pdb.set_trace()
        if len(text_lengths.size()) > 1:
            text_lengths = text_lengths[:, 0]
        if len(speech_lengths.size()) > 1:
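
For context, the guard in this hunk collapses length tensors that arrive as (Batch, 1) down to the expected (Batch,) shape by keeping only the first column; the same pattern is applied to both `text_lengths` and `speech_lengths` (the second `if` is cut off by the hunk boundary). A minimal standalone sketch — tensor values are illustrative, not from FunASR:

```python
# Sketch of the length-normalization guard: some collators emit lengths as
# (Batch, 1) rather than (Batch,), so only the first column is kept.
import torch

text_lengths = torch.tensor([[7], [4], [9]])  # (Batch, 1) from a collator
if len(text_lengths.size()) > 1:
    text_lengths = text_lengths[:, 0]         # -> (Batch,): tensor([7, 4, 9])
assert text_lengths.shape == (3,)
```
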
@@ -717,9 +715,9 @@
        dtype_map = {"bf16": torch.bfloat16, "fp16": torch.float16, "fp32": torch.float32}
        with torch.cuda.amp.autocast(dtype=dtype_map[llm_dtype]):
            label = contents["assistant"][0]
            self.llm = self.llm.to(dtype_map[llm_dtype])
            inputs_embeds = inputs_embeds.to(dtype_map[llm_dtype])
            attention_mask = attention_mask.to(dtype_map[llm_dtype])
            # self.llm = self.llm.to(dtype_map[llm_dtype])
            # inputs_embeds = inputs_embeds.to(dtype_map[llm_dtype])
            if not kwargs.get("tearchforing", False):  # note: "tearchforing" [sic] is the actual kwarg key (a typo for teacher forcing)
                generated_ids = self.llm.generate(
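
This hunk casts the LLM, the fused `inputs_embeds`, and the `attention_mask` to the dtype selected by `llm_dtype`, then decodes under autocast (the `generate(...)` call itself is truncated by the hunk boundary). Casting an attention mask to a floating-point dtype is unusual — Hugging Face models accept integer/bool masks — which may be why the next hunk keeps that cast commented out. Below is a minimal sketch of the cast-then-autocast pattern; `ToyLM` is a hypothetical stand-in for `self.llm`, not FunASR's model:

```python
# Sketch of the dtype-cast + autocast pattern above, with a toy module
# in place of self.llm. Shapes and names are illustrative assumptions.
import torch

class ToyLM(torch.nn.Module):
    def __init__(self, dim=8, vocab=16):
        super().__init__()
        self.proj = torch.nn.Linear(dim, vocab)

    def forward(self, inputs_embeds):
        return self.proj(inputs_embeds)

llm_dtype = "fp16"
dtype_map = {"bf16": torch.bfloat16, "fp16": torch.float16, "fp32": torch.float32}

llm = ToyLM()
inputs_embeds = torch.randn(1, 5, 8)

if torch.cuda.is_available():
    llm = llm.cuda().to(dtype_map[llm_dtype])                   # cast weights once
    inputs_embeds = inputs_embeds.cuda().to(dtype_map[llm_dtype])
    with torch.cuda.amp.autocast(dtype=dtype_map[llm_dtype]):
        logits = llm(inputs_embeds)                             # runs in fp16/bf16
else:
    logits = llm(inputs_embeds)                                 # fp32 fallback on CPU
print(logits.dtype)
```
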
@@ -739,6 +737,7 @@
                labels_ids = batch["labels_ids"]
                labels_ids[labels_ids == -1] = -100
                attention_mask = batch.get("attention_mask", None)
                # attention_mask = attention_mask.to(dtype_map[llm_dtype])
                model_outputs = self.llm(
                    inputs_embeds=inputs_embeds, attention_mask=attention_mask, labels=labels_ids
                )
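
The teacher-forced branch above remaps padding labels from -1 to -100 before handing them to the LLM; -100 is the default `ignore_index` of `torch.nn.CrossEntropyLoss`, and Hugging Face causal LMs use the same convention for the `labels` tensor, so padded positions contribute nothing to the loss. A minimal sketch of the remapping with toy tensors (shapes are illustrative, not FunASR's batch):

```python
# Sketch: why labels are remapped from -1 to -100 before the loss is computed.
# Positions equal to -100 are skipped by cross-entropy's ignore_index, so
# padding does not contribute to the teacher-forcing loss.
import torch

vocab = 16
logits = torch.randn(2, 4, vocab)                 # (Batch, Length, Vocab)
labels_ids = torch.tensor([[3, 7, -1, -1],
                           [5, 2, 9, -1]])        # -1 marks padding
labels_ids[labels_ids == -1] = -100               # match CE ignore_index

loss = torch.nn.functional.cross_entropy(
    logits.view(-1, vocab), labels_ids.view(-1), ignore_index=-100
)
print(loss)
```
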