| funasr/models/ct_transformer/model.py | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 |
funasr/models/ct_transformer/model.py
@@ -287,9 +287,7 @@
         # y, _ = self.wrapped_model(**data)
         y, _ = self.punc_forward(**data)
         _, indices = y.view(-1, y.shape[-1]).topk(1, dim=1)
-        punctuations = indices
-        if indices.size()[0] != 1:
-            punctuations = torch.squeeze(indices)
+        punctuations = torch.squeeze(indices, dim=1)
         assert punctuations.size()[0] == len(mini_sentence)
         # Search for the last Period/QuestionMark as cache