# funasr/bin/inference.py @@ -222,6 +222,7 @@  -- diff-hunk header preserved from the collapsed patch fragment.
# Fragment of an inference routine: runs model.generate() on a prepared batch,
# bracketed by perf_counter timestamps (elapsed time is presumably computed
# downstream as time2 - time1 -- TODO confirm against the surrounding code).

# NOTE(review): assumes `input_len` holds the lengths matching the inputs
# already placed in `batch` -- verify against the (unseen) preceding lines.
batch["data_lengths"] = input_len
time1 = time.perf_counter()  # timing start, just before inference
# Inference only: disable autograd tracking for the generate() call.
with torch.no_grad():
    results, meta_data = model.generate(**batch, **kwargs)
time2 = time.perf_counter()  # timing end, just after inference