游雁
2024-01-08 fb176404cfeb40c053f4f42d01eb45c185d21ce2
funasr/bin/inference.py
@@ -222,7 +222,8 @@
            # NOTE(review): presumably the per-sample (unpadded) lengths for the
            # padded batch tensor — confirm against the data-loading code above.
            batch["data_lengths"] = input_len
      
         time1 = time.perf_counter()
         results, meta_data = model.generate(**batch, **kwargs)
         with torch.no_grad():
            results, meta_data = model.generate(**batch, **kwargs)
         time2 = time.perf_counter()
         
         asr_result_list.extend(results)