From fc08b62d05723cdc1ce021bb8ba044ca014fb1f7 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: 星期一, 13 三月 2023 18:38:41 +0800
Subject: [PATCH] readme

---
 funasr/runtime/python/grpc/grpc_server.py |    6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/funasr/runtime/python/grpc/grpc_server.py b/funasr/runtime/python/grpc/grpc_server.py
index 2d03f9d..95fe96c 100644
--- a/funasr/runtime/python/grpc/grpc_server.py
+++ b/funasr/runtime/python/grpc/grpc_server.py
@@ -5,7 +5,6 @@
 
 import paraformer_pb2_grpc
 from paraformer_pb2 import Response
-from utils.frontend import load_bytes
 
 
 class ASRServicer(paraformer_pb2_grpc.ASRServicer):
@@ -25,9 +24,9 @@
             self.inference_16k_pipeline = pipeline(task=Tasks.auto_speech_recognition, model=model)
         elif self.backend == "onnxruntime":
             try:
-                from paraformer_onnx import Paraformer
+                from rapid_paraformer.paraformer_onnx import Paraformer
             except ImportError:
-                raise ImportError(f"Please install onnxruntime requirements, reference https://github.com/alibaba-damo-academy/FunASR/tree/main/funasr/runtime/python/onnxruntime/rapid_paraformer")
+                raise ImportError(f"Please install onnxruntime environment")
             self.inference_16k_pipeline = Paraformer(model_dir=onnx_dir)
         self.sample_rate = sample_rate
 
@@ -110,6 +109,7 @@
                             else:
                                 asr_result = ""
                         elif self.backend == "onnxruntime":
+                            from rapid_paraformer.utils.frontend import load_bytes
                             array = load_bytes(tmp_data)
                             asr_result = self.inference_16k_pipeline(array)[0]
                         end_time = int(round(time.time() * 1000))

--
Gitblit v1.9.1