From acf8b4d3dc590121d1297a82ccbb8ad87e9935d9 Mon Sep 17 00:00:00 2001
From: 雾聪 <wucong.lyb@alibaba-inc.com>
Date: Wed, 13 Mar 2024 17:30:39 +0800
Subject: [PATCH] fix redefinition of jieba StdExtension.hpp
---
runtime/onnxruntime/src/offline-stream.cpp | 23 +++++++++++++++++------
 1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/runtime/onnxruntime/src/offline-stream.cpp b/runtime/onnxruntime/src/offline-stream.cpp
index 2709ca6..9cdcdd2 100644
--- a/runtime/onnxruntime/src/offline-stream.cpp
+++ b/runtime/onnxruntime/src/offline-stream.cpp
@@ -1,7 +1,7 @@
#include "precomp.h"
namespace funasr {
-OfflineStream::OfflineStream(std::map<std::string, std::string>& model_path, int thread_num)
+OfflineStream::OfflineStream(std::map<std::string, std::string>& model_path, int thread_num, bool use_gpu)
{
// VAD model
if(model_path.find(VAD_DIR) != model_path.end()){
@@ -35,7 +35,12 @@
string hw_compile_model_path;
string seg_dict_path;
- asr_handle = make_unique<Paraformer>();
+ if(use_gpu){
+ asr_handle = make_unique<ParaformerTorch>();
+ }else{
+ asr_handle = make_unique<Paraformer>();
+ }
+
bool enable_hotword = false;
hw_compile_model_path = PathAppend(model_path.at(MODEL_DIR), MODEL_EB_NAME);
seg_dict_path = PathAppend(model_path.at(MODEL_DIR), MODEL_SEG_DICT);
@@ -63,10 +68,16 @@
// Lm resource
if (model_path.find(LM_DIR) != model_path.end() && model_path.at(LM_DIR) != "") {
- string fst_path, lm_config_path, hws_path;
+ string fst_path, lm_config_path, lex_path;
fst_path = PathAppend(model_path.at(LM_DIR), LM_FST_RES);
lm_config_path = PathAppend(model_path.at(LM_DIR), LM_CONFIG_NAME);
- asr_handle->InitLm(fst_path, lm_config_path);
+ lex_path = PathAppend(model_path.at(LM_DIR), LEX_PATH);
+ if (access(lex_path.c_str(), F_OK) != 0 )
+ {
+ LOG(ERROR) << "Lexicon.txt file is not exist, please use the latest version. Skip load LM model.";
+ }else{
+ asr_handle->InitLm(fst_path, lm_config_path, lex_path);
+ }
}
// PUNC model
@@ -109,10 +120,10 @@
#endif
}
-OfflineStream *CreateOfflineStream(std::map<std::string, std::string>& model_path, int thread_num)
+OfflineStream *CreateOfflineStream(std::map<std::string, std::string>& model_path, int thread_num, bool use_gpu)
{
OfflineStream *mm;
- mm = new OfflineStream(model_path, thread_num);
+ mm = new OfflineStream(model_path, thread_num, use_gpu);
return mm;
}
--
Gitblit v1.9.1