From 30c40c643c19f6e2ac8679fa76d09d0f9ceccc65 Mon Sep 17 00:00:00 2001
From: chenmengzheAAA <123789350+chenmengzheAAA@users.noreply.github.com>
Date: Thu, 14 Sep 2023 18:00:43 +0800
Subject: [PATCH] Fix exit codes, timestamp handling, and hotword edge cases in paraformer.cpp
---
funasr/runtime/onnxruntime/src/paraformer.cpp | 44 ++++++++++++++++++++++++++++++++------------
 1 file changed, 32 insertions(+), 12 deletions(-)
diff --git a/funasr/runtime/onnxruntime/src/paraformer.cpp b/funasr/runtime/onnxruntime/src/paraformer.cpp
index cbaab2d..887a463 100644
--- a/funasr/runtime/onnxruntime/src/paraformer.cpp
+++ b/funasr/runtime/onnxruntime/src/paraformer.cpp
@@ -41,7 +41,7 @@
LOG(INFO) << "Successfully load model from " << am_model;
} catch (std::exception const &e) {
LOG(ERROR) << "Error when load am onnx model: " << e.what();
- exit(0);
+ exit(-1);
}
string strName;
@@ -93,7 +93,7 @@
LOG(INFO) << "Successfully load model from " << en_model;
} catch (std::exception const &e) {
LOG(ERROR) << "Error when load am encoder model: " << e.what();
- exit(0);
+ exit(-1);
}
try {
@@ -101,7 +101,7 @@
LOG(INFO) << "Successfully load model from " << de_model;
} catch (std::exception const &e) {
LOG(ERROR) << "Error when load am decoder model: " << e.what();
- exit(0);
+ exit(-1);
}
// encoder
@@ -156,7 +156,7 @@
LOG(INFO) << "Successfully load model from " << am_model;
} catch (std::exception const &e) {
LOG(ERROR) << "Error when load am onnx model: " << e.what();
- exit(0);
+ exit(-1);
}
string strName;
@@ -225,7 +225,7 @@
LOG(INFO) << "Successfully load model from " << hw_model;
} catch (std::exception const &e) {
LOG(ERROR) << "Error when load hw compiler onnx model: " << e.what();
- exit(0);
+ exit(-1);
}
string strName;
@@ -292,7 +292,7 @@
ifstream cmvn_stream(filename);
if (!cmvn_stream.is_open()) {
LOG(ERROR) << "Failed to open file: " << filename;
- exit(0);
+ exit(-1);
}
string line;
@@ -438,11 +438,11 @@
}
}
string stamp_str="";
- for (i=0; i<timestamp_list.size(); i++) {
- stamp_str += std::to_string(timestamp_list[i][0]);
+ for (i=0; i<timestamp_merge.size(); i++) {
+ stamp_str += std::to_string(timestamp_merge[i][0]);
stamp_str += ", ";
- stamp_str += std::to_string(timestamp_list[i][1]);
- if(i!=timestamp_list.size()-1){
+ stamp_str += std::to_string(timestamp_merge[i][1]);
+ if(i!=timestamp_merge.size()-1){
stamp_str += ",";
}
}
@@ -475,7 +475,9 @@
if (char_list.back() == "</s>") {
char_list.pop_back();
}
-
+ if (char_list.empty()) {
+ return ;
+ }
vector<vector<float>> timestamp_list;
vector<string> new_char_list;
vector<float> fire_place;
@@ -490,6 +492,9 @@
if(num_peak != (int)char_list.size() + 1){
float sum = std::accumulate(us_alphas.begin(), us_alphas.end(), 0.0f);
float scale = sum/((int)char_list.size() + 1);
+ if(scale == 0){
+ return;
+ }
cif_peak.clear();
sum = 0.0;
for(auto &alpha:us_alphas){
@@ -507,6 +512,11 @@
fire_place.push_back(i + total_offset);
}
}
+ }
+
+ num_peak = fire_place.size();
+ if(fire_place.size() == 0){
+ return;
}
// begin silence
@@ -530,6 +540,10 @@
}
// tail token and end silence
+ if(timestamp_list.size()==0){
+ LOG(ERROR)<<"timestamp_list's size is 0!";
+ return;
+ }
if (num_frames - fire_place.back() > START_END_THRESHOLD) {
float _end = (num_frames + fire_place.back()) / 2.0;
timestamp_list.back()[1] = _end * TIME_RATE;
@@ -658,7 +672,7 @@
return "";
}
- string result;
+ string result="";
try {
auto outputTensor = m_session_->Run(Ort::RunOptions{nullptr}, m_szInputNames.data(), input_onnx.data(), input_onnx.size(), m_szOutputNames.data(), m_szOutputNames.size());
std::vector<int64_t> outputShape = outputTensor[0].GetTensorTypeAndShapeInfo().GetShape();
@@ -719,6 +733,7 @@
std::vector<int32_t> hotword_matrix;
std::vector<int32_t> lengths;
int hotword_size = 1;
+ int real_hw_size = 0;
if (!hotwords.empty()) {
std::vector<std::string> hotword_array = split(hotwords, ' ');
hotword_size = hotword_array.size() + 1;
@@ -735,6 +750,9 @@
chars.insert(chars.end(), tokens.begin(), tokens.end());
}
}
+ if(chars.size()==0){
+ continue;
+ }
std::vector<int32_t> hw_vector(max_hotword_len, 0);
int vector_len = std::min(max_hotword_len, (int)chars.size());
for (int i=0; i<chars.size(); i++) {
@@ -743,8 +761,10 @@
}
std::cout << std::endl;
lengths.push_back(vector_len);
+ real_hw_size += 1;
hotword_matrix.insert(hotword_matrix.end(), hw_vector.begin(), hw_vector.end());
}
+ hotword_size = real_hw_size + 1;
}
std::vector<int32_t> blank_vec(max_hotword_len, 0);
blank_vec[0] = 1;
--
Gitblit v1.9.1