Commit bdf924394027b4336e0786c630249b34943e5392 (bdf92439)
Parent: 13260cdf
Authored by Fangjun Kuang, 2024-03-06 14:40:23 +0800
Committed by GitHub, 2024-03-06 14:40:23 +0800

Allow to not use pre-installed onnxruntime libs. (#636)
Showing 3 changed files with 58 additions and 48 deletions
CMakeLists.txt
cmake/onnxruntime.cmake
sherpa-onnx/csrc/transducer-keyword-decoder.cc
CMakeLists.txt
@@ -26,6 +26,7 @@ option(SHERPA_ONNX_ENABLE_WASM_ASR "Whether to enable WASM for ASR" OFF)
 option(SHERPA_ONNX_ENABLE_WASM_NODEJS "Whether to enable WASM for NodeJS" OFF)
 option(SHERPA_ONNX_ENABLE_BINARY "Whether to build binaries" ON)
 option(SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY "True to link libstdc++ statically. Used only when BUILD_SHARED_LIBS is OFF on Linux" ON)
+option(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE "True to use pre-installed onnxruntime if available" ON)
 set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")
 set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")
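The option added above defaults to ON, so existing builds keep probing for a system onnxruntime; passing -DSHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE=OFF at configure time forces the download path instead. A minimal sketch (not part of this commit; the sherpa-onnx directory name is an assumption) of how a superproject that vendors sherpa-onnx could pin the option before adding it:

# Hypothetical superproject fragment: ignore any system-wide onnxruntime
# and always let sherpa-onnx download its own pre-compiled copy.
set(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE OFF CACHE BOOL "" FORCE)
add_subdirectory(sherpa-onnx)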
@@ -110,6 +111,7 @@ message(STATUS "SHERPA_ONNX_ENABLE_WASM ${SHERPA_ONNX_ENABLE_WASM}")
 message(STATUS "SHERPA_ONNX_ENABLE_WASM_TTS ${SHERPA_ONNX_ENABLE_WASM_TTS}")
 message(STATUS "SHERPA_ONNX_ENABLE_WASM_ASR ${SHERPA_ONNX_ENABLE_WASM_ASR}")
 message(STATUS "SHERPA_ONNX_ENABLE_WASM_NODEJS ${SHERPA_ONNX_ENABLE_WASM_NODEJS}")
+message(STATUS "SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE ${SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE}")

 if(SHERPA_ONNX_ENABLE_WASM_TTS)
   if(NOT SHERPA_ONNX_ENABLE_WASM)
cmake/onnxruntime.cmake
@@ -117,67 +117,69 @@ function(download_onnxruntime)
   set(onnxruntime_SOURCE_DIR ${onnxruntime_SOURCE_DIR} PARENT_SCOPE)
 endfunction()

-# First, we try to locate the header and the lib if the use has already
-# installed onnxruntime. Otherwise, we will download the pre-compiled lib
-
-message(STATUS "CMAKE_SYSTEM_NAME: ${CMAKE_SYSTEM_NAME}")
-message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}")
-
-if(DEFINED ENV{SHERPA_ONNXRUNTIME_INCLUDE_DIR})
-  set(location_onnxruntime_header_dir $ENV{SHERPA_ONNXRUNTIME_INCLUDE_DIR})
-  include_directories(${location_onnxruntime_header_dir})
-else()
-  find_path(location_onnxruntime_header_dir onnxruntime_cxx_api.h
-    PATHS
-      /usr/include
-      /usr/local/include
-  )
-endif()
-
-message(STATUS "location_onnxruntime_header_dir: ${location_onnxruntime_header_dir}")
-
-if(DEFINED ENV{SHERPA_ONNXRUNTIME_LIB_DIR})
-  if(APPLE)
-    set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.dylib)
-  else()
-    set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.so)
-  endif()
-  if(NOT EXISTS ${location_onnxruntime_lib})
-    set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
-    if(NOT EXISTS ${location_onnxruntime_lib})
-      message(FATAL_ERROR "${location_onnxruntime_lib} cannot be found")
-    endif()
-    set(onnxruntime_lib_files $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
-    message("Use static lib: ${onnxruntime_lib_files}")
-  endif()
-
-  if(SHERPA_ONNX_ENABLE_GPU)
-    set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.so)
-    if(NOT EXISTS ${location_onnxruntime_cuda_lib})
-      set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.a)
-    endif()
-  endif()
-else()
-  find_library(location_onnxruntime_lib onnxruntime
-    PATHS
-      /lib
-      /usr/lib
-      /usr/local/lib
-  )
-
-  if(SHERPA_ONNX_ENABLE_GPU)
-    find_library(location_onnxruntime_cuda_lib onnxruntime_providers_cuda
-      PATHS
-        /lib
-        /usr/lib
-        /usr/local/lib
-    )
-  endif()
-endif()
-
-message(STATUS "location_onnxruntime_lib: ${location_onnxruntime_lib}")
-if(SHERPA_ONNX_ENABLE_GPU)
-  message(STATUS "location_onnxruntime_cuda_lib: ${location_onnxruntime_cuda_lib}")
-endif()
+if(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE)
+  # First, we try to locate the header and the lib if the user has already
+  # installed onnxruntime. Otherwise, we will download the pre-compiled lib
+
+  message(STATUS "CMAKE_SYSTEM_NAME: ${CMAKE_SYSTEM_NAME}")
+  message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}")
+
+  if(DEFINED ENV{SHERPA_ONNXRUNTIME_INCLUDE_DIR})
+    set(location_onnxruntime_header_dir $ENV{SHERPA_ONNXRUNTIME_INCLUDE_DIR})
+    include_directories(${location_onnxruntime_header_dir})
+  else()
+    find_path(location_onnxruntime_header_dir onnxruntime_cxx_api.h
+      PATHS
+        /usr/include
+        /usr/local/include
+    )
+  endif()
+
+  message(STATUS "location_onnxruntime_header_dir: ${location_onnxruntime_header_dir}")
+
+  if(DEFINED ENV{SHERPA_ONNXRUNTIME_LIB_DIR})
+    if(APPLE)
+      set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.dylib)
+    else()
+      set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.so)
+    endif()
+    if(NOT EXISTS ${location_onnxruntime_lib})
+      set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
+      if(NOT EXISTS ${location_onnxruntime_lib})
+        message(FATAL_ERROR "${location_onnxruntime_lib} cannot be found")
+      endif()
+      set(onnxruntime_lib_files $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
+      message("Use static lib: ${onnxruntime_lib_files}")
+    endif()
+
+    if(SHERPA_ONNX_ENABLE_GPU)
+      set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.so)
+      if(NOT EXISTS ${location_onnxruntime_cuda_lib})
+        set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.a)
+      endif()
+    endif()
+  else()
+    find_library(location_onnxruntime_lib onnxruntime
+      PATHS
+        /lib
+        /usr/lib
+        /usr/local/lib
+    )
+
+    if(SHERPA_ONNX_ENABLE_GPU)
+      find_library(location_onnxruntime_cuda_lib onnxruntime_providers_cuda
+        PATHS
+          /lib
+          /usr/lib
+          /usr/local/lib
+      )
+    endif()
+  endif()
+
+  message(STATUS "location_onnxruntime_lib: ${location_onnxruntime_lib}")
+  if(SHERPA_ONNX_ENABLE_GPU)
+    message(STATUS "location_onnxruntime_cuda_lib: ${location_onnxruntime_cuda_lib}")
+  endif()
+endif()

 if(location_onnxruntime_header_dir AND location_onnxruntime_lib)
@@ -195,6 +197,10 @@ if(location_onnxruntime_header_dir AND location_onnxruntime_lib)
     endif()
   endif()
 else()
-  message(STATUS "Could not find a pre-installed onnxruntime. Downloading pre-compiled onnxruntime")
+  if(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE)
+    message(STATUS "Could not find a pre-installed onnxruntime.")
+  endif()
+
+  message(STATUS "Downloading pre-compiled onnxruntime")
   download_onnxruntime()
 endif()
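Taken together, the two hunks in this file gate the entire pre-installed-onnxruntime probe behind the new option and split the fallback log message in two. With the option ON (the default), exporting SHERPA_ONNXRUNTIME_INCLUDE_DIR and SHERPA_ONNXRUNTIME_LIB_DIR before running cmake still points the build at a specific installation, as before. A condensed sketch of the resulting control flow, with names taken from the real script but the dylib/static/CUDA branches omitted for brevity:

# Sketch only; see cmake/onnxruntime.cmake for the full logic.
if(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE)
  if(DEFINED ENV{SHERPA_ONNXRUNTIME_INCLUDE_DIR})
    set(location_onnxruntime_header_dir $ENV{SHERPA_ONNXRUNTIME_INCLUDE_DIR})
  else()
    find_path(location_onnxruntime_header_dir onnxruntime_cxx_api.h
      PATHS /usr/include /usr/local/include)
  endif()

  if(DEFINED ENV{SHERPA_ONNXRUNTIME_LIB_DIR})
    set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.so)
  else()
    find_library(location_onnxruntime_lib onnxruntime
      PATHS /lib /usr/lib /usr/local/lib)
  endif()
endif()

if(location_onnxruntime_header_dir AND location_onnxruntime_lib)
  # reuse the installation that was found above
else()
  download_onnxruntime()  # fall back to the pre-compiled download
endif()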
sherpa-onnx/csrc/transducer-keyword-decoder.cc
@@ -2,14 +2,16 @@
 //
 // Copyright (c)  2023-2024  Xiaomi Corporation

+#include "sherpa-onnx/csrc/transducer-keyword-decoder.h"
+
 #include <algorithm>
 #include <cmath>
 #include <cstring>
 #include <utility>
 #include <vector>

 #include "sherpa-onnx/csrc/log.h"
 #include "sherpa-onnx/csrc/onnx-utils.h"
-#include "sherpa-onnx/csrc/transducer-keyword-decoder.h"

 namespace sherpa_onnx {