Commit 532ed142d28fbeeed390a913eee0242ad9e45c52 (1 parent: f5c060dd)
Authored by Fangjun Kuang, 2023-09-21 10:15:42 +0800
Committed by GitHub, 2023-09-21 10:15:42 +0800

Support linking onnxruntime lib statically on Linux (#326)

Showing 8 changed files with 106 additions and 7 deletions:
.github/workflows/export-whisper-to-onnx.yaml
.github/workflows/linux.yaml
CMakeLists.txt
c-api-examples/decode-file-c-api.c
cmake/onnxruntime-linux-x86_64-static.cmake
cmake/onnxruntime-linux-x86_64.cmake
cmake/onnxruntime.cmake
sherpa-onnx/csrc/CMakeLists.txt

.github/workflows/export-whisper-to-onnx.yaml

@@ -24,7 +24,7 @@ jobs:
       - name: Install dependencies
         shell: bash
         run: |
-          python3 -m pip install openai-whisper torch onnxruntime onnx
+          python3 -m pip install openai-whisper torch onnxruntime==1.15.1 onnx

       - name: export ${{ matrix.model }}
         shell: bash
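
The only change here pins the onnxruntime Python wheel to 1.15.1, presumably to match the onnxruntime 1.15.1 C/C++ archives referenced by the cmake scripts in this commit. A quick local check of the pinned wheel (standard pip/python usage, nothing specific to this repository):

# Install the pinned wheel and confirm the reported version.
python3 -m pip install onnxruntime==1.15.1
python3 -c "import onnxruntime; print(onnxruntime.__version__)"   # expected: 1.15.1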

.github/workflows/linux.yaml

@@ -39,12 +39,14 @@ concurrency:
 jobs:
   linux:
+    name: ${{ matrix.build_type }} ${{ matrix.shared_lib }}
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
         build_type: [Release, Debug]
+        shared_lib: [ON, OFF]

     steps:
       - uses: actions/checkout@v2

@@ -56,7 +58,7 @@ jobs:
         run: |
           mkdir build
           cd build
-          cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_INSTALL_PREFIX=./install ..
+          cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -DBUILD_SHARED_LIBS=${{ matrix.shared_lib }} -DCMAKE_INSTALL_PREFIX=./install ..

       - name: Build sherpa-onnx for ubuntu
         shell: bash

@@ -88,6 +90,8 @@ jobs:
             export PATH=$PWD/build/bin:$PATH
             export EXE=sherpa-onnx-offline

+            readelf -d build/bin/sherpa-onnx-offline
+
             .github/scripts/test-offline-whisper.sh

       - name: Test offline CTC
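
To reproduce the new static-build job locally, the configure step above translates to the commands below. This is a minimal sketch: Release, the ./install prefix, and -j 4 are choices taken from or analogous to the workflow, not requirements.

# Configure and build sherpa-onnx with static libraries on Linux x86_64.
mkdir build
cd build
# BUILD_SHARED_LIBS=OFF selects the new static onnxruntime code path.
cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=./install ..
cmake --build . -j 4
cmake --build . --target install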

CMakeLists.txt

@@ -20,6 +20,7 @@ option(SHERPA_ONNX_ENABLE_JNI "Whether to build JNI internface" OFF)
 option(SHERPA_ONNX_ENABLE_C_API "Whether to build C API" ON)
 option(SHERPA_ONNX_ENABLE_WEBSOCKET "Whether to build webscoket server/client" ON)
 option(SHERPA_ONNX_ENABLE_GPU "Enable ONNX Runtime GPU support" OFF)
+option(SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY "True to link libstdc++ statically. Used only when BUILD_SHARED_LIBS is ON on Linux" ON)

 set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")
 set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")

@@ -65,6 +66,10 @@ is installed on your system. Otherwise, you will get errors at runtime.
 Hint: You don't need sudo permission to install CUDA toolkit. Please refer to
 https://k2-fsa.github.io/k2/installation/cuda-cudnn.html
 to install CUDA toolkit if you have not installed it.")
+  if(NOT BUILD_SHARED_LIBS)
+    message(STATUS "Set BUILD_SHARED_LIBS to ON since SHERPA_ONNX_ENABLE_GPU is ON")
+    set(BUILD_SHARED_LIBS ON CACHE BOOL "" FORCE)
+  endif()
 endif()

 if(BUILD_SHARED_LIBS AND MSVC)

@@ -131,12 +136,22 @@ if(WIN32 AND MSVC)
     foreach(w IN LISTS disabled_warnings)
       string(APPEND CMAKE_CXX_FLAGS " ${w}")
     endforeach()
   endif()

   add_compile_options("$<$<C_COMPILER_ID:MSVC>:/utf-8>")
   add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/utf-8>")
 endif()

 list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/Modules)
 list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)

+if(NOT BUILD_SHARED_LIBS AND LINUX AND NOT APPLE)
+  if(SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY)
+    message(STATUS "Link libstdc++ statically")
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -static-libstdc++ -static-libgcc ")
+  else()
+    message(STATUS "Link libstdc++ dynamically")
+  endif()
+endif()
+
 include(kaldi-native-fbank)
 include(onnxruntime)

@@ -186,3 +201,4 @@ install(
   DESTINATION
     .
 )
+message(STATUS "CMAKE_CXX_FLAGS: ${CMAKE_CXX_FLAGS}")
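
The new SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY option (ON by default) decides whether -static-libstdc++ -static-libgcc are appended to CMAKE_CXX_FLAGS for static Linux builds. A minimal sketch of overriding it at configure time and checking the effect with ldd; the bin/sherpa-onnx path assumes the build/bin layout used by the CI workflow above.

# Opt out of static libstdc++ while still building static sherpa-onnx libraries.
cmake -DBUILD_SHARED_LIBS=OFF -DSHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY=OFF ..

# With the default (ON), libstdc++.so.6 should not appear among the
# dynamic dependencies of the produced executables.
ldd bin/sherpa-onnx | grep libstdc++ || echo "libstdc++ linked statically"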

c-api-examples/decode-file-c-api.c

@@ -96,6 +96,7 @@ int32_t main(int32_t argc, char *argv[]) {
   }

   SherpaOnnxOnlineRecognizerConfig config;
+  memset(&config, 0, sizeof(config));

   config.model_config.debug = 0;
   config.model_config.num_threads = 1;

@@ -195,7 +196,7 @@ int32_t main(int32_t argc, char *argv[]) {
     DecodeOnlineStream(recognizer, stream);
   }

-  SherpaOnnxOnlineRecognizerResult *r =
+  const SherpaOnnxOnlineRecognizerResult *r =
       GetOnlineStreamResult(recognizer, stream);

   if (strlen(r->text)) {

@@ -223,7 +224,7 @@ int32_t main(int32_t argc, char *argv[]) {
     DecodeOnlineStream(recognizer, stream);
   }

-  SherpaOnnxOnlineRecognizerResult *r =
+  const SherpaOnnxOnlineRecognizerResult *r =
       GetOnlineStreamResult(recognizer, stream);

   if (strlen(r->text)) {

cmake/onnxruntime-linux-x86_64-static.cmake (new file, mode 100644)

# Copyright (c)  2022-2023  Xiaomi Corporation
message(STATUS "CMAKE_SYSTEM_NAME: ${CMAKE_SYSTEM_NAME}")
message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}")

if(NOT CMAKE_SYSTEM_NAME STREQUAL Linux)
  message(FATAL_ERROR "This file is for Linux only. Given: ${CMAKE_SYSTEM_NAME}")
endif()

if(NOT CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64)
  message(FATAL_ERROR "This file is for x86_64 only. Given: ${CMAKE_SYSTEM_PROCESSOR}")
endif()

if(BUILD_SHARED_LIBS)
  message(FATAL_ERROR "This file is for building static libraries. BUILD_SHARED_LIBS: ${BUILD_SHARED_LIBS}")
endif()

# TODO(fangjun): update the URL
set(onnxruntime_URL  "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz")
set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz")
set(onnxruntime_HASH "SHA256=b64fcf4115e3d02193c7406461d582703ccc1f0c24ad320ef74b07e5f71681c6")

# If you don't have access to the Internet,
# please download onnxruntime to one of the following locations.
# You can add more if you want.
set(possible_file_locations
  ${PROJECT_SOURCE_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz
  $ENV{HOME}/Downloads/onnxruntime-linux-x64-static_lib-1.15.1.tgz
  ${PROJECT_SOURCE_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz
  ${PROJECT_BINARY_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz
  /tmp/onnxruntime-linux-x64-static_lib-1.15.1.tgz
  /star-fj/fangjun/download/github/onnxruntime-linux-x64-static_lib-1.15.1.tgz
)

foreach(f IN LISTS possible_file_locations)
  if(EXISTS ${f})
    set(onnxruntime_URL  "${f}")
    file(TO_CMAKE_PATH "${onnxruntime_URL}" onnxruntime_URL)
    message(STATUS "Found local downloaded onnxruntime: ${onnxruntime_URL}")
    set(onnxruntime_URL2)
    break()
  endif()
endforeach()

FetchContent_Declare(onnxruntime
  URL
    ${onnxruntime_URL}
    ${onnxruntime_URL2}
  URL_HASH ${onnxruntime_HASH}
)

FetchContent_GetProperties(onnxruntime)
if(NOT onnxruntime_POPULATED)
  message(STATUS "Downloading onnxruntime from ${onnxruntime_URL}")
  FetchContent_Populate(onnxruntime)
endif()
message(STATUS "onnxruntime is downloaded to ${onnxruntime_SOURCE_DIR}")

# for static libraries, we use onnxruntime_lib_files directly below
include_directories(${onnxruntime_SOURCE_DIR}/include)

file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/lib*.a")

set(onnxruntime_lib_files ${onnxruntime_lib_files} PARENT_SCOPE)

message(STATUS "onnxruntime lib files: ${onnxruntime_lib_files}")

install(FILES ${onnxruntime_lib_files} DESTINATION lib)
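
The possible_file_locations list makes offline builds possible: if the archive already exists at one of those paths, it is used instead of being downloaded. A minimal sketch of pre-fetching it into one of the recognized locations; the URL and checksum are the ones hard-coded in the file above.

# Pre-download the static onnxruntime archive so CMake picks it up locally.
cd ~/Downloads
wget https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz
# Verify against the SHA256 recorded in the cmake file.
echo "b64fcf4115e3d02193c7406461d582703ccc1f0c24ad320ef74b07e5f71681c6  onnxruntime-linux-x64-static_lib-1.15.1.tgz" | sha256sum -c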

cmake/onnxruntime-linux-x86_64.cmake

@@ -10,6 +10,10 @@ if(NOT CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64)
   message(FATAL_ERROR "This file is for x86_64 only. Given: ${CMAKE_SYSTEM_PROCESSOR}")
 endif()

+if(NOT BUILD_SHARED_LIBS)
+  message(FATAL_ERROR "This file is for building shared libraries. BUILD_SHARED_LIBS: ${BUILD_SHARED_LIBS}")
+endif()
+
 set(onnxruntime_URL  "https://github.com/microsoft/onnxruntime/releases/download/v1.15.1/onnxruntime-linux-x64-1.15.1.tgz")
 set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-1.15.1.tgz")
 set(onnxruntime_HASH "SHA256=5492f9065f87538a286fb04c8542e9ff7950abb2ea6f8c24993a940006787d87")

cmake/onnxruntime.cmake

@@ -12,8 +12,10 @@ function(download_onnxruntime)
   elseif(CMAKE_SYSTEM_NAME STREQUAL Linux AND CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64)
     if(SHERPA_ONNX_ENABLE_GPU)
       include(onnxruntime-linux-x86_64-gpu)
-    else()
+    elseif(BUILD_SHARED_LIBS)
       include(onnxruntime-linux-x86_64)
+    else()
+      include(onnxruntime-linux-x86_64-static)
     endif()
   elseif(CMAKE_SYSTEM_NAME STREQUAL Darwin)
     if(arm64 IN_LIST CMAKE_OSX_ARCHITECTURES AND x86_64 IN_LIST CMAKE_OSX_ARCHITECTURES)

sherpa-onnx/csrc/CMakeLists.txt

@@ -97,7 +97,7 @@ endif()
 target_link_libraries(sherpa-onnx-core kaldi-native-fbank-core)

-if(BUILD_SHARED_LIBS OR NOT WIN32)
+if(BUILD_SHARED_LIBS OR APPLE OR CMAKE_SYSTEM_PROCESSOR STREQUAL aarch64 OR CMAKE_SYSTEM_PROCESSOR STREQUAL arm)
   target_link_libraries(sherpa-onnx-core onnxruntime)
 else()
   target_link_libraries(sherpa-onnx-core ${onnxruntime_lib_files})

@@ -122,10 +122,15 @@ if(SHERPA_ONNX_ENABLE_CHECK)
   endif()
 endif()

+if(NOT BUILD_SHARED_LIBS AND LINUX AND NOT APPLE)
+  target_link_libraries(sherpa-onnx-core -pthread -ldl)
+endif()
+
 add_executable(sherpa-onnx sherpa-onnx.cc)
 add_executable(sherpa-onnx-offline sherpa-onnx-offline.cc)
 add_executable(sherpa-onnx-offline-parallel sherpa-onnx-offline-parallel.cc)

 target_link_libraries(sherpa-onnx sherpa-onnx-core)
 target_link_libraries(sherpa-onnx-offline sherpa-onnx-core)
 target_link_libraries(sherpa-onnx-offline-parallel sherpa-onnx-core)
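
In a static build, sherpa-onnx-core links the extracted libonnxruntime*.a archives instead of libonnxruntime.so, which is exactly what the readelf step added to linux.yaml can confirm. The same check on a local build, using the binary path from the workflow:

# libonnxruntime.so should be absent from the NEEDED entries of a static build.
readelf -d build/bin/sherpa-onnx-offline | grep NEEDED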