Fangjun Kuang
Committed by GitHub

Support linking onnxruntime lib statically on Linux (#326)
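
The change lets onnxruntime be linked statically by configuring with BUILD_SHARED_LIBS=OFF, mirroring the CI matrix added below. A minimal local sketch of that configuration (the make invocation and job count are assumptions, not part of this commit):

    # Configure a static build; BUILD_SHARED_LIBS=OFF selects the static onnxruntime package.
    # SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY defaults to ON and also embeds libstdc++/libgcc.
    mkdir build
    cd build
    cmake -D CMAKE_BUILD_TYPE=Release -D BUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=./install ..
    make -j4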

@@ -24,7 +24,7 @@ jobs:
       - name: Install dependencies
         shell: bash
         run: |
-          python3 -m pip install openai-whisper torch onnxruntime onnx
+          python3 -m pip install openai-whisper torch onnxruntime==1.15.1 onnx

       - name: export ${{ matrix.model }}
         shell: bash
@@ -39,12 +39,14 @@ concurrency:

 jobs:
   linux:
+    name: ${{ matrix.build_type }} ${{ matrix.shared_lib }}
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
         build_type: [Release, Debug]
+        shared_lib: [ON, OFF]

     steps:
       - uses: actions/checkout@v2
@@ -56,7 +58,7 @@ jobs:
         run: |
           mkdir build
           cd build
-          cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_INSTALL_PREFIX=./install ..
+          cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -D BUILD_SHARED_LIBS=${{ matrix.shared_lib }} -DCMAKE_INSTALL_PREFIX=./install ..

       - name: Build sherpa-onnx for ubuntu
         shell: bash
@@ -88,6 +90,8 @@ jobs:
           export PATH=$PWD/build/bin:$PATH
           export EXE=sherpa-onnx-offline

+          readelf -d build/bin/sherpa-onnx-offline
+
           .github/scripts/test-offline-whisper.sh

       - name: Test offline CTC
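
The readelf -d call above prints the dynamic section of sherpa-onnx-offline in the CI log, so it is easy to confirm that the static build no longer depends on libonnxruntime.so. A hedged local check along the same lines (the grep filter and the ldd call are illustrative additions, not part of the workflow):

    # Report whether a shared onnxruntime is still listed as a NEEDED entry
    if readelf -d build/bin/sherpa-onnx-offline | grep -qi onnxruntime; then
      echo "binary still depends on a shared onnxruntime"
    else
      echo "no shared onnxruntime dependency found"
    fi
    # ldd shows the remaining shared-library dependencies
    ldd build/bin/sherpa-onnx-offline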
@@ -20,6 +20,7 @@ option(SHERPA_ONNX_ENABLE_JNI "Whether to build JNI internface" OFF)
 option(SHERPA_ONNX_ENABLE_C_API "Whether to build C API" ON)
 option(SHERPA_ONNX_ENABLE_WEBSOCKET "Whether to build webscoket server/client" ON)
 option(SHERPA_ONNX_ENABLE_GPU "Enable ONNX Runtime GPU support" OFF)
+option(SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY "True to link libstdc++ statically. Used only when BUILD_SHARED_LIBS is ON on Linux" ON)

 set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")
 set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")
@@ -65,6 +66,10 @@ is installed on your system. Otherwise, you will get errors at runtime.
 Hint: You don't need sudo permission to install CUDA toolkit. Please refer to
 https://k2-fsa.github.io/k2/installation/cuda-cudnn.html
 to install CUDA toolkit if you have not installed it.")
+  if(NOT BUILD_SHARED_LIBS)
+    message(STATUS "Set BUILD_SHARED_LIBS to ON since SHERPA_ONNX_ENABLE_GPU is ON")
+    set(BUILD_SHARED_LIBS ON CACHE BOOL "" FORCE)
+  endif()
 endif()

 if(BUILD_SHARED_LIBS AND MSVC)
@@ -131,12 +136,22 @@ if(WIN32 AND MSVC)
   foreach(w IN LISTS disabled_warnings)
     string(APPEND CMAKE_CXX_FLAGS " ${w} ")
   endforeach()
-endif()

+  add_compile_options("$<$<C_COMPILER_ID:MSVC>:/utf-8>")
+  add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/utf-8>")
+endif()

 list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/Modules)
 list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)

+if(NOT BUILD_SHARED_LIBS AND LINUX AND NOT APPLE)
+  if(SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY)
+    message(STATUS "Link libstdc++ statically")
+    set(CMAKE_CXX_FLAGS " ${CMAKE_CXX_FLAGS} -static-libstdc++ -static-libgcc ")
+  else()
+    message(STATUS "Link libstdc++ dynamically")
+  endif()
+endif()

 include(kaldi-native-fbank)
 include(onnxruntime)
@@ -186,3 +201,4 @@ install(
   DESTINATION
     .
 )
+message(STATUS "CMAKE_CXX_FLAGS: ${CMAKE_CXX_FLAGS}")
@@ -96,6 +96,7 @@ int32_t main(int32_t argc, char *argv[]) {
   }

   SherpaOnnxOnlineRecognizerConfig config;
+  memset(&config, 0, sizeof(config));

   config.model_config.debug = 0;
   config.model_config.num_threads = 1;
@@ -195,7 +196,7 @@ int32_t main(int32_t argc, char *argv[]) {
     DecodeOnlineStream(recognizer, stream);
   }

-  SherpaOnnxOnlineRecognizerResult *r =
+  const SherpaOnnxOnlineRecognizerResult *r =
       GetOnlineStreamResult(recognizer, stream);

   if (strlen(r->text)) {
@@ -223,7 +224,7 @@ int32_t main(int32_t argc, char *argv[]) {
     DecodeOnlineStream(recognizer, stream);
   }

-  SherpaOnnxOnlineRecognizerResult *r =
+  const SherpaOnnxOnlineRecognizerResult *r =
       GetOnlineStreamResult(recognizer, stream);

   if (strlen(r->text)) {
@@ -0,0 +1,67 @@
+# Copyright (c) 2022-2023 Xiaomi Corporation
+message(STATUS "CMAKE_SYSTEM_NAME: ${CMAKE_SYSTEM_NAME}")
+message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}")
+
+if(NOT CMAKE_SYSTEM_NAME STREQUAL Linux)
+  message(FATAL_ERROR "This file is for Linux only. Given: ${CMAKE_SYSTEM_NAME}")
+endif()
+
+if(NOT CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64)
+  message(FATAL_ERROR "This file is for x86_64 only. Given: ${CMAKE_SYSTEM_PROCESSOR}")
+endif()
+
+if(BUILD_SHARED_LIBS)
+  message(FATAL_ERROR "This file is for building static libraries. BUILD_SHARED_LIBS: ${BUILD_SHARED_LIBS}")
+endif()
+
+# TODO(fangjun): update the URL
+set(onnxruntime_URL "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz")
+set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz")
+set(onnxruntime_HASH "SHA256=b64fcf4115e3d02193c7406461d582703ccc1f0c24ad320ef74b07e5f71681c6")
+
+# If you don't have access to the Internet,
+# please download onnxruntime to one of the following locations.
+# You can add more if you want.
+set(possible_file_locations
+  ${PROJECT_SOURCE_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz
+
+  $ENV{HOME}/Downloads/onnxruntime-linux-x64-static_lib-1.15.1.tgz
+  ${PROJECT_SOURCE_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz
+  ${PROJECT_BINARY_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz
+  /tmp/onnxruntime-linux-x64-static_lib-1.15.1.tgz
+  /star-fj/fangjun/download/github/onnxruntime-linux-x64-static_lib-1.15.1.tgz
+)
+
+foreach(f IN LISTS possible_file_locations)
+  if(EXISTS ${f})
+    set(onnxruntime_URL "${f}")
+    file(TO_CMAKE_PATH "${onnxruntime_URL}" onnxruntime_URL)
+    message(STATUS "Found local downloaded onnxruntime: ${onnxruntime_URL}")
+    set(onnxruntime_URL2)
+    break()
+  endif()
+endforeach()
+
+FetchContent_Declare(onnxruntime
+  URL
+    ${onnxruntime_URL}
+    ${onnxruntime_URL2}
+  URL_HASH ${onnxruntime_HASH}
+)
+
+FetchContent_GetProperties(onnxruntime)
+if(NOT onnxruntime_POPULATED)
+  message(STATUS "Downloading onnxruntime from ${onnxruntime_URL}")
+  FetchContent_Populate(onnxruntime)
+endif()
+message(STATUS "onnxruntime is downloaded to ${onnxruntime_SOURCE_DIR}")
+
+# for static libraries, we use onnxruntime_lib_files directly below
+include_directories(${onnxruntime_SOURCE_DIR}/include)
+
+file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/lib*.a")
+
+set(onnxruntime_lib_files ${onnxruntime_lib_files} PARENT_SCOPE)
+
+message(STATUS "onnxruntime lib files: ${onnxruntime_lib_files}")
+install(FILES ${onnxruntime_lib_files} DESTINATION lib)
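
Because the new cmake module checks possible_file_locations before downloading, machines without Internet access can pre-fetch the static onnxruntime archive into one of those paths. A hedged example using the ~/Downloads location from the list (wget itself is an assumption; any download tool works):

    # Pre-download the static onnxruntime archive so the build can find it offline
    wget -P ~/Downloads \
      https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz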
@@ -10,6 +10,10 @@ if(NOT CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64)
   message(FATAL_ERROR "This file is for x86_64 only. Given: ${CMAKE_SYSTEM_PROCESSOR}")
 endif()

+if(NOT BUILD_SHARED_LIBS)
+  message(FATAL_ERROR "This file is for building shared libraries. BUILD_SHARED_LIBS: ${BUILD_SHARED_LIBS}")
+endif()
+
 set(onnxruntime_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.15.1/onnxruntime-linux-x64-1.15.1.tgz")
 set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-1.15.1.tgz")
 set(onnxruntime_HASH "SHA256=5492f9065f87538a286fb04c8542e9ff7950abb2ea6f8c24993a940006787d87")
@@ -12,8 +12,10 @@ function(download_onnxruntime)
   elseif(CMAKE_SYSTEM_NAME STREQUAL Linux AND CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64)
     if(SHERPA_ONNX_ENABLE_GPU)
       include(onnxruntime-linux-x86_64-gpu)
-    else()
+    elseif(BUILD_SHARED_LIBS)
       include(onnxruntime-linux-x86_64)
+    else()
+      include(onnxruntime-linux-x86_64-static)
     endif()
   elseif(CMAKE_SYSTEM_NAME STREQUAL Darwin)
     if (arm64 IN_LIST CMAKE_OSX_ARCHITECTURES AND x86_64 IN_LIST CMAKE_OSX_ARCHITECTURES)
@@ -97,7 +97,7 @@ endif()

 target_link_libraries(sherpa-onnx-core kaldi-native-fbank-core)

-if(BUILD_SHARED_LIBS OR NOT WIN32)
+if(BUILD_SHARED_LIBS OR APPLE OR CMAKE_SYSTEM_PROCESSOR STREQUAL aarch64 OR CMAKE_SYSTEM_PROCESSOR STREQUAL arm)
   target_link_libraries(sherpa-onnx-core onnxruntime)
 else()
   target_link_libraries(sherpa-onnx-core ${onnxruntime_lib_files})
@@ -122,10 +122,15 @@ if(SHERPA_ONNX_ENABLE_CHECK)
   endif()
 endif()

+if(NOT BUILD_SHARED_LIBS AND LINUX AND NOT APPLE)
+  target_link_libraries(sherpa-onnx-core -pthread -ldl)
+endif()
+
 add_executable(sherpa-onnx sherpa-onnx.cc)
 add_executable(sherpa-onnx-offline sherpa-onnx-offline.cc)
 add_executable(sherpa-onnx-offline-parallel sherpa-onnx-offline-parallel.cc)

+
 target_link_libraries(sherpa-onnx sherpa-onnx-core)
 target_link_libraries(sherpa-onnx-offline sherpa-onnx-core)
 target_link_libraries(sherpa-onnx-offline-parallel sherpa-onnx-core)