Fangjun Kuang
Committed by GitHub

Provide pre-compiled sherpa-onnx libs/binaries for CUDA 12.x + onnxruntime 1.22.0 (#2599)
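
For reference, a minimal sketch of how the resulting Linux archive could be fetched once a tagged build publishes it. The exact asset URL below is an assumption: it combines the k2-fsa/sherpa-onnx repo, the v1.12.13 tag used by the upload step in this change, and the new cuda-12.x-cudnn-9.x naming introduced here.

    # Hypothetical download of the CUDA 12.x / cuDNN 9.x Linux build (asset name assumed)
    curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/v1.12.13/sherpa-onnx-v1.12.13-cuda-12.x-cudnn-9.x-linux-x64-gpu.tar.bz2
    tar xjvf sherpa-onnx-v1.12.13-cuda-12.x-cudnn-9.x-linux-x64-gpu.tar.bz2
    ls sherpa-onnx-v1.12.13-cuda-12.x-cudnn-9.x-linux-x64-gpu/bin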

@@ -28,13 +28,14 @@ concurrency:
 jobs:
   linux_gpu:
     runs-on: ${{ matrix.os }}
-    name: ${{ matrix.build_type }}
+    name: ${{ matrix.build_type }} ${{ matrix.onnxruntime_version }}
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
         # build_type: [Release, Debug]
         build_type: [Release]
+        onnxruntime_version: ["1.17.1", "1.22.0"]
 
     steps:
       - uses: actions/checkout@v4
@@ -64,6 +65,17 @@ jobs:
 
           cd /home/runner/work/sherpa-onnx/sherpa-onnx
 
+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            curl -SL -O https://github.com/csukuangfj/onnxruntime-libs/releases/download/v1.22.0/onnxruntime-linux-x64-gpu-1.22.0-patched.zip
+            unzip onnxruntime-linux-x64-gpu-1.22.0-patched.zip
+
+            export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/lib
+            export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/include
+
+            ls -lh /home/runner/work/sherpa-onnx/sherpa-onnx/onnxruntime-linux-x64-gpu-1.22.0-patched/lib/libonnxruntime.so
+          fi
+
           git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
           pushd alsa-lib
           ./gitcompile
@@ -88,6 +100,11 @@ jobs:
           ls -lh lib
           ls -lh bin
 
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            cp -v $SHERPA_ONNXRUNTIME_LIB_DIR/libonnxruntime* ./lib/
+            cp -v $SHERPA_ONNXRUNTIME_LIB_DIR/libonnxruntime* install/lib/
+          fi
+
           echo "----"
           ls -lh install/lib
 
@@ -100,7 +117,7 @@ jobs:
           du -h -d1 .
           sudo chown -R $USER ./build
           ls -lh build/bin
-          ls -lh build/_deps/onnxruntime-src/lib/
+          ls -lh build/_deps/onnxruntime-src/lib/ || true
 
           echo "strip"
           strip build/bin/*
@@ -120,12 +137,17 @@ jobs:
           strings build/install/lib/*.so | grep "^GLIBC_"
 
       - name: Copy files
-        if: github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
         shell: bash
         run: |
           SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
 
           dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-linux-x64-gpu
+
+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-cuda-12.x-cudnn-9.x-linux-x64-gpu
+          fi
+
           mkdir $dst
 
           cp -a build/install/bin $dst/
@@ -137,15 +159,23 @@ jobs:
           tar cjvf ${dst}.tar.bz2 $dst
 
       - name: Release pre-compiled binaries and libs for linux x64
-        if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        uses: svenstaro/upload-release-action@v2
+        with:
+          file_glob: true
+          overwrite: true
+          file: sherpa-onnx-*gpu.tar.bz2
+          repo_name: k2-fsa/sherpa-onnx
+          repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
+          tag: v1.12.13
+
+      - name: Release pre-compiled binaries and libs for linux x64
+        if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
         uses: svenstaro/upload-release-action@v2
         with:
           file_glob: true
           overwrite: true
-          file: sherpa-onnx-*linux-x64-gpu.tar.bz2
-          # repo_name: k2-fsa/sherpa-onnx
-          # repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
-          # tag: v1.11.3
+          file: sherpa-onnx-*gpu.tar.bz2
 
       - name: Display dependencies of sherpa-onnx for linux
         shell: bash
@@ -25,12 +25,13 @@ concurrency:
 
 jobs:
   windows_x64_cuda:
-    name: Windows x64 CUDA
+    name: Windows x64 CUDA ${{ matrix.onnxruntime_version }}
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
         os: [windows-latest]
+        onnxruntime_version: ["1.17.1", "1.22.0"]
 
     steps:
       - uses: actions/checkout@v4
@@ -46,6 +47,15 @@ jobs:
       - name: Configure CMake
         shell: bash
         run: |
+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            curl -SL -O https://github.com/microsoft/onnxruntime/releases/download/v1.22.0/onnxruntime-win-x64-gpu-1.22.0.zip
+            unzip onnxruntime-win-x64-gpu-1.22.0.zip
+
+            export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/lib
+            export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/include
+          fi
+
           mkdir build
           cd build
           cmake \
@@ -65,6 +75,51 @@ jobs:
 
           ls -lh ./bin/Release/sherpa-onnx.exe
 
+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            cp -v ../onnxruntime-win-x64-gpu-1.22.0/lib/*.dll ./bin/Release/
+            cp -v ../onnxruntime-win-x64-gpu-1.22.0/lib/*.dll ./install/bin/
+          fi
+
+      - name: Copy files
+        shell: bash
+        run: |
+          SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
+
+          dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-win-x64-cuda
+
+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-cuda-12.x-cudnn-9.x-win-x64-cuda
+          fi
+
+          mkdir $dst
+
+          cp -a build/install/bin $dst/
+          cp -a build/install/lib $dst/
+          cp -a build/install/include $dst/
+
+          tar cjvf ${dst}.tar.bz2 $dst
+
+      - name: Release pre-compiled binaries and libs for linux x64
+        if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        uses: svenstaro/upload-release-action@v2
+        with:
+          file_glob: true
+          overwrite: true
+          file: sherpa-onnx-*cuda.tar.bz2
+          repo_name: k2-fsa/sherpa-onnx
+          repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
+          tag: v1.12.13
+
+      - name: Release pre-compiled binaries and libs for linux x64
+        if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        uses: svenstaro/upload-release-action@v2
+        with:
+          file_glob: true
+          overwrite: true
+          file: sherpa-onnx-*cuda.tar.bz2
+
       - name: Test spoken language identification
         shell: bash
         run: |
@@ -137,24 +192,4 @@ jobs:
 
           .github/scripts/test-online-transducer.sh
 
-      - name: Copy files
-        shell: bash
-        run: |
-          SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
 
-          dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-win-x64-cuda
-          mkdir $dst
-
-          cp -a build/install/bin $dst/
-          cp -a build/install/lib $dst/
-          cp -a build/install/include $dst/
-
-          tar cjvf ${dst}.tar.bz2 $dst
-
-      - name: Release pre-compiled binaries and libs for Windows x64 CUDA
-        if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
-        uses: svenstaro/upload-release-action@v2
-        with:
-          file_glob: true
-          overwrite: true
-          file: sherpa-onnx-*-win-x64-cuda.tar.bz2
@@ -153,14 +153,21 @@ if(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE)
   if(APPLE)
     set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.dylib)
   elseif(WIN32)
+    if(SHERPA_ONNX_ENABLE_GPU)
+      set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/onnxruntime.dll)
+    else()
     set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/onnxruntime.lib)
     if(SHERPA_ONNX_ENABLE_DIRECTML)
       include(onnxruntime-win-x64-directml)
     endif()
+    endif()
   else()
     set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.so)
   endif()
+
   if(NOT EXISTS ${location_onnxruntime_lib})
+    message(STATUS "${location_onnxruntime_lib} does not exist. Try static lib")
+
     set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
     if(NOT EXISTS ${location_onnxruntime_lib})
       message(FATAL_ERROR "${location_onnxruntime_lib} cannot be found")
@@ -168,8 +175,14 @@ if(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE)
     set(onnxruntime_lib_files $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
     message("Use static lib: ${onnxruntime_lib_files}")
   endif()
+
   if(SHERPA_ONNX_ENABLE_GPU)
+    if(WIN32)
+      set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/onnxruntime_providers_cuda.dll)
+    else()
     set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.so)
+    endif()
+
     if(NOT EXISTS ${location_onnxruntime_cuda_lib})
       set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.a)
     endif()
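
The two CMake hunks above only take effect when SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE is enabled and the SHERPA_ONNXRUNTIME_* environment variables point at an unpacked onnxruntime. A minimal local sketch of that flow, mirroring the Linux workflow steps above; the cmake invocation is an assumption, the actual CI passes more options:

    # Use the patched onnxruntime 1.22.0 GPU build, as the Linux workflow above does
    curl -SL -O https://github.com/csukuangfj/onnxruntime-libs/releases/download/v1.22.0/onnxruntime-linux-x64-gpu-1.22.0-patched.zip
    unzip onnxruntime-linux-x64-gpu-1.22.0-patched.zip
    export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/lib
    export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/include

    mkdir build && cd build
    cmake -DSHERPA_ONNX_ENABLE_GPU=ON ..   # assumed flags; the logic above then selects onnxruntime.dll / libonnxruntime.so
    make -j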
@@ -283,12 +283,6 @@ if(DEFINED OHOS AND x${OHOS} STREQUAL xOHOS)
   )
 endif()
 
-if(SHERPA_ONNX_ENABLE_GPU)
-  target_link_libraries(sherpa-onnx-core
-    onnxruntime_providers_shared
-  )
-endif()
-
 if(SHERPA_ONNX_ENABLE_RKNN)
   if(DEFINED ENV{SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR})
     target_link_libraries(sherpa-onnx-core -L$ENV{SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR} -lrknnrt)