Authored by Fangjun Kuang, 2025-09-15 12:05:21 +0800
Committed by GitHub, 2025-09-15 12:05:21 +0800

Commit aa66810c5cdcdd1d922e6f705308bf5c96bcd310 (aa66810c)
1 parent bff2691e
Provide pre-compiled sherpa-onnx libs/binaries for CUDA 12.x + onnxruntime 1.22.0 (#2599)
Showing 4 changed files with 111 additions and 39 deletions
.github/workflows/linux-gpu.yaml
.github/workflows/windows-x64-cuda.yaml
cmake/onnxruntime.cmake
sherpa-onnx/csrc/CMakeLists.txt
.github/workflows/linux-gpu.yaml
@@ -28,13 +28,14 @@ concurrency:
 jobs:
   linux_gpu:
     runs-on: ${{ matrix.os }}
-    name: ${{ matrix.build_type }}
+    name: ${{ matrix.build_type }} ${{ matrix.onnxruntime_version }}
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
         # build_type: [Release, Debug]
         build_type: [Release]
+        onnxruntime_version: ["1.17.1", "1.22.0"]

     steps:
       - uses: actions/checkout@v4
@@ -64,6 +65,17 @@ jobs:
           cd /home/runner/work/sherpa-onnx/sherpa-onnx

+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            curl -SL -O https://github.com/csukuangfj/onnxruntime-libs/releases/download/v1.22.0/onnxruntime-linux-x64-gpu-1.22.0-patched.zip
+            unzip onnxruntime-linux-x64-gpu-1.22.0-patched.zip
+            export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/lib
+            export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/include
+
+            ls -lh /home/runner/work/sherpa-onnx/sherpa-onnx/onnxruntime-linux-x64-gpu-1.22.0-patched/lib/libonnxruntime.so
+          fi
+
           git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
           pushd alsa-lib
           ./gitcompile
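For the 1.22.0 matrix entry the job no longer builds onnxruntime from source; it downloads a patched pre-built GPU package and points the build at it through two environment variables that cmake/onnxruntime.cmake (see below) reads. A minimal local sketch of the same flow, where the download URL is the one used above but the cmake flag is an assumption:

# Local sketch: use the pre-built onnxruntime 1.22.0 GPU package instead of
# letting the build fetch or compile onnxruntime itself.
curl -SL -O https://github.com/csukuangfj/onnxruntime-libs/releases/download/v1.22.0/onnxruntime-linux-x64-gpu-1.22.0-patched.zip
unzip -q onnxruntime-linux-x64-gpu-1.22.0-patched.zip

# These two variables are what cmake/onnxruntime.cmake looks at.
export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/lib
export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/include

mkdir -p build && cd build
cmake -DSHERPA_ONNX_ENABLE_GPU=ON ..   # assumed flag; see cmake/onnxruntime.cmake below
make -j$(nproc)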
@@ -88,6 +100,11 @@ jobs:
           ls -lh lib
           ls -lh bin

+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            cp -v $SHERPA_ONNXRUNTIME_LIB_DIR/libonnxruntime* ./lib/
+            cp -v $SHERPA_ONNXRUNTIME_LIB_DIR/libonnxruntime* install/lib/
+          fi
+
           echo "----"
           ls -lh install/lib
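Because the job now ships the downloaded libonnxruntime alongside the freshly built sherpa-onnx libraries, a quick way to confirm the packaged binaries resolve against it could be (a hypothetical post-build check, not part of the workflow; the install/bin path is an assumption):

# Hypothetical check after the copy step: the executables should resolve
# libonnxruntime.so from the bundled lib/ directory once LD_LIBRARY_PATH points there.
export LD_LIBRARY_PATH=$PWD/install/lib:$LD_LIBRARY_PATH
ldd install/bin/sherpa-onnx | grep onnxruntime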
@@ -100,7 +117,7 @@ jobs:
           du -h -d1 .
           sudo chown -R $USER ./build
           ls -lh build/bin
-          ls -lh build/_deps/onnxruntime-src/lib/
+          ls -lh build/_deps/onnxruntime-src/lib/ || true

           echo "strip"
           strip build/bin/*
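The added "|| true" matters because steps with shell: bash run with errexit enabled, and build/_deps/onnxruntime-src only exists when CMake downloaded onnxruntime itself; with the pre-installed 1.22.0 package the directory is absent and a bare ls would fail the whole step. A tiny illustration of the idiom:

# With errexit (-e), a failing command aborts the script; "|| true" masks the failure.
bash -ec 'ls /nonexistent; echo "not reached"'
bash -ec 'ls /nonexistent || true; echo "step keeps going"'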
@@ -120,12 +137,17 @@ jobs:
           strings build/install/lib/*.so | grep "^GLIBC_"

       - name: Copy files
         if: github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
         shell: bash
         run: |
           SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)

           dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-linux-x64-gpu

+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-cuda-12.x-cudnn-9.x-linux-x64-gpu
+          fi
+
           mkdir $dst

           cp -a build/install/bin $dst/
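The effect of the new branch is that each matrix entry produces a differently named tarball, so the two onnxruntime builds can be released side by side. A small illustration of the two resulting names (v1.12.13 matches the tag used by the upload step below, but the real value comes from CMakeLists.txt):

# Illustrative only: compute the artifact name for both matrix entries.
SHERPA_ONNX_VERSION=v1.12.13
for onnxruntime_version in 1.17.1 1.22.0; do
  dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-linux-x64-gpu
  if [[ $onnxruntime_version == "1.22.0" ]]; then
    dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-cuda-12.x-cudnn-9.x-linux-x64-gpu
  fi
  echo ${dst}.tar.bz2
done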
@@ -137,15 +159,23 @@ jobs:
           tar cjvf ${dst}.tar.bz2 $dst

       - name: Release pre-compiled binaries and libs for linux x64
-        if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
         uses: svenstaro/upload-release-action@v2
         with:
           file_glob: true
           overwrite: true
-          file: sherpa-onnx-*linux-x64-gpu.tar.bz2
-          # repo_name: k2-fsa/sherpa-onnx
-          # repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
-          # tag: v1.11.3
+          file: sherpa-onnx-*gpu.tar.bz2
+          repo_name: k2-fsa/sherpa-onnx
+          repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
+          tag: v1.12.13
+
+      - name: Release pre-compiled binaries and libs for linux x64
+        if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        uses: svenstaro/upload-release-action@v2
+        with:
+          file_glob: true
+          overwrite: true
+          file: sherpa-onnx-*gpu.tar.bz2

       - name: Display dependencies of sherpa-onnx for linux
         shell: bash
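The single upload step is split in two: when the workflow runs in csukuangfj's fork it pushes the assets cross-repo to k2-fsa/sherpa-onnx (hence repo_name, repo_token and the pinned tag v1.12.13), while a push in k2-fsa itself uploads to the triggering tag's own release. Either way the CUDA 12.x artifacts land on the k2-fsa/sherpa-onnx release page; fetching one afterwards could look like this (the tag and asset pattern are assumptions):

# Assumed usage of the published assets; requires the GitHub CLI.
gh release download v1.12.13 \
  --repo k2-fsa/sherpa-onnx \
  --pattern 'sherpa-onnx-*cuda-12.x-cudnn-9.x-linux-x64-gpu.tar.bz2'
tar xjvf sherpa-onnx-*cuda-12.x-cudnn-9.x-linux-x64-gpu.tar.bz2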
.github/workflows/windows-x64-cuda.yaml
@@ -25,12 +25,13 @@ concurrency:
 jobs:
   windows_x64_cuda:
-    name: Windows x64 CUDA
+    name: Windows x64 CUDA ${{ matrix.onnxruntime_version }}
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
         os: [windows-latest]
+        onnxruntime_version: ["1.17.1", "1.22.0"]

     steps:
       - uses: actions/checkout@v4
@@ -46,6 +47,15 @@ jobs:
       - name: Configure CMake
         shell: bash
         run: |
+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            curl -SL -O https://github.com/microsoft/onnxruntime/releases/download/v1.22.0/onnxruntime-win-x64-gpu-1.22.0.zip
+            unzip onnxruntime-win-x64-gpu-1.22.0.zip
+            export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/lib
+            export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/include
+          fi
+
           mkdir build
           cd build
           cmake \
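Unlike the Linux job, which uses a patched package from csukuangfj/onnxruntime-libs, the Windows job takes the stock onnxruntime-win-x64-gpu-1.22.0.zip straight from Microsoft's releases and exports the same two environment variables before configuring. A local equivalent might look like the following (the cmake flag is an assumption, as in the Linux sketch above):

# Local sketch on Windows (run from Git Bash); the URL is the one used above.
curl -SL -O https://github.com/microsoft/onnxruntime/releases/download/v1.22.0/onnxruntime-win-x64-gpu-1.22.0.zip
unzip -q onnxruntime-win-x64-gpu-1.22.0.zip

export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/lib
export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/include

mkdir -p build && cd build
cmake -DSHERPA_ONNX_ENABLE_GPU=ON ..   # assumed flag
cmake --build . --config Release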
@@ -65,6 +75,51 @@ jobs:
           ls -lh ./bin/Release/sherpa-onnx.exe

+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            cp -v ../onnxruntime-win-x64-gpu-1.22.0/lib/*.dll ./bin/Release/
+            cp -v ../onnxruntime-win-x64-gpu-1.22.0/lib/*.dll ./install/bin/
+          fi
+
+      - name: Copy files
+        shell: bash
+        run: |
+          SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
+
+          dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-win-x64-cuda
+          onnxruntime_version=${{ matrix.onnxruntime_version }}
+          if [[ $onnxruntime_version == "1.22.0" ]]; then
+            dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-cuda-12.x-cudnn-9.x-win-x64-cuda
+          fi
+
+          mkdir $dst
+
+          cp -a build/install/bin $dst/
+          cp -a build/install/lib $dst/
+          cp -a build/install/include $dst/
+
+          tar cjvf ${dst}.tar.bz2 $dst
+
+      - name: Release pre-compiled binaries and libs for linux x64
+        if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        uses: svenstaro/upload-release-action@v2
+        with:
+          file_glob: true
+          overwrite: true
+          file: sherpa-onnx-*cuda.tar.bz2
+          repo_name: k2-fsa/sherpa-onnx
+          repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
+          tag: v1.12.13
+
+      - name: Release pre-compiled binaries and libs for linux x64
+        if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+        uses: svenstaro/upload-release-action@v2
+        with:
+          file_glob: true
+          overwrite: true
+          file: sherpa-onnx-*cuda.tar.bz2

       - name: Test spoken language identification
         shell: bash
         run: |
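The added copy commands put onnxruntime's DLLs, including the CUDA provider, next to sherpa-onnx.exe so they can be found at load time, since the pre-built package is not linked in statically. A quick hypothetical check after the build step (paths relative to the repository root are an assumption):

# Hypothetical verification that the runtime DLLs travel with the executable.
ls -lh build/bin/Release/onnxruntime*.dll
ls -lh build/install/bin/onnxruntime*.dll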
@@ -137,24 +192,4 @@ jobs:
           .github/scripts/test-online-transducer.sh

-      - name: Copy files
-        shell: bash
-        run: |
-          SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
-
-          dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-win-x64-cuda
-          mkdir $dst
-
-          cp -a build/install/bin $dst/
-          cp -a build/install/lib $dst/
-          cp -a build/install/include $dst/
-
-          tar cjvf ${dst}.tar.bz2 $dst
-
-      - name: Release pre-compiled binaries and libs for Windows x64 CUDA
-        if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
-        uses: svenstaro/upload-release-action@v2
-        with:
-          file_glob: true
-          overwrite: true
-          file: sherpa-onnx-*-win-x64-cuda.tar.bz2
cmake/onnxruntime.cmake
@@ -153,14 +153,21 @@ if(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE)
   if(APPLE)
     set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.dylib)
   elseif(WIN32)
-    set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/onnxruntime.lib)
-    if(SHERPA_ONNX_ENABLE_DIRECTML)
-      include(onnxruntime-win-x64-directml)
-    endif()
+    if(SHERPA_ONNX_ENABLE_GPU)
+      set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/onnxruntime.dll)
+    else()
+      set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/onnxruntime.lib)
+      if(SHERPA_ONNX_ENABLE_DIRECTML)
+        include(onnxruntime-win-x64-directml)
+      endif()
+    endif()
   else()
     set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.so)
   endif()

   if(NOT EXISTS ${location_onnxruntime_lib})
+    message(STATUS "${location_onnxruntime_lib} does not exist. Try static lib")
     set(location_onnxruntime_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
+    if(NOT EXISTS ${location_onnxruntime_lib})
+      message(FATAL_ERROR "${location_onnxruntime_lib} cannot be found")
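For a pre-installed onnxruntime on Windows the net effect is: with GPU enabled the build now records the location of onnxruntime.dll, otherwise it keeps using onnxruntime.lib (with the optional DirectML include), and if neither is found it falls back to the static library before erroring out. A rough pre-flight check mirroring that lookup order, not part of the commit:

# Rough mirror of the lookup order in cmake/onnxruntime.cmake, for a quick
# sanity check of $SHERPA_ONNXRUNTIME_LIB_DIR before configuring.
for f in onnxruntime.dll onnxruntime.lib libonnxruntime.dylib libonnxruntime.so libonnxruntime.a; do
  if [ -e "$SHERPA_ONNXRUNTIME_LIB_DIR/$f" ]; then
    echo "found: $SHERPA_ONNXRUNTIME_LIB_DIR/$f"
  fi
done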
@@ -168,8 +175,14 @@ if(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE)
     set(onnxruntime_lib_files $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime.a)
     message("Use static lib: ${onnxruntime_lib_files}")
   endif()

   if(SHERPA_ONNX_ENABLE_GPU)
-    set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.so)
+    if(WIN32)
+      set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/onnxruntime_providers_cuda.dll)
+    else()
+      set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.so)
+    endif()
+
     if(NOT EXISTS ${location_onnxruntime_cuda_lib})
       set(location_onnxruntime_cuda_lib $ENV{SHERPA_ONNXRUNTIME_LIB_DIR}/libonnxruntime_providers_cuda.a)
     endif()
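GPU builds additionally need the CUDA execution-provider library from the same directory, which on Windows is onnxruntime_providers_cuda.dll rather than libonnxruntime_providers_cuda.so. A corresponding hypothetical check:

# Hypothetical: confirm the CUDA provider shipped with the pre-installed package.
ls -lh "$SHERPA_ONNXRUNTIME_LIB_DIR"/*onnxruntime_providers_cuda* || true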
sherpa-onnx/csrc/CMakeLists.txt
@@ -283,12 +283,6 @@ if(DEFINED OHOS AND x${OHOS} STREQUAL xOHOS)
   )
 endif()

-if(SHERPA_ONNX_ENABLE_GPU)
-  target_link_libraries(sherpa-onnx-core onnxruntime_providers_shared)
-endif()
-
 if(SHERPA_ONNX_ENABLE_RKNN)
   if(DEFINED ENV{SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR})
     target_link_libraries(sherpa-onnx-core -L$ENV{SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR} -lrknnrt)
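The pre-built GPU packages ship the CUDA provider (and onnxruntime_providers_shared) as separate libraries that onnxruntime loads at runtime, so sherpa-onnx-core no longer links onnxruntime_providers_shared directly; the provider libraries only have to sit next to libonnxruntime. A hypothetical way to confirm the direct dependencies after this change:

# Hypothetical post-build check: only libonnxruntime should appear as a direct
# (NEEDED) dependency; the provider libraries are loaded at runtime instead.
readelf -d build/bin/sherpa-onnx | grep NEEDED | grep -i onnxruntime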