xuning

Merge branch 'master' of https://github.com/k2-fsa/sherpa-onnx

正在显示 50 个修改的文件 包含 3208 行增加546 行删除

要显示太多修改。

为保证性能只显示 50 of 50+ 个文件。

... ... @@ -4,6 +4,40 @@ set -ex
cd dart-api-examples
pushd spoken-language-identification
./run-whisper.sh
popd
pushd streaming-asr
echo '----------streaming T-one ctc----------'
./run-t-one-ctc.sh
rm -rf sherpa-onnx-*
echo '----------streaming zipformer ctc HLG----------'
./run-zipformer-ctc-hlg.sh
rm -rf sherpa-onnx-*
echo '----------streaming zipformer ctc----------'
./run-zipformer-ctc.sh
rm -rf sherpa-onnx-*
echo '----------streaming zipformer transducer----------'
./run-zipformer-transducer-itn.sh
./run-zipformer-transducer.sh
rm -f itn*
rm -rf sherpa-onnx-*
echo '----------streaming NeMo transducer----------'
./run-nemo-transducer.sh
rm -rf sherpa-onnx-*
echo '----------streaming paraformer----------'
./run-paraformer.sh
rm -rf sherpa-onnx-*
popd # streaming-asr
pushd tts
echo '----------matcha tts----------'
... ... @@ -40,6 +74,10 @@ popd
pushd non-streaming-asr
echo '----------Wenet CTC----------'
./run-wenet-ctc.sh
rm -rf sherpa-onnx-*
echo '----------Zipformer CTC----------'
./run-zipformer-ctc.sh
rm -rf sherpa-onnx-*
... ... @@ -167,29 +205,3 @@ popd
pushd keyword-spotter
./run-zh.sh
popd
pushd streaming-asr
echo '----------streaming zipformer ctc HLG----------'
./run-zipformer-ctc-hlg.sh
rm -rf sherpa-onnx-*
echo '----------streaming zipformer ctc----------'
./run-zipformer-ctc.sh
rm -rf sherpa-onnx-*
echo '----------streaming zipformer transducer----------'
./run-zipformer-transducer-itn.sh
./run-zipformer-transducer.sh
rm -f itn*
rm -rf sherpa-onnx-*
echo '----------streaming NeMo transducer----------'
./run-nemo-transducer.sh
rm -rf sherpa-onnx-*
echo '----------streaming paraformer----------'
./run-paraformer.sh
rm -rf sherpa-onnx-*
popd # streaming-asr
... ...
... ... @@ -27,6 +27,9 @@ rm -rf sherpa-onnx-nemo-*
cd ../offline-decode-files
./run-wenet-ctc.sh
rm -rf sherpa-onnx-*
./run-zipformer-ctc.sh
rm -rf sherpa-onnx-*
... ... @@ -108,6 +111,9 @@ cd ../keyword-spotting-from-files
./run.sh
cd ../online-decode-files
./run-t-one-ctc.sh
rm -rf sherpa-onnx-*
./run-transducer-itn.sh
rm -rf sherpa-onnx-*
... ...
... ... @@ -10,6 +10,26 @@ arch=$(node -p "require('os').arch()")
platform=$(node -p "require('os').platform()")
node_version=$(node -p "process.versions.node.split('.')[0]")
echo "----------non-streaming ASR Wenet CTC----------"
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
tar xvf sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
rm sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
node ./test_asr_non_streaming_wenet_ctc.js
rm -rf sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10
echo "----------streaming ASR T-one CTC----------"
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
tar xvf sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
rm sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
node ./test_asr_streaming_t_one_ctc.js
rm -rf sherpa-onnx-streaming-t-one-russian-2025-09-08
echo "----------KittenTTS----------"
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kitten-nano-en-v0_1-fp16.tar.bz2
tar xf kitten-nano-en-v0_1-fp16.tar.bz2
rm kitten-nano-en-v0_1-fp16.tar.bz2
... ...
... ... @@ -9,6 +9,20 @@ git status
ls -lh
ls -lh node_modules
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
tar xvf sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
rm sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
node ./test-offline-wenet-ctc.js
rm -rf sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
tar xvf sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
rm sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
node ./test-online-t-one-ctc.js
rm -rf sherpa-onnx-streaming-t-one-russian-2025-09-08
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kitten-nano-en-v0_1-fp16.tar.bz2
tar xf kitten-nano-en-v0_1-fp16.tar.bz2
rm kitten-nano-en-v0_1-fp16.tar.bz2
... ...
... ... @@ -8,6 +8,16 @@ log() {
echo -e "$(date '+%Y-%m-%d %H:%M:%S') (${fname}:${BASH_LINENO[0]}:${FUNCNAME[1]}) $*"
}
log "test T-one"
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
tar xvf sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
rm sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
python3 ./python-api-examples/online-t-one-ctc-decode-files.py
rm -rf sherpa-onnx-streaming-t-one-russian-2025-09-08
log "test nemo canary"
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-nemo-canary-180m-flash-en-es-de-fr-int8.tar.bz2
tar xvf sherpa-onnx-nemo-canary-180m-flash-en-es-de-fr-int8.tar.bz2
... ...
... ... @@ -9,10 +9,19 @@ ls -lh
./run-test-version.sh
./run-decode-file-t-one-streaming.sh
rm -rf sherpa-onnx-streaming-*
./run-compute-speaker-embeddings.sh
rm -fv *.wav *.onnx
./run-tts-kitten-en.sh
ls -lh
rm -rf kitten-*
./run-wenet-ctc-asr.sh
rm -rf sherpa-onnx-*
./run-zipformer-ctc-asr.sh
rm -rf sherpa-onnx-zipformer-*
... ...
... ... @@ -13,15 +13,6 @@ on:
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'toolchains/aarch64-linux-gnu.toolchain.cmake'
pull_request:
branches:
- master
paths:
- '.github/workflows/aarch64-linux-gnu-shared.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'toolchains/aarch64-linux-gnu.toolchain.cmake'
workflow_dispatch:
... ...
... ... @@ -13,15 +13,6 @@ on:
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'toolchains/aarch64-linux-gnu.toolchain.cmake'
pull_request:
branches:
- master
paths:
- '.github/workflows/aarch64-linux-gnu-static.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'toolchains/aarch64-linux-gnu.toolchain.cmake'
workflow_dispatch:
... ...
... ... @@ -12,15 +12,6 @@ on:
- 'build-android*.sh'
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
pull_request:
branches:
- master
paths:
- '.github/workflows/android-rknn.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/jni/*'
- 'build-android*.sh'
workflow_dispatch:
... ... @@ -148,7 +139,7 @@ jobs:
file: sherpa-onnx-*-android-rknn.tar.bz2
# repo_name: k2-fsa/sherpa-onnx
# repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
# tag: v1.11.3
# tag: v1.12.13
build-android-aar-rknn:
needs: [build-android-rknn-libs]
... ... @@ -284,7 +275,7 @@ jobs:
file: ./*.aar
# repo_name: k2-fsa/sherpa-onnx
# repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
# tag: v1.11.3
# tag: v1.12.13
- name: Release android aar
if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
... ...
... ... @@ -15,15 +15,6 @@ on:
- 'build-android*.sh'
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
pull_request:
branches:
- master
paths:
- '.github/workflows/android-static.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/jni/*'
- 'build-android*.sh'
workflow_dispatch:
... ...
... ... @@ -12,15 +12,6 @@ on:
- 'build-android*.sh'
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
pull_request:
branches:
- master
paths:
- '.github/workflows/android.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/jni/*'
- 'build-android*.sh'
workflow_dispatch:
... ... @@ -68,6 +59,9 @@ jobs:
export ANDROID_NDK=$ANDROID_NDK_LATEST_HOME
export SHERPA_ONNX_ENABLE_C_API=ON
./build-android-arm64-v8a.sh
readelf -l ./build-android-arm64-v8a/install/lib/*.so
mkdir -p jniLibs/arm64-v8a/
cp -v ./build-android-arm64-v8a/install/lib/*.so ./jniLibs/arm64-v8a/
cp -v ./build-android-arm64-v8a/install/lib/README.md ./jniLibs/arm64-v8a/
... ... @@ -83,6 +77,9 @@ jobs:
export SHERPA_ONNX_ENABLE_C_API=ON
./build-android-armv7-eabi.sh
mkdir -p ./jniLibs/armeabi-v7a/
readelf -l ./build-android-armv7-eabi/install/lib/*.so
cp -v ./build-android-armv7-eabi/install/lib/*.so ./jniLibs/armeabi-v7a/
cp -v ./build-android-armv7-eabi/install/lib/README.md ./jniLibs/armeabi-v7a/
rm -rf ./build-android-armv7-eabi
... ... @@ -96,6 +93,9 @@ jobs:
export ANDROID_NDK=$ANDROID_NDK_LATEST_HOME
export SHERPA_ONNX_ENABLE_C_API=ON
./build-android-x86-64.sh
readelf -l ./build-android-x86-64/install/lib/*.so
mkdir -p ./jniLibs/x86_64
cp -v ./build-android-x86-64/install/lib/*.so ./jniLibs/x86_64
cp -v ./build-android-x86-64/install/lib/README.md ./jniLibs/x86_64
... ... @@ -110,6 +110,9 @@ jobs:
export ANDROID_NDK=$ANDROID_NDK_LATEST_HOME
export SHERPA_ONNX_ENABLE_C_API=ON
./build-android-x86.sh
readelf -l ./build-android-x86/install/lib/*.so
mkdir -p ./jniLibs/x86
cp -v ./build-android-x86/install/lib/*.so ./jniLibs/x86
cp -v ./build-android-x86/install/lib/README.md ./jniLibs/x86
... ... @@ -174,7 +177,7 @@ jobs:
file: sherpa-onnx-*-android.tar.bz2
# repo_name: k2-fsa/sherpa-onnx
# repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
# tag: v1.11.5
# tag: v1.12.11
build-android-aar:
needs: [build-android-libs]
... ... @@ -307,9 +310,9 @@ jobs:
file_glob: true
overwrite: true
file: ./*.aar
# repo_name: k2-fsa/sherpa-onnx
# repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
# tag: v1.11.5
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: v1.12.11
- name: Release android aar
if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
... ...
... ... @@ -13,15 +13,6 @@ on:
- 'toolchains/arm-linux-gnueabihf.toolchain.cmake'
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
pull_request:
branches:
- master
paths:
- '.github/workflows/arm-linux-gnueabihf.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'toolchains/arm-linux-gnueabihf.toolchain.cmake'
workflow_dispatch:
... ...
... ... @@ -5,6 +5,12 @@ on:
branches:
- wheel
workflow_dispatch:
inputs:
publish_sherpa_onnx_bin:
description: "Publish sherpa-onnx-bin"
required: false
default: "true"
type: boolean
env:
SHERPA_ONNX_IS_IN_GITHUB_ACTIONS: 1
... ... @@ -14,7 +20,277 @@ concurrency:
cancel-in-progress: true
jobs:
core:
name: core
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-24.04-arm]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
- name: Display PWD
shell: bash
run: |
echo "pwd: $PWD"
ls -lh
du -h -d1 .
- name: Build sherpa-onnx
uses: addnab/docker-run-action@v3
with:
image: quay.io/pypa/manylinux2014_aarch64
options: |
--volume ${{ github.workspace }}/:/home/runner/work/sherpa-onnx/sherpa-onnx
shell: bash
run: |
uname -a
gcc --version
cmake --version
cat /etc/*release
id
pwd
cd /home/runner/work/sherpa-onnx/sherpa-onnx
find /opt -name "python*"
echo "--------------------"
export PATH=/opt/_internal/cpython-3.10.18/bin:$PATH
which python3
python3 --version
python3 -m venv my
source ./my/bin/activate
python3 -m pip install setuptools wheel twine
git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
pushd alsa-lib
./gitcompile
popd
export CPLUS_INCLUDE_PATH=$PWD/alsa-lib/include:$CPLUS_INCLUDE_PATH
export SHERPA_ONNX_ALSA_LIB_DIR=$PWD/alsa-lib/src/.libs
mkdir build
pushd build
cmake \
-D SHERPA_ONNX_ENABLE_TTS=ON \
-D CMAKE_BUILD_TYPE=Release \
-D BUILD_SHARED_LIBS=ON \
-D SHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF \
-D CMAKE_INSTALL_PREFIX=./install \
..
make -j2
make install
ls -lh lib
ls -lh bin
echo "----"
ls -lh install/lib
rm -fv install/lib/libcargs.so
echo "----"
ls -lh install/bin
echo 'sherpa-onnx-core'
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./install/lib/lib*.so ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
cp -v ./install/include/sherpa-onnx/c-api/*.h ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
pushd ../scripts/wheel/sherpa-onnx-core
python3 setup.py bdist_wheel --plat-name=manylinux2014_aarch64
ls -lh dist
popd
echo 'sherpa-onnx-bin'
mkdir -p ../scripts/wheel/sherpa-onnx-bin/bin
cp -v ./install/bin/sherpa-onnx* ../scripts/wheel/sherpa-onnx-bin/bin
pushd ../scripts/wheel/sherpa-onnx-bin
python3 setup.py bdist_wheel --plat-name=manylinux2014_aarch64
ls -lh dist
popd
- name: Collect wheels
shell: bash
run: |
sudo chown -R $USER ./scripts/wheel
mkdir wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-core/dist/*.whl ./wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-bin/dist/*.whl ./wheelhouse
- uses: actions/upload-artifact@v4
with:
name: wheels-core-linux-aarch64
path: ./wheelhouse/*.whl
- name: Show wheels
shell: bash
run: |
sudo chown -R $USER ./scripts/wheel
ls -lh ./scripts/wheel/sherpa-onnx-core/dist
ls -lh ./scripts/wheel/sherpa-onnx-bin/dist
unzip -l ./scripts/wheel/sherpa-onnx-core/dist/*.whl
echo "---"
unzip -l ./scripts/wheel/sherpa-onnx-bin/dist/*.whl
- name: Install patchelf
shell: bash
run: |
sudo apt-get update -q
sudo apt-get install -q -y patchelf
patchelf --help
- name: Patch wheels
shell: bash
run: |
mkdir ./wheels
sudo ./scripts/wheel/patch_wheel.py --in-dir ./wheelhouse --out-dir ./wheels
ls -lh ./wheels/
rm -rf ./wheelhouse
mv ./wheels ./wheelhouse
- uses: actions/upload-artifact@v4
with:
name: wheels-core-linux-aarch64-patched
path: ./wheelhouse/*.whl
test:
name: test
needs: [core]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-24.04-arm]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Retrieve artifact from Linux x64
uses: actions/download-artifact@v4
with:
name: wheels-core-linux-aarch64-patched
path: /tmp/wheels
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Show
shell: bash
run: |
ls -lh /tmp/wheels
- name: Install
shell: bash
run: |
python3 -m pip install /tmp/wheels/*.whl
- name: Show version
shell: bash
run: |
sherpa-onnx-version
- name: Show help
shell: bash
run: |
sherpa-onnx --help
echo "---"
ls -lh $(which sherpa-onnx)
file $(which sherpa-onnx)
readelf -d $(which sherpa-onnx)
ldd $(which sherpa-onnx)
sherpa-onnx-offline --help
echo "---"
sherpa-onnx-vad --help
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
with:
max_attempts: 20
timeout_seconds: 200
shell: bash
command: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
rm -rf huggingface
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
echo "SHERPA_ONNX_VERSION $SHERPA_ONNX_VERSION"
d=cpu/$SHERPA_ONNX_VERSION
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels huggingface
cd huggingface
git fetch
git pull
git merge -m "merge remote" --ff origin main
mkdir -p $d
cp -v /tmp/wheels/*.whl $d/
git status
git add .
git commit -m "add more wheels"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- name: Publish wheels to PyPI ${{ github.event.inputs.publish_sherpa_onnx_bin }}
if: ${{ (github.event.inputs.publish_sherpa_onnx_bin || 'true') == 'true' }}
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
shell: bash
run: |
python3 -m pip install --upgrade pip
python3 -m pip install wheel twine==5.0.0 setuptools
twine upload /tmp/wheels/*.whl
build_wheels_aarch64:
needs: [core, test]
name: ${{ matrix.manylinux }} ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
... ... @@ -22,8 +298,8 @@ jobs:
matrix:
# see https://github.com/pypa/cibuildwheel/issues/2257
# we don't use qemu from now on
os: [ubuntu-22.04-arm]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313"]
os: [ubuntu-24.04-arm]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313", "cp314"]
manylinux: [manylinux2014] #, manylinux_2_28]
steps:
... ... @@ -38,7 +314,7 @@ jobs:
# see https://cibuildwheel.readthedocs.io/en/stable/changelog/
# for a list of versions
- name: Build wheels
uses: pypa/cibuildwheel@v2.21.3
uses: pypa/cibuildwheel@v3.1.4
env:
CIBW_BEFORE_ALL: |
git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
... ... @@ -48,7 +324,15 @@ jobs:
echo "PWD"
ls -lh /project/alsa-lib/src/.libs
CIBW_ENVIRONMENT: CPLUS_INCLUDE_PATH=/project/alsa-lib/include:$CPLUS_INCLUDE_PATH SHERPA_ONNX_ALSA_LIB_DIR=/project/alsa-lib/src/.libs LD_LIBRARY_PATH=/project/build/bdist.linux-x86_64/wheel/sherpa_onnx/lib:$SHERPA_ONNX_ALSA_LIB_DIR SHERPA_ONNX_MAKE_ARGS="VERBOSE=1" SHERPA_ONNX_ENABLE_ALSA=1
CIBW_ENVIRONMENT: >
SHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON
CPLUS_INCLUDE_PATH=/project/alsa-lib/include:$CPLUS_INCLUDE_PATH
SHERPA_ONNX_ALSA_LIB_DIR=/project/alsa-lib/src/.libs
LD_LIBRARY_PATH=/project/build/bdist.linux-aarch64/wheel/sherpa_onnx/lib:$SHERPA_ONNX_ALSA_LIB_DIR
SHERPA_ONNX_MAKE_ARGS="VERBOSE=1"
SHERPA_ONNX_ENABLE_ALSA=1
SHERPA_ONNX_CMAKE_ARGS="-DSHERPA_ONNX_ENABLE_BINARY=OFF -DSHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF -DSHERPA_ONNX_ENABLE_C_API=OFF -DSHERPA_ONNX_ENABLE_WEBSOCKET=OFF"
CIBW_BUILD: "${{ matrix.python-version}}-* "
CIBW_SKIP: "cp27-* cp35-* cp36-* *-win32 pp* *-musllinux* *-manylinux_i686"
CIBW_BUILD_VERBOSITY: 3
... ... @@ -57,28 +341,38 @@ jobs:
CIBW_MANYLINUX_AARCH64_IMAGE: quay.io/pypa/${{ matrix.manylinux }}_aarch64
# From onnxruntime >= 1.17.0, it drops support for CentOS 7.0 and it supports only manylinux_2_28.
# manylinux_2_24 is no longer supported
CIBW_REPAIR_WHEEL_COMMAND: >
auditwheel repair -w {dest_dir}
--exclude libonnxruntime.so
{wheel}
- uses: actions/upload-artifact@v4
with:
name: wheel-${{ matrix.python-version }}-${{ matrix.manylinux }}-linux-aarch64
path: ./wheelhouse/*.whl
- name: Display wheels
shell: bash
run: |
ls -lh ./wheelhouse/
- name: Install patchelf
- name: Show wheels
shell: bash
run: |
sudo apt-get update -q
sudo apt-get install -q -y patchelf
patchelf --help
ls -lh wheelhouse/*.whl
- name: Patch wheels
shell: bash
run: |
mkdir ./wheels
sudo ./scripts/wheel/patch_wheel.py --in-dir ./wheelhouse --out-dir ./wheels
unzip -l wheelhouse/*.whl
ls -lh ./wheels/
rm -rf ./wheelhouse
mv ./wheels ./wheelhouse
echo "---"
mkdir t
cp wheelhouse/*.whl ./t
cd ./t
unzip ./*.whl
ls -lh
echo "---"
readelf -d sherpa_onnx/lib/*.so
- name: Publish to huggingface
env:
... ... @@ -116,11 +410,6 @@ jobs:
git commit -m "add more wheels"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- uses: actions/upload-artifact@v4
with:
name: wheel-${{ matrix.python-version }}-${{ matrix.manylinux }}
path: ./wheelhouse/*.whl
- name: Publish wheels to PyPI
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
... ...
... ... @@ -15,13 +15,14 @@ concurrency:
jobs:
build_wheels_linux_cuda:
name: ${{ matrix.manylinux }} ${{ matrix.python-version }}
name: ${{ matrix.manylinux }} ${{ matrix.python-version }} ${{ matrix.onnxruntime_version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
onnxruntime_version: ["1.17.1", "1.22.0"]
steps:
- uses: actions/checkout@v4
... ... @@ -69,6 +70,16 @@ jobs:
export SHERPA_ONNX_ENABLE_ALSA=1
export SHERPA_ONNX_CMAKE_ARGS="-DSHERPA_ONNX_ENABLE_GPU=ON"
onnxruntime_version=${{ matrix.onnxruntime_version }}
if [[ $onnxruntime_version == "1.22.0" ]]; then
curl -SL -O https://github.com/csukuangfj/onnxruntime-libs/releases/download/v1.22.0/onnxruntime-linux-x64-gpu-1.22.0-patched.zip
unzip onnxruntime-linux-x64-gpu-1.22.0-patched.zip
export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/lib
export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/include
export SHERPA_ONNX_CUDA_VERSION="12.cudnn9"
fi
python3 setup.py bdist_wheel
ls -lh dist
... ... @@ -80,6 +91,8 @@ jobs:
run: |
ls -lh ./wheelhouse/
unzip -l ./wheelhouse/*.whl
- name: Install patchelf
shell: bash
run: |
... ... @@ -97,9 +110,10 @@ jobs:
rm -rf ./wheelhouse
mv ./wheels ./wheelhouse
- uses: actions/upload-artifact@v4
with:
name: wheel-cuda-${{ matrix.python-version }}
name: wheel-cuda-${{ matrix.python-version }}-${{ matrix.onnxruntime_version }}
path: ./wheelhouse/*.whl
- name: Publish to huggingface
... ...
... ... @@ -5,6 +5,12 @@ on:
branches:
- wheel
workflow_dispatch:
inputs:
publish_sherpa_onnx_bin:
description: "Publish sherpa-onnx-bin"
required: false
default: "true"
type: boolean
env:
SHERPA_ONNX_IS_IN_GITHUB_ACTIONS: 1
... ... @@ -14,19 +20,18 @@ concurrency:
cancel-in-progress: true
jobs:
build_wheels_linux:
name: ${{ matrix.manylinux }} ${{ matrix.python-version }}
core:
name: core
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313"]
manylinux: [manylinux2014] #, manylinux_2_28]
os: [ubuntu-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update version
shell: bash
... ... @@ -34,30 +39,127 @@ jobs:
./new-release.sh
git diff .
# see https://cibuildwheel.readthedocs.io/en/stable/changelog/
# for a list of versions
- name: Build wheels
uses: pypa/cibuildwheel@v2.21.3
env:
CIBW_BEFORE_ALL: |
git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
cd alsa-lib
./gitcompile
cd ..
echo "PWD"
ls -lh /project/alsa-lib/src/.libs
- name: Display PWD
shell: bash
run: |
echo "pwd: $PWD"
ls -lh
du -h -d1 .
CIBW_ENVIRONMENT: CPLUS_INCLUDE_PATH=/project/alsa-lib/include:$CPLUS_INCLUDE_PATH SHERPA_ONNX_ALSA_LIB_DIR=/project/alsa-lib/src/.libs LD_LIBRARY_PATH=/project/build/bdist.linux-x86_64/wheel/sherpa_onnx/lib:$SHERPA_ONNX_ALSA_LIB_DIR SHERPA_ONNX_MAKE_ARGS="VERBOSE=1" SHERPA_ONNX_ENABLE_ALSA=1
- name: Build sherpa-onnx
uses: addnab/docker-run-action@v3
with:
image: quay.io/pypa/manylinux2014_x86_64
options: |
--volume ${{ github.workspace }}/:/home/runner/work/sherpa-onnx/sherpa-onnx
shell: bash
run: |
uname -a
gcc --version
cmake --version
cat /etc/*release
id
pwd
CIBW_BUILD: "${{ matrix.python-version}}-* "
CIBW_SKIP: "cp27-* cp35-* cp36-* *-win32 pp* *-musllinux* *-manylinux_i686"
CIBW_BUILD_VERBOSITY: 3
CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/${{ matrix.manylinux }}_x86_64
cd /home/runner/work/sherpa-onnx/sherpa-onnx
find /opt -name "python*"
echo "--------------------"
export PATH=/opt/_internal/cpython-3.10.18/bin:$PATH
which python3
python3 --version
python3 -m venv my
source ./my/bin/activate
python3 -m pip install setuptools wheel twine
git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
pushd alsa-lib
./gitcompile
popd
export CPLUS_INCLUDE_PATH=$PWD/alsa-lib/include:$CPLUS_INCLUDE_PATH
export SHERPA_ONNX_ALSA_LIB_DIR=$PWD/alsa-lib/src/.libs
mkdir build
pushd build
cmake \
-D SHERPA_ONNX_ENABLE_TTS=ON \
-D CMAKE_BUILD_TYPE=Release \
-D BUILD_SHARED_LIBS=ON \
-D SHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF \
-D CMAKE_INSTALL_PREFIX=./install \
..
make -j2
make install
ls -lh lib
ls -lh bin
echo "----"
ls -lh install/lib
rm -fv install/lib/libcargs.so
echo "----"
ls -lh install/bin
echo 'sherpa-onnx-core'
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./install/lib/lib*.so ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
cp -v ./install/include/sherpa-onnx/c-api/*.h ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
pushd ../scripts/wheel/sherpa-onnx-core
python3 setup.py bdist_wheel --plat-name=manylinux2014_x86_64
ls -lh dist
unzip -l dist/*.whl
popd
echo 'sherpa-onnx-bin'
mkdir -p ../scripts/wheel/sherpa-onnx-bin/bin
cp -v ./install/bin/sherpa-onnx* ../scripts/wheel/sherpa-onnx-bin/bin
- name: Display wheels
pushd ../scripts/wheel/sherpa-onnx-bin
python3 setup.py bdist_wheel --plat-name=manylinux2014_x86_64
ls -lh dist
unzip -l dist/*.whl
popd
- name: Collect wheels
shell: bash
run: |
ls -lh ./wheelhouse/
sudo chown -R $USER ./scripts/wheel
mkdir wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-core/dist/*.whl ./wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-bin/dist/*.whl ./wheelhouse
- uses: actions/upload-artifact@v4
with:
name: wheels-core-linux-x64
path: ./wheelhouse/*.whl
- name: Show wheels
shell: bash
run: |
sudo chown -R $USER ./scripts/wheel
ls -lh ./scripts/wheel/sherpa-onnx-core/dist
ls -lh ./scripts/wheel/sherpa-onnx-bin/dist
unzip -l ./scripts/wheel/sherpa-onnx-core/dist/*.whl
echo "---"
unzip -l ./scripts/wheel/sherpa-onnx-bin/dist/*.whl
- name: Install patchelf
shell: bash
... ... @@ -78,9 +180,189 @@ jobs:
- uses: actions/upload-artifact@v4
with:
name: wheels-core-linux-x64-patched
path: ./wheelhouse/*.whl
test:
name: test
needs: [core]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Retrieve artifact from Linux x64
uses: actions/download-artifact@v4
with:
name: wheels-core-linux-x64-patched
path: /tmp/wheels
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Show
shell: bash
run: |
ls -lh /tmp/wheels
- name: Install
shell: bash
run: |
python3 -m pip install /tmp/wheels/*.whl
- name: Show version
shell: bash
run: |
sherpa-onnx-version
- name: Show help
shell: bash
run: |
sherpa-onnx --help
echo "---"
ls -lh $(which sherpa-onnx)
file $(which sherpa-onnx)
readelf -d $(which sherpa-onnx)
ldd $(which sherpa-onnx)
sherpa-onnx-offline --help
echo "---"
sherpa-onnx-vad --help
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
with:
max_attempts: 20
timeout_seconds: 200
shell: bash
command: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
rm -rf huggingface
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
echo "SHERPA_ONNX_VERSION $SHERPA_ONNX_VERSION"
d=cpu/$SHERPA_ONNX_VERSION
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels huggingface
cd huggingface
git fetch
git pull
git merge -m "merge remote" --ff origin main
mkdir -p $d
cp -v /tmp/wheels/*.whl $d/
git status
git add .
git commit -m "add more wheels"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- name: Publish wheels to PyPI ${{ github.event.inputs.publish_sherpa_onnx_bin }}
if: ${{ (github.event.inputs.publish_sherpa_onnx_bin || 'true') == 'true' }}
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
shell: bash
run: |
python3 -m pip install --upgrade pip
python3 -m pip install wheel twine==5.0.0 setuptools
twine upload /tmp/wheels/*.whl
build_wheels_linux:
needs: [core, test]
name: ${{ matrix.manylinux }} ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313", "cp314"]
manylinux: [manylinux2014] #, manylinux_2_28]
steps:
- uses: actions/checkout@v4
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
# see https://cibuildwheel.readthedocs.io/en/stable/changelog/
# for a list of versions
- name: Build wheels
uses: pypa/cibuildwheel@v3.1.4
env:
CIBW_BEFORE_ALL: |
git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
cd alsa-lib
./gitcompile
cd ..
echo "PWD"
ls -lh /project/alsa-lib/src/.libs
CIBW_ENVIRONMENT: >
SHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON
CPLUS_INCLUDE_PATH=/project/alsa-lib/include:$CPLUS_INCLUDE_PATH
SHERPA_ONNX_ALSA_LIB_DIR=/project/alsa-lib/src/.libs
LD_LIBRARY_PATH=/project/build/bdist.linux-x86_64/wheel/sherpa_onnx/lib:$SHERPA_ONNX_ALSA_LIB_DIR
SHERPA_ONNX_MAKE_ARGS="VERBOSE=1"
SHERPA_ONNX_ENABLE_ALSA=1
SHERPA_ONNX_CMAKE_ARGS="-DSHERPA_ONNX_ENABLE_BINARY=OFF -DSHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF -DSHERPA_ONNX_ENABLE_C_API=OFF -DSHERPA_ONNX_ENABLE_WEBSOCKET=OFF"
CIBW_BUILD: "${{ matrix.python-version}}-* "
CIBW_SKIP: "cp27-* cp35-* cp36-* *-win32 pp* *-musllinux* *-manylinux_i686"
CIBW_BUILD_VERBOSITY: 3
CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/${{ matrix.manylinux }}_x86_64
CIBW_REPAIR_WHEEL_COMMAND: >
auditwheel repair -w {dest_dir}
--exclude libonnxruntime.so
{wheel}
- uses: actions/upload-artifact@v4
with:
name: wheel-${{ matrix.python-version }}-${{ matrix.manylinux }}
path: ./wheelhouse/*.whl
- name: Show wheels
shell: bash
run: |
ls -lh wheelhouse/*.whl
unzip -l wheelhouse/*.whl
echo "---"
mkdir t
cp wheelhouse/*.whl ./t
cd ./t
unzip ./*.whl
ls -lh
echo "---"
readelf -d sherpa_onnx/lib/*.so
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
... ...
... ... @@ -5,6 +5,12 @@ on:
branches:
- wheel
workflow_dispatch:
inputs:
publish_sherpa_onnx_bin:
description: "Publish sherpa-onnx-bin"
required: false
default: "true"
type: boolean
env:
SHERPA_ONNX_IS_IN_GITHUB_ACTIONS: 1
... ... @@ -14,14 +20,246 @@ concurrency:
cancel-in-progress: true
jobs:
core:
runs-on: ${{ matrix.os }}
name: core
strategy:
fail-fast: false
matrix:
os: [macos-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install deps
shell: bash
run: |
python3 -m pip install setuptools wheel twine
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: macos-latest-sherpa-onnx-core-arm64
- name: Configure CMake
shell: bash
run: |
export CMAKE_CXX_COMPILER_LAUNCHER=ccache
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
cmake --version
mkdir build
cd build
cmake \
-DSHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON \
-D BUILD_SHARED_LIBS=ON \
-D SHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF \
-D CMAKE_BUILD_TYPE=Release \
-D CMAKE_OSX_ARCHITECTURES='arm64' \
-D CMAKE_INSTALL_PREFIX=./install \
..
- name: Build sherpa-onnx for macos
shell: bash
run: |
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
cd build
make -j2
make install
ls -lh lib
ls -lh bin
file ./bin/sherpa-onnx
rm -fv ./install/include/cargs.h
rm -fv ./install/lib/cargs.h
rm -fv ./install/lib/libcargs.dylib
rm -fv ./install/lib/libcargs.a
rm -rfv ./install/lib/pkgconfig
- name: Copy files
shell: bash
run: |
echo 'sherpa-onnx-core'
mkdir -p scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./build/install/lib/lib* ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
mkdir -p ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
cp -v ./build/install/include/sherpa-onnx/c-api/*.h ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
echo 'sherpa-onnx-bin'
mkdir -p ./scripts/wheel/sherpa-onnx-bin/bin
cp -v ./build/install/bin/sherpa-onnx* ./scripts/wheel/sherpa-onnx-bin/bin
- name: Build sherpa-onnx-core
shell: bash
run: |
pushd ./scripts/wheel/sherpa-onnx-core
python3 setup.py bdist_wheel --plat-name=macosx_11_0_arm64
ls -lh dist
unzip -l dist/*.whl
popd
- name: Build sherpa-onnx-bin
shell: bash
run: |
pushd ./scripts/wheel/sherpa-onnx-bin
python3 setup.py bdist_wheel --plat-name=macosx_11_0_arm64
ls -lh dist
unzip -l dist/*.whl
popd
- name: Collect wheels
shell: bash
run: |
cp -v ./scripts/wheel/sherpa-onnx-core/dist/*.whl .
cp -v ./scripts/wheel/sherpa-onnx-bin/dist/*.whl .
ls -lh *.whl
- uses: actions/upload-artifact@v4
with:
name: wheels-core-macos-arm64
path: ./*.whl
test:
name: test
needs: [core]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Retrieve artifact from macos arm64
uses: actions/download-artifact@v4
with:
name: wheels-core-macos-arm64
path: /tmp/wheels
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Show
shell: bash
run: |
ls -lh /tmp/wheels
- name: Install
shell: bash
run: |
python3 -m pip install /tmp/wheels/*.whl
- name: Show version
shell: bash
run: |
sherpa-onnx-version
- name: Show help
shell: bash
run: |
sherpa-onnx --help
ls -lh $(which sherpa-onnx)
file $(which sherpa-onnx)
otool -L $(which sherpa-onnx)
otool -l $(which sherpa-onnx)
echo "---"
sherpa-onnx-offline --help
echo "---"
sherpa-onnx-vad --help
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
with:
max_attempts: 20
timeout_seconds: 200
shell: bash
command: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
rm -rf huggingface
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
echo "SHERPA_ONNX_VERSION $SHERPA_ONNX_VERSION"
d=cpu/$SHERPA_ONNX_VERSION
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels huggingface
cd huggingface
git fetch
git pull
git merge -m "merge remote" --ff origin main
mkdir -p $d
cp -v /tmp/wheels/*.whl $d/
git status
git add .
git commit -m "add more wheels"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- name: Publish wheels to PyPI ${{ github.event.inputs.publish_sherpa_onnx_bin }}
if: ${{ (github.event.inputs.publish_sherpa_onnx_bin || 'true') == 'true' }}
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
shell: bash
run: |
opts='--break-system-packages'
python3 -m pip install $opts --upgrade pip
python3 -m pip install $opts wheel twine==5.0.0 setuptools
twine upload /tmp/wheels/*.whl
build_wheels_macos_arm64:
needs: [core, test]
name: ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-13]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313"]
os: [macos-latest]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313", "cp314"]
steps:
- uses: actions/checkout@v4
... ... @@ -33,10 +271,12 @@ jobs:
git diff .
- name: Build wheels
uses: pypa/cibuildwheel@v2.21.3
uses: pypa/cibuildwheel@v3.1.4
env:
CIBW_BUILD: "${{ matrix.python-version}}-* "
CIBW_ENVIRONMENT: SHERPA_ONNX_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES='arm64'"
CIBW_ENVIRONMENT: >
SHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON
SHERPA_ONNX_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES='arm64' -DSHERPA_ONNX_ENABLE_BINARY=OFF -DSHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF -DSHERPA_ONNX_ENABLE_C_API=OFF -DSHERPA_ONNX_ENABLE_WEBSOCKET=OFF"
CIBW_ARCHS: "arm64"
CIBW_BUILD_VERBOSITY: 3
... ... @@ -47,6 +287,7 @@ jobs:
shell: bash
run: |
ls -lh ./wheelhouse/
unzip -l ./wheelhouse/*.whl
- uses: actions/upload-artifact@v4
with:
... ... @@ -95,10 +336,6 @@ jobs:
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
opts='--break-system-packages'
v=${{ matrix.python-version }}
if [[ $v == cp38 || $v == cp39 ]]; then
opts=''
fi
python3 -m pip install $opts --upgrade pip
python3 -m pip install $opts wheel twine==5.0.0 setuptools
... ...
... ... @@ -5,6 +5,12 @@ on:
branches:
- wheel
workflow_dispatch:
inputs:
publish_sherpa_onnx_bin:
description: "Publish sherpa-onnx-bin"
required: false
default: "true"
type: boolean
env:
SHERPA_ONNX_IS_IN_GITHUB_ACTIONS: 1
... ... @@ -14,14 +20,247 @@ concurrency:
cancel-in-progress: true
jobs:
core:
runs-on: ${{ matrix.os }}
name: core
strategy:
fail-fast: false
matrix:
os: [macos-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install deps
shell: bash
run: |
python3 -m pip install setuptools wheel twine
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: macos-latest-sherpa-onnx-core-universal2
- name: Configure CMake
shell: bash
run: |
export CMAKE_CXX_COMPILER_LAUNCHER=ccache
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
cmake --version
mkdir build
cd build
cmake \
-DSHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON \
-D BUILD_SHARED_LIBS=ON \
-D SHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF \
-D CMAKE_BUILD_TYPE=Release \
-D CMAKE_OSX_ARCHITECTURES='arm64;x86_64' \
-D CMAKE_INSTALL_PREFIX=./install \
..
- name: Build sherpa-onnx for macos
shell: bash
run: |
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
cd build
make -j2
make install
ls -lh lib
ls -lh bin
file ./bin/sherpa-onnx
rm -fv ./install/include/cargs.h
rm -fv ./install/lib/cargs.h
rm -fv ./install/lib/libcargs.dylib
rm -fv ./install/lib/libcargs.a
rm -rfv ./install/lib/pkgconfig
- name: Copy files
shell: bash
run: |
echo 'sherpa-onnx-core'
mkdir -p scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./build/install/lib/lib* ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
mkdir -p ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
cp -v ./build/install/include/sherpa-onnx/c-api/*.h ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
echo 'sherpa-onnx-bin'
mkdir -p ./scripts/wheel/sherpa-onnx-bin/bin
cp -v ./build/install/bin/sherpa-onnx* ./scripts/wheel/sherpa-onnx-bin/bin
- name: Build sherpa-onnx-core
shell: bash
run: |
pushd ./scripts/wheel/sherpa-onnx-core
python3 setup.py bdist_wheel --plat-name=macosx_10_15_universal2
ls -lh dist
unzip -l dist/*.whl
popd
- name: Build sherpa-onnx-bin
shell: bash
run: |
pushd ./scripts/wheel/sherpa-onnx-bin
python3 setup.py bdist_wheel --plat-name=macosx_10_15_universal2
ls -lh dist
unzip -l dist/*.whl
popd
- name: Collect wheels
shell: bash
run: |
cp -v ./scripts/wheel/sherpa-onnx-core/dist/*.whl .
cp -v ./scripts/wheel/sherpa-onnx-bin/dist/*.whl .
ls -lh *.whl
- uses: actions/upload-artifact@v4
with:
name: wheels-core-macos-universal
path: ./*.whl
test:
name: test ${{ matrix.os }}
needs: [core]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-latest, macos-13]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Retrieve artifact from macos universal
uses: actions/download-artifact@v4
with:
name: wheels-core-macos-universal
path: /tmp/wheels
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Show
shell: bash
run: |
ls -lh /tmp/wheels
- name: Install
shell: bash
run: |
python3 -m pip install /tmp/wheels/*.whl
- name: Show version
shell: bash
run: |
sherpa-onnx-version
- name: Show help
shell: bash
run: |
sherpa-onnx --help
ls -lh $(which sherpa-onnx)
file $(which sherpa-onnx)
otool -L $(which sherpa-onnx)
otool -l $(which sherpa-onnx)
echo "---"
sherpa-onnx-offline --help
echo "---"
sherpa-onnx-vad --help
- name: Publish to huggingface
if: matrix.os == 'macos-latest'
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
with:
max_attempts: 20
timeout_seconds: 200
shell: bash
command: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
rm -rf huggingface
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
echo "SHERPA_ONNX_VERSION $SHERPA_ONNX_VERSION"
d=cpu/$SHERPA_ONNX_VERSION
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels huggingface
cd huggingface
git fetch
git pull
git merge -m "merge remote" --ff origin main
mkdir -p $d
cp -v /tmp/wheels/*.whl $d/
git status
git add .
git commit -m "add more wheels"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- name: Publish wheels to PyPI ${{ github.event.inputs.publish_sherpa_onnx_bin }}
if: ${{ matrix.os == 'macos-latest' && (github.event.inputs.publish_sherpa_onnx_bin || 'true') == 'true' }}
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
shell: bash
run: |
opts='--break-system-packages'
python3 -m pip install $opts --upgrade pip
python3 -m pip install $opts wheel twine==5.0.0 setuptools
twine upload /tmp/wheels/*.whl
build_wheels_macos_universal2:
needs: [core, test]
name: ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-13]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313"]
os: [macos-latest]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313", "cp314"]
steps:
- uses: actions/checkout@v4
... ... @@ -32,11 +271,17 @@ jobs:
./new-release.sh
git diff .
- name: Set macOS deployment target
run: echo "MACOSX_DEPLOYMENT_TARGET=10.15" >> $GITHUB_ENV
- name: Build wheels
uses: pypa/cibuildwheel@v2.21.3
uses: pypa/cibuildwheel@v3.1.4
env:
CIBW_BUILD: "${{ matrix.python-version}}-* "
CIBW_ENVIRONMENT: SHERPA_ONNX_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES='arm64;x86_64'"
CIBW_ENVIRONMENT: >
MACOSX_DEPLOYMENT_TARGET=10.15
SHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON
SHERPA_ONNX_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES='arm64;x86_64' -DSHERPA_ONNX_ENABLE_BINARY=OFF -DCMAKE_OSX_DEPLOYMENT_TARGET='10.15' -DSHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF -DSHERPA_ONNX_ENABLE_C_API=OFF -DSHERPA_ONNX_ENABLE_WEBSOCKET=OFF"
CIBW_ARCHS: "universal2"
CIBW_BUILD_VERBOSITY: 3
... ... @@ -47,6 +292,7 @@ jobs:
shell: bash
run: |
ls -lh ./wheelhouse/
unzip -l ./wheelhouse/*.whl
- uses: actions/upload-artifact@v4
with:
... ... @@ -95,10 +341,6 @@ jobs:
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
opts='--break-system-packages'
v=${{ matrix.python-version }}
if [[ $v == cp38 || $v == cp39 ]]; then
opts=''
fi
python3 -m pip install $opts --upgrade pip
python3 -m pip install $opts wheel twine==5.0.0 setuptools
... ...
... ... @@ -5,6 +5,12 @@ on:
branches:
- wheel
workflow_dispatch:
inputs:
publish_sherpa_onnx_bin:
description: "Publish sherpa-onnx-bin"
required: false
default: "true"
type: boolean
env:
SHERPA_ONNX_IS_IN_GITHUB_ACTIONS: 1
... ... @@ -14,44 +20,266 @@ concurrency:
cancel-in-progress: true
jobs:
build_wheels_macos_x64:
name: ${{ matrix.python-version }}
core:
runs-on: ${{ matrix.os }}
name: core
strategy:
fail-fast: false
matrix:
os: [macos-13]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313"]
os: [macos-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
# see https://cibuildwheel.readthedocs.io/en/stable/changelog/
# for a list of versions
- name: Build wheels
if: matrix.python-version == 'cp37'
uses: pypa/cibuildwheel@v2.11.4
- name: Install deps
shell: bash
run: |
python3 -m pip install setuptools wheel twine
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: macos-latest-sherpa-onnx-core-x64
- name: Configure CMake
shell: bash
run: |
export CMAKE_CXX_COMPILER_LAUNCHER=ccache
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
cmake --version
mkdir build
cd build
cmake \
-DSHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON \
-DCMAKE_OSX_DEPLOYMENT_TARGET=10.15 \
-D BUILD_SHARED_LIBS=ON \
-D SHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF \
-D CMAKE_BUILD_TYPE=Release \
-D CMAKE_OSX_ARCHITECTURES='x86_64' \
-D CMAKE_INSTALL_PREFIX=./install \
..
- name: Build sherpa-onnx for macos
shell: bash
run: |
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
cd build
make -j2
make install
ls -lh lib
ls -lh bin
file ./bin/sherpa-onnx
rm -fv ./install/include/cargs.h
rm -fv ./install/lib/cargs.h
rm -fv ./install/lib/libcargs.dylib
rm -fv ./install/lib/libcargs.a
rm -rfv ./install/lib/pkgconfig
- name: Copy files
shell: bash
run: |
echo 'sherpa-onnx-core'
mkdir -p scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./build/install/lib/lib* ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
mkdir -p ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
cp -v ./build/install/include/sherpa-onnx/c-api/*.h ./scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
echo 'sherpa-onnx-bin'
mkdir -p ./scripts/wheel/sherpa-onnx-bin/bin
cp -v ./build/install/bin/sherpa-onnx* ./scripts/wheel/sherpa-onnx-bin/bin
- name: Build sherpa-onnx-core
shell: bash
run: |
pushd ./scripts/wheel/sherpa-onnx-core
python3 setup.py bdist_wheel --plat-name=macosx_10_15_x86_64
ls -lh dist
unzip -l dist/*.whl
popd
- name: Build sherpa-onnx-bin
shell: bash
run: |
pushd ./scripts/wheel/sherpa-onnx-bin
python3 setup.py bdist_wheel --plat-name=macosx_10_15_x86_64
ls -lh dist
unzip -l dist/*.whl
popd
- name: Collect wheels
shell: bash
run: |
cp -v ./scripts/wheel/sherpa-onnx-core/dist/*.whl .
cp -v ./scripts/wheel/sherpa-onnx-bin/dist/*.whl .
ls -lh *.whl
- uses: actions/upload-artifact@v4
with:
name: wheels-core-macos-x64
path: ./*.whl
test:
name: test
needs: [core]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-13]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Retrieve artifact from macos x64
uses: actions/download-artifact@v4
with:
name: wheels-core-macos-x64
path: /tmp/wheels
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Show
shell: bash
run: |
ls -lh /tmp/wheels
- name: Install
shell: bash
run: |
python3 -m pip install /tmp/wheels/*.whl
- name: Show version
shell: bash
run: |
sherpa-onnx-version
- name: Show help
shell: bash
run: |
sherpa-onnx --help
ls -lh $(which sherpa-onnx)
file $(which sherpa-onnx)
otool -L $(which sherpa-onnx)
otool -l $(which sherpa-onnx)
echo "---"
sherpa-onnx-offline --help
echo "---"
sherpa-onnx-vad --help
# Upload the core/bin wheels to the csukuangfj/sherpa-onnx-wheels HF repo.
# Fix: stray cibuildwheel keys (CIBW_BUILD, CIBW_ENVIRONMENT, CIBW_ARCHS,
# CIBW_BUILD_VERBOSITY under env:, and CIBW_REPAIR_WHEEL_COMMAND_MACOS in the
# middle of the shell command) were interleaved into this step, breaking both
# the YAML mapping and the shell script. Reconstructed to match the identical
# "Publish to huggingface" steps used by the other wheel workflows in this
# change set.
# NOTE(review): wheel path /tmp/wheels matches this job's download-artifact
# destination — confirm against the "Retrieve artifact" step above.
- name: Publish to huggingface
  env:
    HF_TOKEN: ${{ secrets.HF_TOKEN }}
  # Retried because HF git pushes race with the other wheel-publishing jobs.
  uses: nick-fields/retry@v3
  with:
    max_attempts: 20
    timeout_seconds: 200
    shell: bash
    command: |
      git config --global user.email "csukuangfj@gmail.com"
      git config --global user.name "Fangjun Kuang"

      rm -rf huggingface
      export GIT_LFS_SKIP_SMUDGE=1
      export GIT_CLONE_PROTECTION_ACTIVE=false

      SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
      echo "SHERPA_ONNX_VERSION $SHERPA_ONNX_VERSION"

      d=cpu/$SHERPA_ONNX_VERSION

      git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels huggingface
      cd huggingface
      git fetch
      git pull
      git merge -m "merge remote" --ff origin main

      mkdir -p $d

      cp -v /tmp/wheels/*.whl $d/

      git status
      git add .
      git commit -m "add more wheels"
      git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- name: Publish wheels to PyPI ${{ github.event.inputs.publish_sherpa_onnx_bin }}
if: ${{ (github.event.inputs.publish_sherpa_onnx_bin || 'true') == 'true' }}
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
shell: bash
run: |
python3 -m pip install --upgrade pip
python3 -m pip install wheel twine==5.0.0 setuptools
twine upload /tmp/wheels/*.whl
build_wheels_macos_x64:
needs: [core, test]
name: ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-latest]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313", "cp314"]
steps:
- uses: actions/checkout@v4
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
- name: Set macOS deployment target
run: echo "MACOSX_DEPLOYMENT_TARGET=10.15" >> $GITHUB_ENV
- name: Build wheels
if: matrix.python-version != 'cp37'
uses: pypa/cibuildwheel@v2.21.3
uses: pypa/cibuildwheel@v3.1.4
env:
CIBW_BUILD: "${{ matrix.python-version}}-* "
CIBW_ENVIRONMENT: SHERPA_ONNX_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES='x86_64'"
CIBW_ENVIRONMENT: >
MACOSX_DEPLOYMENT_TARGET=10.15
SHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON
SHERPA_ONNX_CMAKE_ARGS="-DCMAKE_OSX_ARCHITECTURES='x86_64' -DSHERPA_ONNX_ENABLE_BINARY=OFF -DCMAKE_OSX_DEPLOYMENT_TARGET='10.15' -DSHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF -DSHERPA_ONNX_ENABLE_C_API=OFF -DSHERPA_ONNX_ENABLE_WEBSOCKET=OFF"
CIBW_ARCHS: "x86_64"
CIBW_BUILD_VERBOSITY: 3
... ... @@ -62,10 +290,11 @@ jobs:
shell: bash
run: |
ls -lh ./wheelhouse/
unzip -l ./wheelhouse/*.whl
- uses: actions/upload-artifact@v4
with:
name: wheel-${{ matrix.python-version }}
name: wheel-macos-x64-${{ matrix.python-version }}
path: ./wheelhouse/*.whl
- name: Publish to huggingface
... ... @@ -110,16 +339,8 @@ jobs:
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
opts='--break-system-packages'
v=${{ matrix.python-version }}
if [[ $v == cp37 || $v == cp38 || $v == cp39 ]]; then
opts=''
fi
python3 -m pip install $opts --upgrade pip
if [[ ${{ matrix.python-version }} == "cp37" ]]; then
python3 -m pip install $opts wheel twine setuptools
else
python3 -m pip install $opts wheel twine==5.0.0 setuptools
fi
python3 -m pip install $opts wheel twine==5.0.0 setuptools
twine upload ./wheelhouse/*.whl
... ...
... ... @@ -5,6 +5,12 @@ on:
branches:
- wheel
workflow_dispatch:
inputs:
publish_sherpa_onnx_bin:
description: "Publish sherpa-onnx-bin"
required: false
default: "true"
type: boolean
env:
SHERPA_ONNX_IS_IN_GITHUB_ACTIONS: 1
... ... @@ -14,14 +20,251 @@ concurrency:
cancel-in-progress: true
jobs:
core:
name: core
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install Python dependencies
shell: bash
run: |
python3 -m pip install wheel twine==5.0.0 setuptools
- name: Install sccache
run: choco install sccache -y
- name: Cache sccache
uses: actions/cache@v3
with:
path: C:\Users\runneradmin\AppData\Local\Mozilla\sccache
key: ${{ matrix.os }}-sccache-core-win32
restore-keys: |
${{ matrix.os }}-sccache-core-win32
- name: Configure CMake
shell: bash
run: |
mkdir build
cd build
cmake \
-D CMAKE_C_COMPILER_LAUNCHER=sccache \
-D CMAKE_CXX_COMPILER_LAUNCHER=sccache \
-A Win32 \
-D CMAKE_BUILD_TYPE=Release \
-D BUILD_SHARED_LIBS=ON \
-D SHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF \
-DCMAKE_INSTALL_PREFIX=./install \
..
- name: Build sherpa-onnx for windows
shell: bash
run: |
cd build
cmake --build . --config Release -- -m:2
cmake --build . --config Release --target install -- -m:2
ls -lh ./bin/Release/sherpa-onnx.exe
- name: Show sccache stats
run: sccache --show-stats
- name: Show
shell: bash
run: |
echo "---bin---"
ls -lh build/install/bin
echo "---lib---"
ls -lh build/install/lib
echo "---include---"
ls -lh build/install/include
- name: Copy files
shell: bash
run: |
cd build
echo 'sherpa-onnx-core'
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./install/lib/onnxruntime.dll ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./install/lib/sherpa-onnx-*.dll ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
# keep the *.lib file so users can write code to link with our dll
cp -v ./install/lib/sherpa-onnx-*.lib ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
cp -v ./install/include/sherpa-onnx/c-api/*.h ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
pushd ../scripts/wheel/sherpa-onnx-core
python3 setup.py bdist_wheel --plat-name=win32
ls -lh dist
popd
echo 'sherpa-onnx-bin'
mkdir -p ../scripts/wheel/sherpa-onnx-bin/bin
cp -v ./install/bin/sherpa-onnx* ../scripts/wheel/sherpa-onnx-bin/bin
pushd ../scripts/wheel/sherpa-onnx-bin
python3 setup.py bdist_wheel --plat-name=win32
ls -lh dist
popd
- name: Collect wheels
shell: bash
run: |
mkdir wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-core/dist/*.whl ./wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-bin/dist/*.whl ./wheelhouse
- uses: actions/upload-artifact@v4
with:
name: wheels-core-win-x86
path: ./wheelhouse/*.whl
- name: Show wheels
shell: bash
run: |
ls -lh ./scripts/wheel/sherpa-onnx-core/dist
ls -lh ./scripts/wheel/sherpa-onnx-bin/dist
unzip -l ./scripts/wheel/sherpa-onnx-core/dist/*.whl
echo "---"
unzip -l ./scripts/wheel/sherpa-onnx-bin/dist/*.whl
test:
name: test
needs: [core]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-2022]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Retrieve artifact from Windows x86
uses: actions/download-artifact@v4
with:
name: wheels-core-win-x86
path: /tmp/wheels
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
architecture: x86
- name: Show
shell: bash
run: |
ls -lh /d/tmp/wheels
- name: Install
shell: bash
run: |
python3 -m pip install /d/tmp/wheels/*.whl
- name: Show version
shell: bash
run: |
sherpa-onnx-version
which sherpa-onnx-version
- name: Show help
shell: bash
run: |
sherpa-onnx --help
echo "---"
sherpa-onnx-offline --help
echo "---"
sherpa-onnx-vad --help
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
with:
max_attempts: 20
timeout_seconds: 200
shell: bash
command: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
rm -rf huggingface
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
echo "SHERPA_ONNX_VERSION $SHERPA_ONNX_VERSION"
d=cpu/$SHERPA_ONNX_VERSION
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels huggingface
cd huggingface
git fetch
git pull
git merge -m "merge remote" --ff origin main
mkdir -p $d
cp -v /d/tmp/wheels/*.whl $d/
git status
git add .
git commit -m "add more wheels"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- name: Publish wheels to PyPI ${{ github.event.inputs.publish_sherpa_onnx_bin }}
if: ${{ (github.event.inputs.publish_sherpa_onnx_bin || 'true') == 'true' }}
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
shell: bash
run: |
python3 -m pip install --upgrade pip
python3 -m pip install wheel twine==5.0.0 setuptools
twine upload /d/tmp/wheels/*.whl
build_wheels_win32:
needs: [core, test]
name: ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-latest]
python-version: ["cp37", "cp38", "cp39", "cp310", "cp311", "cp312", "cp313"]
python-version: ["cp38", "cp39", "cp310", "cp311", "cp312", "cp313", "cp314"]
steps:
- uses: actions/checkout@v4
... ... @@ -34,19 +277,23 @@ jobs:
# see https://cibuildwheel.readthedocs.io/en/stable/changelog/
# for a list of versions
- name: Build wheels
uses: pypa/cibuildwheel@v2.21.3
- name: Build wheels (cibuildwheel)
uses: pypa/cibuildwheel@v3.1.4
env:
CIBW_ENVIRONMENT: SHERPA_ONNX_CMAKE_ARGS="-A Win32"
CIBW_BUILD: "${{ matrix.python-version}}-* "
CIBW_SKIP: "*-win_amd64"
CIBW_BUILD_VERBOSITY: 3
# Environment passed into the cibuildwheel build container.
# Fix: -DSHERPA_ONNX_ENABLE_C_API=OFF was listed twice in the cmake args;
# the duplicate is removed (harmless to cmake, but it hides real intent and
# invites copy-paste drift between the per-platform workflows).
CIBW_ENVIRONMENT: >
  SHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON
  SHERPA_ONNX_CMAKE_ARGS="-A Win32 -DSHERPA_ONNX_ENABLE_BINARY=OFF -DSHERPA_ONNX_ENABLE_C_API=OFF -DSHERPA_ONNX_ENABLE_WEBSOCKET=OFF"
- name: Display wheels
shell: bash
run: |
ls -lh ./wheelhouse/
unzip -l ./wheelhouse/*.whl
- uses: actions/upload-artifact@v4
with:
name: wheel-${{ matrix.python-version }}
... ...
... ... @@ -15,13 +15,14 @@ concurrency:
jobs:
build_wheels_win64_cuda:
name: ${{ matrix.python-version }}
name: ${{ matrix.python-version }} ${{ matrix.onnxruntime_version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-2022]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
onnxruntime_version: ["1.17.1", "1.22.0"]
steps:
- uses: actions/checkout@v4
... ... @@ -44,6 +45,17 @@ jobs:
export SHERPA_ONNX_CMAKE_ARGS="-DSHERPA_ONNX_ENABLE_GPU=ON"
onnxruntime_version=${{ matrix.onnxruntime_version }}
if [[ $onnxruntime_version == "1.22.0" ]]; then
curl -SL -O https://github.com/microsoft/onnxruntime/releases/download/v1.22.0/onnxruntime-win-x64-gpu-1.22.0.zip
unzip onnxruntime-win-x64-gpu-1.22.0.zip
export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/lib
export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-win-x64-gpu-1.22.0/include
export SHERPA_ONNX_CUDA_VERSION="12.cudnn9"
fi
python3 setup.py bdist_wheel
ls -lh ./dist/
... ... @@ -54,10 +66,11 @@ jobs:
shell: bash
run: |
ls -lh ./wheelhouse/
unzip -l ./wheelhouse/*.whl
- uses: actions/upload-artifact@v4
with:
name: wheel-${{ matrix.python-version }}
name: wheel-${{ matrix.python-version }}-${{ matrix.onnxruntime_version }}
path: ./wheelhouse/*.whl
- name: Publish to huggingface
... ...
... ... @@ -5,6 +5,12 @@ on:
branches:
- wheel
workflow_dispatch:
inputs:
publish_sherpa_onnx_bin:
description: "Publish sherpa-onnx-bin"
required: false
default: "true"
type: boolean
env:
SHERPA_ONNX_IS_IN_GITHUB_ACTIONS: 1
... ... @@ -14,13 +20,252 @@ concurrency:
cancel-in-progress: true
jobs:
core:
name: core
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-latest]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update version
shell: bash
run: |
./new-release.sh
git diff .
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install Python dependencies
shell: bash
run: |
python3 -m pip install wheel twine==5.0.0 setuptools
- name: Install sccache
run: choco install sccache -y
- name: Cache sccache
uses: actions/cache@v3
with:
path: C:\Users\runneradmin\AppData\Local\Mozilla\sccache
key: ${{ matrix.os }}-sccache-core
restore-keys: |
${{ matrix.os }}-sccache-core-
- name: Configure CMake
shell: bash
run: |
mkdir build
cd build
cmake \
-D CMAKE_C_COMPILER_LAUNCHER=sccache \
-D CMAKE_CXX_COMPILER_LAUNCHER=sccache \
-A x64 \
-D SHERPA_ONNX_ENABLE_TTS=ON \
-D CMAKE_BUILD_TYPE=Release \
-D BUILD_SHARED_LIBS=ON \
-D SHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF \
-DCMAKE_INSTALL_PREFIX=./install \
..
- name: Build sherpa-onnx for windows
shell: bash
run: |
cd build
cmake --build . --config Release -- -m:2
cmake --build . --config Release --target install -- -m:2
ls -lh ./bin/Release/sherpa-onnx.exe
- name: Show sccache stats
run: sccache --show-stats
- name: Show
shell: bash
run: |
echo "---bin---"
ls -lh build/install/bin
echo "---lib---"
ls -lh build/install/lib
echo "---include---"
ls -lh build/install/include
- name: Copy files
shell: bash
run: |
cd build
echo 'sherpa-onnx-core'
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./install/lib/onnxruntime.dll ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
cp -v ./install/lib/sherpa-onnx-*.dll ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
# keep the *.lib file so users can write code to link with our dll
cp -v ./install/lib/sherpa-onnx-*.lib ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/lib
mkdir -p ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
cp -v ./install/include/sherpa-onnx/c-api/*.h ../scripts/wheel/sherpa-onnx-core/sherpa_onnx/include/sherpa-onnx/c-api
pushd ../scripts/wheel/sherpa-onnx-core
python3 setup.py bdist_wheel --plat-name=win_amd64
ls -lh dist
popd
echo 'sherpa-onnx-bin'
mkdir -p ../scripts/wheel/sherpa-onnx-bin/bin
cp -v ./install/bin/sherpa-onnx* ../scripts/wheel/sherpa-onnx-bin/bin
pushd ../scripts/wheel/sherpa-onnx-bin
python3 setup.py bdist_wheel --plat-name=win_amd64
ls -lh dist
popd
- name: Collect wheels
shell: bash
run: |
mkdir wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-core/dist/*.whl ./wheelhouse
cp -v ./scripts/wheel/sherpa-onnx-bin/dist/*.whl ./wheelhouse
- uses: actions/upload-artifact@v4
with:
name: wheels-core-win-x64
path: ./wheelhouse/*.whl
- name: Show wheels
shell: bash
run: |
ls -lh ./scripts/wheel/sherpa-onnx-core/dist
ls -lh ./scripts/wheel/sherpa-onnx-bin/dist
unzip -l ./scripts/wheel/sherpa-onnx-core/dist/*.whl
echo "---"
unzip -l ./scripts/wheel/sherpa-onnx-bin/dist/*.whl
test:
name: test
needs: [core]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-2022]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Retrieve artifact from Windows x64
uses: actions/download-artifact@v4
with:
name: wheels-core-win-x64
path: /tmp/wheels
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Show
shell: bash
run: |
ls -lh /d/tmp/wheels
- name: Install
shell: bash
run: |
python3 -m pip install /d/tmp/wheels/*.whl
- name: Show version
shell: bash
run: |
sherpa-onnx-version
which sherpa-onnx-version
- name: Show help
shell: bash
run: |
sherpa-onnx --help
echo "---"
sherpa-onnx-offline --help
echo "---"
sherpa-onnx-vad --help
which sherpa-onnx-vad
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
with:
max_attempts: 20
timeout_seconds: 200
shell: bash
command: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
rm -rf huggingface
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
echo "SHERPA_ONNX_VERSION $SHERPA_ONNX_VERSION"
d=cpu/$SHERPA_ONNX_VERSION
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels huggingface
cd huggingface
git fetch
git pull
git merge -m "merge remote" --ff origin main
mkdir -p $d
cp -v /d/tmp/wheels/*.whl $d/
git status
git add .
git commit -m "add more wheels"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-wheels main
- name: Publish wheels to PyPI ${{ github.event.inputs.publish_sherpa_onnx_bin }}
if: ${{ (github.event.inputs.publish_sherpa_onnx_bin || 'true') == 'true' }}
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
shell: bash
run: |
python3 -m pip install --upgrade pip
python3 -m pip install wheel twine==5.0.0 setuptools
twine upload /d/tmp/wheels/*.whl
build_wheels_win64:
needs: [core, test]
name: ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-2022]
os: [windows-latest]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
steps:
... ... @@ -37,10 +282,14 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
- name: Build wheels
- name: Build wheels (cmd)
shell: bash
run: |
pip install setuptools wheel
python3 -m pip install setuptools wheel twine
export SHERPA_ONNX_SPLIT_PYTHON_PACKAGE=ON
# Fix: -DSHERPA_ONNX_ENABLE_C_API=OFF appeared twice in this export;
# deduplicated so the flag list matches the other per-platform workflows.
export SHERPA_ONNX_CMAKE_ARGS="-DSHERPA_ONNX_ENABLE_BINARY=OFF -DSHERPA_ONNX_BUILD_C_API_EXAMPLES=OFF -DSHERPA_ONNX_ENABLE_C_API=OFF -DSHERPA_ONNX_ENABLE_WEBSOCKET=OFF"
python3 setup.py bdist_wheel
... ... @@ -52,6 +301,7 @@ jobs:
shell: bash
run: |
ls -lh ./wheelhouse/
unzip -l ./wheelhouse/*.whl
- uses: actions/upload-artifact@v4
with:
... ...
... ... @@ -13,16 +13,6 @@ on:
- 'sherpa-onnx/c-api/*'
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
pull_request:
branches:
- master
paths:
- './build-ios.sh'
- '.github/workflows/build-xcframework.yaml'
- 'CMakeLists.txt'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
workflow_dispatch:
... ...
... ... @@ -13,16 +13,6 @@ on:
- 'sherpa-onnx/c-api/*'
- 'c-api-examples/**'
- 'ffmpeg-examples/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/c-api-from-buffer.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'c-api-examples/**'
- 'ffmpeg-examples/**'
workflow_dispatch:
... ...
... ... @@ -11,16 +11,6 @@ on:
- 'sherpa-onnx/c-api/*'
- 'c-api-examples/**'
- 'ffmpeg-examples/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/c-api.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'c-api-examples/**'
- 'ffmpeg-examples/**'
workflow_dispatch:
... ... @@ -85,6 +75,66 @@ jobs:
otool -L ./install/lib/libsherpa-onnx-c-api.dylib
fi
- name: Test Wenet CTC
shell: bash
run: |
name=wenet-ctc-c-api
gcc -o $name ./c-api-examples/$name.c \
-I ./build/install/include \
-L ./build/install/lib/ \
-l sherpa-onnx-c-api \
-l onnxruntime
ls -lh $name
if [[ ${{ matrix.os }} == ubuntu-latest || ${{ matrix.os }} == ubuntu-22.04-arm ]]; then
ldd ./$name
echo "----"
readelf -d ./$name
fi
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
tar xvf sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
rm sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
export LD_LIBRARY_PATH=$PWD/build/install/lib:$LD_LIBRARY_PATH
export DYLD_LIBRARY_PATH=$PWD/build/install/lib:$DYLD_LIBRARY_PATH
./$name
rm $name
rm -rf sherpa-onnx-wenetspeech-*
- name: Test T-one
shell: bash
run: |
name=streaming-t-one-ctc-c-api
gcc -o $name ./c-api-examples/$name.c \
-I ./build/install/include \
-L ./build/install/lib/ \
-l sherpa-onnx-c-api \
-l onnxruntime
ls -lh $name
if [[ ${{ matrix.os }} == ubuntu-latest || ${{ matrix.os }} == ubuntu-22.04-arm ]]; then
ldd ./$name
echo "----"
readelf -d ./$name
fi
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
tar xvf sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
rm sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
export LD_LIBRARY_PATH=$PWD/build/install/lib:$LD_LIBRARY_PATH
export DYLD_LIBRARY_PATH=$PWD/build/install/lib:$DYLD_LIBRARY_PATH
./$name
rm $name
rm -rf sherpa-onnx-streaming-t-one-russian-2025-09-08
- name: Test KittenTTS
shell: bash
run: |
... ... @@ -540,7 +590,8 @@ jobs:
rm -rf sherpa-onnx-*
- name: Test ffmpeg
if: matrix.os == 'macos-latest'
# if: matrix.os == 'macos-latest'
if: false
shell: bash
run: |
brew install ffmpeg
... ...
... ... @@ -8,11 +8,6 @@ on:
paths:
- 'sherpa-onnx/csrc/**'
pull_request:
branches:
- master
paths:
- 'sherpa-onnx/csrc/**'
workflow_dispatch:
... ...
... ... @@ -11,15 +11,6 @@ on:
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'cxx-api-examples/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/cxx-api.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'cxx-api-examples/**'
workflow_dispatch:
... ... @@ -87,6 +78,74 @@ jobs:
otool -L ./install/lib/libsherpa-onnx-cxx-api.dylib
fi
- name: Test Wenet CTC
shell: bash
run: |
name=wenet-ctc-cxx-api
g++ -std=c++17 -o $name ./cxx-api-examples/$name.cc \
-I ./build/install/include \
-L ./build/install/lib/ \
-l sherpa-onnx-cxx-api \
-l sherpa-onnx-c-api \
-l onnxruntime
ls -lh $name
if [[ ${{ matrix.os }} == ubuntu-latest || ${{ matrix.os }} == ubuntu-22.04-arm ]]; then
ls -lh ./$name
ldd ./$name
echo "----"
readelf -d ./$name
fi
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
tar xvf sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
rm sherpa-onnx-wenetspeech-yue-u2pp-conformer-ctc-zh-en-cantonese-int8-2025-09-10.tar.bz2
echo "---"
export LD_LIBRARY_PATH=$PWD/build/install/lib:$LD_LIBRARY_PATH
export DYLD_LIBRARY_PATH=$PWD/build/install/lib:$DYLD_LIBRARY_PATH
./$name
rm -rf sherpa-onnx-wenetspeech-*
rm -v ./$name
- name: Test T-one
shell: bash
run: |
name=streaming-t-one-ctc-cxx-api
g++ -std=c++17 -o $name ./cxx-api-examples/$name.cc \
-I ./build/install/include \
-L ./build/install/lib/ \
-l sherpa-onnx-cxx-api \
-l sherpa-onnx-c-api \
-l onnxruntime
ls -lh $name
if [[ ${{ matrix.os }} == ubuntu-latest || ${{ matrix.os }} == ubuntu-22.04-arm ]]; then
ls -lh ./$name
ldd ./$name
echo "----"
readelf -d ./$name
fi
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
tar xvf sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
rm sherpa-onnx-streaming-t-one-russian-2025-09-08.tar.bz2
echo "---"
export LD_LIBRARY_PATH=$PWD/build/install/lib:$LD_LIBRARY_PATH
export DYLD_LIBRARY_PATH=$PWD/build/install/lib:$DYLD_LIBRARY_PATH
./$name
rm -rf sherpa-onnx-streaming-t-one-russian-2025-09-08
rm -v ./$name
- name: Test KittenTTS
shell: bash
run: |
... ...
... ... @@ -3,7 +3,7 @@ name: export-kitten-to-onnx
on:
push:
branches:
- kitten-tts
- kitten-0.2
workflow_dispatch:
... ... @@ -20,6 +20,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
version: ["nano_v0_1", "nano_v0_2", "mini_v0_1"]
python-version: ["3.10"]
steps:
... ... @@ -40,7 +41,7 @@ jobs:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
shell: bash
run: |
cd scripts/kitten-tts/nano_v0_1
cd scripts/kitten-tts/${{ matrix.version }}
./run.sh
- name: Collect results
... ... @@ -50,9 +51,20 @@ jobs:
tar xf espeak-ng-data.tar.bz2
rm espeak-ng-data.tar.bz2
src=scripts/kitten-tts/nano_v0_1
version=${{ matrix.version }}
d=kitten-nano-en-v0_1-fp16
src=scripts/kitten-tts/$version
if [[ $version == "nano_v0_1" ]]; then
d=kitten-nano-en-v0_1-fp16
elif [[ $version == "nano_v0_2" ]]; then
d=kitten-nano-en-v0_2-fp16
elif [[ $version == "mini_v0_1" ]]; then
d=kitten-mini-en-v0_1-fp16
else
echo "unknown version: $version"
exit 1
fi
mkdir $d
cp -a LICENSE $d/LICENSE
... ... @@ -100,12 +112,21 @@ jobs:
dirs=(
kitten-nano-en-v0_1-fp16
kitten-nano-en-v0_2-fp16
kitten-mini-en-v0_1-fp16
)
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
for d in ${dirs[@]}; do
echo "d $d"
if [[ ! -d $d ]]; then
echo "$d does not exist"
continue
fi
echo "$d exists"
rm -rf huggingface
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/$d huggingface
... ...
... ... @@ -3,7 +3,7 @@ name: export-matcha-fa-en-to-onnx
on:
push:
branches:
- fix-ci
- tts-matcha-samples
workflow_dispatch:
... ... @@ -33,15 +33,48 @@ jobs:
- name: Install Python dependencies
shell: bash
run: |
pip install "numpy<=1.26.4" onnx==1.16.0 onnxruntime==1.17.1 soundfile piper_phonemize -f https://k2-fsa.github.io/icefall/piper_phonemize.html
pip install "numpy<=1.26.4" onnx==1.16.0 onnxruntime==1.17.1 soundfile piper_phonemize -f https://k2-fsa.github.io/icefall/piper_phonemize.html sherpa-onnx
- name: Run
if: false
shell: bash
run: |
cd scripts/matcha-tts/fa-en
./run.sh
- name: Generate samples
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
shell: bash
run: |
cd scripts/matcha-tts/zh
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/matcha-icefall-zh-baker.tar.bz2
tar xvf matcha-icefall-zh-baker.tar.bz2
rm matcha-icefall-zh-baker.tar.bz2
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/vocoder-models/vocos-22khz-univ.onnx
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-tts-samples hf
mkdir -p ./hf/matcha/icefall-zh/mp3
./generate_samples.py
pushd hf
git pull
git add .
git commit -m 'add samples for matcha tts zh'
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-tts-samples main
popd
rm -rf hf
ls -lh
- name: Collect results ${{ matrix.version }}
if: false
shell: bash
run: |
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/espeak-ng-data.tar.bz2
... ... @@ -73,6 +106,7 @@ jobs:
ls -lh $dst2.tar.bz2
- name: Publish to huggingface male (musa)
if: false
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
... ... @@ -110,6 +144,7 @@ jobs:
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/matcha-tts-fa_en-musa main || true
- name: Publish to huggingface female (khadijah)
if: false
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
... ... @@ -147,7 +182,8 @@ jobs:
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/matcha-tts-fa_en-khadijah main || true
- name: Release
if: github.repository_owner == 'csukuangfj'
# if: github.repository_owner == 'csukuangfj'
if: false
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
... ... @@ -158,7 +194,8 @@ jobs:
tag: tts-models
- name: Release
if: github.repository_owner == 'k2-fsa'
# if: github.repository_owner == 'k2-fsa'
if: false
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
... ...
name: export-nemo-parakeet-tdt-0.6b-v2
name: export-nemo-parakeet-tdt-0.6b
on:
push:
... ... @@ -10,81 +10,111 @@ concurrency:
group: export-nemo-parakeet-tdt-0.6b-v2-${{ github.ref }}
cancel-in-progress: true
env:
HF_HUB_ENABLE_HF_TRANSFER: "0"
jobs:
export-nemo-parakeet-tdt-0_6b-v2:
export-nemo-parakeet-tdt-0_6b:
if: github.repository_owner == 'k2-fsa' || github.repository_owner == 'csukuangfj'
name: parakeet tdt 0.6b v2
name: parakeet tdt 0.6b ${{ matrix.version }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-latest]
python-version: ["3.10"]
version: ["v2", "v3"]
steps:
- uses: actions/checkout@v4
- name: Show disk space
run: |
df -h
# See https://github.com/vlayer-xyz/vlayer/pull/543/files
# Free up disk space as the macOS runners end up using most for Xcode
# versions we don't need and use iOS simulators.
- name: Free up disk space
run: |
echo '*** Delete iOS simulators and their caches'
xcrun simctl delete all
sudo rm -rf ~/Library/Developer/CoreSimulator/Caches/*
- name: Show disk space
run: |
df -h
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Run
- name: Run ${{ matrix.version }}
if: matrix.version == 'v2'
shell: bash
run: |
cd scripts/nemo/parakeet-tdt-0.6b-v2
./run.sh
ls -lh *.onnx
ls -lh *.weights
mv -v *.onnx ../../..
mv -v *.weights ../../..
mv -v tokens.txt ../../..
mv 2086-149220-0033.wav ../../../0.wav
- name: Collect files (fp32)
- name: Run ${{ matrix.version }}
if: matrix.version == 'v3'
shell: bash
run: |
d=sherpa-onnx-nemo-parakeet-tdt-0.6b-v2
mkdir -p $d
cp encoder.int8.onnx $d
cp decoder.onnx $d
cp joiner.onnx $d
cp tokens.txt $d
mkdir $d/test_wavs
cp 0.wav $d/test_wavs
cd scripts/nemo/parakeet-tdt-0.6b-v3
./run.sh
tar cjfv $d.tar.bz2 $d
ls -lh *.onnx
mv -v *.onnx ../../..
mv -v *.weights ../../..
mv -v tokens.txt ../../..
mv *.wav ../../../
- name: Collect files (int8)
- name: Collect files (fp32)
shell: bash
run: |
d=sherpa-onnx-nemo-parakeet-tdt-0.6b-v2-int8
version=${{ matrix.version }}
d=sherpa-onnx-nemo-parakeet-tdt-0.6b-$version
mkdir -p $d
cp encoder.int8.onnx $d
cp decoder.int8.onnx $d
cp joiner.int8.onnx $d
cp tokens.txt $d
cp -v encoder.onnx $d
cp -v encoder.weights $d
cp -v decoder.onnx $d
cp -v joiner.onnx $d
cp -v tokens.txt $d
mkdir $d/test_wavs
cp 0.wav $d/test_wavs
cp -v *.wav $d/test_wavs
tar cjfv $d.tar.bz2 $d
# tar cjfv $d.tar.bz2 $d
# ls -lh *.tar.bz2
- name: Collect files (fp16)
- name: Collect files (int8)
shell: bash
run: |
d=sherpa-onnx-nemo-parakeet-tdt-0.6b-v2-fp16
version=${{ matrix.version }}
d=sherpa-onnx-nemo-parakeet-tdt-0.6b-$version-int8
mkdir -p $d
cp encoder.fp16.onnx $d
cp decoder.fp16.onnx $d
cp joiner.fp16.onnx $d
cp tokens.txt $d
cp -v encoder.int8.onnx $d
cp -v decoder.int8.onnx $d
cp -v joiner.int8.onnx $d
cp -v tokens.txt $d
mkdir $d/test_wavs
cp 0.wav $d/test_wavs
cp -v *.wav $d/test_wavs
tar cjfv $d.tar.bz2 $d
ls -lh *.tar.bz2
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
... ... @@ -94,13 +124,13 @@ jobs:
timeout_seconds: 200
shell: bash
command: |
version=${{ matrix.version }}
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
models=(
sherpa-onnx-nemo-parakeet-tdt-0.6b-v2
sherpa-onnx-nemo-parakeet-tdt-0.6b-v2-int8
sherpa-onnx-nemo-parakeet-tdt-0.6b-v2-fp16
sherpa-onnx-nemo-parakeet-tdt-0.6b-$version
sherpa-onnx-nemo-parakeet-tdt-0.6b-$version-int8
)
for m in ${models[@]}; do
... ... @@ -112,6 +142,7 @@ jobs:
cd huggingface
git lfs track "*.onnx"
git lfs track "*.wav"
git lfs track "*.weights"
git status
git add .
git status
... ...
... ... @@ -158,8 +158,15 @@ jobs:
vits-piper-nl_NL-miro-high
vits-piper-nl_NL-dii-high
vits-piper-de_DE-miro-high
vits-piper-de_DE-dii-high
vits-piper-fr_FR-miro-high
vits-piper-en_US-miro-high
vits-piper-pl_PL-jarvis_wg_glos-medium
vits-piper-pl_PL-justyna_wg_glos-medium
vits-piper-pl_PL-meski_wg_glos-medium
vits-piper-pl_PL-zenski_wg_glos-medium
vits-piper-id_ID-news_tts-medium
vits-piper-hi_IN-rohan-medium
)
for d in ${dirs[@]}; do
src=scripts/piper/release/$d
... ...
name: export-sense-voice-to-rknn
on:
push:
branches:
- export-sense-voice-rknn-ci-2
workflow_dispatch:
concurrency:
group: export-sense-voice-to-rknn-${{ github.ref }}
cancel-in-progress: true
jobs:
export-sense-voice-to-rknn:
if: github.repository_owner == 'k2-fsa' || github.repository_owner == 'csukuangfj'
name: ${{ matrix.framework }} ${{ matrix.platform }} ${{ matrix.input_in_seconds }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["3.10"]
platform: ["rk3562", "rk3566", "rk3568", "rk3576", "rk3588"]
input_in_seconds: ["10", "15", "20", "25", "30"]
framework: ["FunASR", "WSYue-ASR"]
steps:
- uses: actions/checkout@v4
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install Python dependencies
shell: bash
run: |
python3 -m pip install --upgrade \
pip \
"numpy<2" \
torch==2.0.0+cpu -f https://download.pytorch.org/whl/torch \
onnx==1.17.0 \
onnxruntime==1.17.1 \
librosa \
soundfile \
onnxsim \
sentencepiece \
kaldi_native_fbank
curl -SL -O https://huggingface.co/csukuangfj/rknn-toolkit2/resolve/main/rknn_toolkit2-2.1.0%2B708089d1-cp310-cp310-linux_x86_64.whl
pip install ./*.whl "numpy<=1.26.4"
- name: Run SenseVoice from FunAsr
if: matrix.framework == 'FunASR'
shell: bash
run: |
cd scripts/sense-voice/rknn
curl -SL -O https://hf-mirror.com/FunAudioLLM/SenseVoiceSmall/resolve/main/am.mvn
curl -SL -O https://hf-mirror.com/FunAudioLLM/SenseVoiceSmall/resolve/main/model.pt
curl -SL -O https://hf-mirror.com/FunAudioLLM/SenseVoiceSmall/resolve/main/chn_jpn_yue_eng_ko_spectok.bpe.model
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/en.wav
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/ja.wav
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/ko.wav
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/yue.wav
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/zh.wav
rm -f README.md || true
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/README.md
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/LICENSE
echo "export to onnx"
t=${{ matrix.input_in_seconds }}
p=${{ matrix.platform }}
echo "----$t---"
python3 ./export-onnx.py --input-len-in-seconds $t
ls -lh *.onnx
echo "test exported onnx models"
echo "----------$t----------"
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./en.wav
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./ja.wav
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./ko.wav
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./yue.wav
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./zh.wav
echo "export to rknn"
echo "----------$t----------"
echo "----------$p----------"
python3 export-rknn.py --target-platform $p --in-model model-$t-seconds.onnx --out-model model-$p-$t-seconds.rknn >/dev/null 2>&1
ls -lh *.rknn
echo "collect results"
d=sherpa-onnx-$p-$t-seconds-sense-voice-zh-en-ja-ko-yue-2024-07-17
mkdir -p $d
mkdir -p $d/test_wavs
cp -v README.md $d
cp -v LICENSE $d
cp -v model-$p-$t-seconds.rknn $d/model.rknn
cp -v tokens.txt $d
cp -v *.wav $d/test_wavs
ls -lh $d
tar cjfv $d.tar.bz2 $d
ls -lh *.tar.bz2
rm -rf $d
echo "----show---"
ls -lh *.tar.bz2
mv *.tar.bz2 ../../..
- name: Run SenseVoice from WSYue-ASR
if: matrix.framework == 'WSYue-ASR'
shell: bash
run: |
cd scripts/sense-voice/rknn
curl -SL -O https://huggingface.co/ASLP-lab/WSYue-ASR/resolve/main/sensevoice_small_yue/model.pt
curl -SL -O https://hf-mirror.com/FunAudioLLM/SenseVoiceSmall/resolve/main/am.mvn
curl -SL -O https://hf-mirror.com/FunAudioLLM/SenseVoiceSmall/resolve/main/chn_jpn_yue_eng_ko_spectok.bpe.model
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/en.wav
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/yue.wav
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-2024-07-17/resolve/main/test_wavs/zh.wav
for i in $(seq 0 17); do
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09/resolve/main/test_wavs/yue-$i.wav
done
rm -f README.md || true
curl -SL -O https://huggingface.co/csukuangfj/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09/resolve/main/README.md
echo "export to onnx"
t=${{ matrix.input_in_seconds }}
p=${{ matrix.platform }}
echo "----$t---"
export model_author="ASLP-lab"
export comment="ASLP-lab/WSYue-ASR"
export url="https://huggingface.co/ASLP-lab/WSYue-ASR/tree/main/sensevoice_small_yue"
python3 ./export-onnx.py --input-len-in-seconds $t
ls -lh *.onnx
echo "test exported onnx models"
echo "----------$t----------"
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./en.wav
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./yue.wav
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./zh.wav
for i in $(seq 0 17); do
echo "yue-$i.wav"
python3 ./test_onnx.py --model model-$t-seconds.onnx --tokens ./tokens.txt --wave ./yue-$i.wav
done
echo "export to rknn"
echo "----------$t----------"
echo "----------$p----------"
python3 export-rknn.py --target-platform $p --in-model model-$t-seconds.onnx --out-model model-$p-$t-seconds.rknn >/dev/null 2>&1
ls -lh *.rknn
echo "collect results"
d=sherpa-onnx-$p-$t-seconds-sense-voice-zh-en-ja-ko-yue-2025-09-09
mkdir -p $d
mkdir -p $d/test_wavs
cp -v README.md $d
cp -v model-$p-$t-seconds.rknn $d/model.rknn
cp -v tokens.txt $d
cp -v *.wav $d/test_wavs
ls -lh $d
tar cjfv $d.tar.bz2 $d
ls -lh *.tar.bz2
rm -rf $d
echo "----show---"
ls -lh *.tar.bz2
mv *.tar.bz2 ../../..
- name: Release
if: github.repository_owner == 'csukuangfj'
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
file: ./*.tar.bz2
overwrite: true
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: asr-models
- name: Release
if: github.repository_owner == 'k2-fsa'
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
file: ./*.tar.bz2
overwrite: true
tag: asr-models
... ...
name: export-t-one-to-onnx
on:
workflow_dispatch:
concurrency:
group: export-t-one-to-onnx-${{ github.ref }}
cancel-in-progress: true
jobs:
export-t-one-to-onnx:
if: github.repository_owner == 'k2-fsa' || github.repository_owner == 'csukuangfj'
name: export t-one
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["3.10"]
steps:
- uses: actions/checkout@v4
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install Python dependencies
shell: bash
run: |
pip install onnx==1.17.0 onnxruntime==1.17.1 soundfile librosa kaldi_native_fbank "numpy<2"
- name: Run
shell: bash
run: |
cd scripts/t-one
wget https://raw.githubusercontent.com/voicekit-team/T-one/refs/heads/main/LICENSE
./run.sh
d=sherpa-onnx-streaming-t-one-russian-2025-09-08
mkdir $d
cp -v ./tokens.txt $d
cp -v ./model.onnx $d
cp -v ./russian_test_short_from_t_one.wav $d/0.wav
cp -v ./LICENSE $d
cp -v ./README.md $d
ls -lh $d
tar cjfv $d.tar.bz2 $d
ls -lh $d.tar.bz2
mv $d.tar.bz2 ../..
mv $d ../..
- name: Publish to huggingface
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
with:
max_attempts: 20
timeout_seconds: 200
shell: bash
command: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
rm -rf huggingface
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
m=sherpa-onnx-streaming-t-one-russian-2025-09-08
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/$m huggingface
cd huggingface
git fetch
git pull
echo "pwd: $PWD"
ls -lh ../$m
git lfs track "*.wav"
rm -rf ./*
cp -v ../$m/* ./
git lfs track "*.onnx"
git add .
ls -lh
git status
git commit -m "add models"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/$m main || true
cd ..
- name: Release
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
file: ./*.tar.bz2
overwrite: true
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: asr-models
... ...
... ... @@ -16,8 +16,9 @@ jobs:
fail-fast: false
matrix:
os: [macos-latest]
model: ["turbo", "distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium", "medium-aishell", "large", "large-v1", "large-v2", "large-v3", "distil-large-v2"]
model: ["turbo", "distil-medium.en", "distil-small.en", "tiny.en", "base.en", "small.en", "medium.en", "tiny", "base", "small", "medium", "medium-aishell", "large", "large-v1", "large-v2", "large-v3", "distil-large-v2", "distil-large-v3", "distil-large-v3.5"]
# model: ["large", "large-v1", "large-v2", "large-v3", "distil-large-v2"]
# model: ["distil-large-v3.5", "distil-large-v3"]
python-version: ["3.8"]
steps:
... ... @@ -47,6 +48,12 @@ jobs:
elif [[ $model == distil-large-v2 ]]; then
wget -q -O distil-large-v2-original-model.bin https://huggingface.co/distil-whisper/distil-large-v2/resolve/main/original-model.bin
ls -lh
elif [[ $model == distil-large-v3 ]]; then
wget -q -O distil-large-v3-original-model.bin https://huggingface.co/distil-whisper/distil-large-v3-openai/resolve/main/model.bin
ls -lh
elif [[ $model == distil-large-v3.5 ]]; then
wget -q -O distil-large-v3.5-original-model.bin https://huggingface.co/distil-whisper/distil-large-v3.5-openai/resolve/main/model.bin
ls -lh
elif [[ $model == distil-small.en ]]; then
wget -q -O distil-small-en-original-model.bin https://huggingface.co/distil-whisper/distil-small.en/resolve/main/original-model.bin
ls -lh
... ... @@ -155,6 +162,7 @@ jobs:
git status
ls -lh
git lfs track "*.wav*"
git lfs track "*onnx*"
git lfs track "*weights*"
... ...
name: generate-tts-samples
on:
push:
branches:
- tts-samples-2
workflow_dispatch:
concurrency:
group: generate-tts-samples-${{ github.ref }}
cancel-in-progress: true
jobs:
generate_tts_samples:
name: ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["3.10"]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install Python dependencies
shell: bash
run: |
pip install "numpy<=1.26.4" sherpa-onnx soundfile
- name: kitten
if: true
shell: bash
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
run: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
cd scripts/kitten-tts
pwd=$PWD
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-tts-samples hf
mkdir -p ./hf/kitten/v0.1-nano/mp3
mkdir -p ./hf/kitten/v0.2-nano/mp3
mkdir -p ./hf/kitten/v0.1-mini/mp3
for v in 1 2; do
pushd nano_v0_$v
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kitten-nano-en-v0_$v-fp16.tar.bz2
tar xf kitten-nano-en-v0_$v-fp16.tar.bz2
rm kitten-nano-en-v0_$v-fp16.tar.bz2
ln -s ../hf .
python3 ./generate_samples.py
rm -rf kitten-nano-en-v0_$v-fp16
popd
done
for v in 1; do
pushd mini_v0_$v
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kitten-mini-en-v0_$v-fp16.tar.bz2
tar xf kitten-mini-en-v0_$v-fp16.tar.bz2
rm kitten-mini-en-v0_$v-fp16.tar.bz2
ln -s ../hf .
python3 ./generate_samples.py
rm -rf kitten-mini-en-v0_$v-fp16
popd
done
pushd hf
git pull
git add .
git commit -m 'add kitten tts samples'
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-tts-samples main
popd
rm -rf hf
- name: matcha en (ljspeech)
if: false
shell: bash
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
run: |
git config --global user.email "csukuangfj@gmail.com"
git config --global user.name "Fangjun Kuang"
cd scripts/matcha-tts/en/
pwd=$PWD
export GIT_LFS_SKIP_SMUDGE=1
export GIT_CLONE_PROTECTION_ACTIVE=false
git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-tts-samples hf
mkdir -p ./hf/matcha/icefall-en-ljspeech/mp3
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/matcha-icefall-en_US-ljspeech.tar.bz2
tar xvf matcha-icefall-en_US-ljspeech.tar.bz2
rm matcha-icefall-en_US-ljspeech.tar.bz2
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/vocoder-models/vocos-22khz-univ.onnx
python3 ./generate_samples.py
pushd hf
git pull
git add .
git commit -m 'add matcha tts en (ljspeech) samples'
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-tts-samples main
popd
rm -rf hf
... ...
name: jar
on:
push:
branches:
- refactor-jar
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
workflow_dispatch:
concurrency:
group: jar-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: write
jobs:
jar:
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }} ${{ matrix.arch }}
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-24.04-arm
arch: "arm64"
- os: ubuntu-latest
arch: "x64"
- os: macos-latest
arch: "arm64"
- os: macos-13
arch: "x64"
- os: windows-latest
arch: "x64"
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-java@v4
with:
distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '21'
- name: Show java version
shell: bash
run: |
java --version
- name: Download libs ${{ matrix.os }} ${{ matrix.arch }}
if: ${{ matrix.os == 'ubuntu-24.04-arm' && matrix.arch == 'arm64' }}
shell: bash
run: |
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/v$SHERPA_ONNX_VERSION/sherpa-onnx-v$SHERPA_ONNX_VERSION-linux-aarch64-jni.tar.bz2
tar xvf ./*.tar.bz2
src=sherpa-onnx-v$SHERPA_ONNX_VERSION-linux-aarch64-jni
dst=sherpa-onnx/java-api/resources/sherpa-onnx/native/linux-aarch64
mkdir -p $dst
cp -v $src/lib/libsherpa-onnx-jni.so $dst/
cp -v $src/lib/libonnxruntime.so $dst/
ls -lh $dst
rm -rf $src*
- name: Download libs ${{ matrix.os }} ${{ matrix.arch }}
if: ${{ matrix.os == 'ubuntu-latest' && matrix.arch == 'x64' }}
shell: bash
run: |
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/v$SHERPA_ONNX_VERSION/sherpa-onnx-v$SHERPA_ONNX_VERSION-linux-x64-jni.tar.bz2
tar xvf ./*.tar.bz2
src=sherpa-onnx-v$SHERPA_ONNX_VERSION-linux-x64-jni
dst=sherpa-onnx/java-api/resources/sherpa-onnx/native/linux-x64
mkdir -p $dst
cp -v $src/lib/libsherpa-onnx-jni.so $dst/
cp -v $src/lib/libonnxruntime.so $dst/
ls -lh $dst
rm -rf $src*
- name: Download libs ${{ matrix.os }} ${{ matrix.arch }}
if: ${{ matrix.os == 'macos-latest' && matrix.arch == 'arm64' }}
shell: bash
run: |
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/v$SHERPA_ONNX_VERSION/sherpa-onnx-v$SHERPA_ONNX_VERSION-osx-arm64-jni.tar.bz2
tar xvf ./*.tar.bz2
src=sherpa-onnx-v$SHERPA_ONNX_VERSION-osx-arm64-jni
dst=sherpa-onnx/java-api/resources/sherpa-onnx/native/osx-aarch64
mkdir -p $dst
cp -v $src/lib/libonnxruntime.1.17.1.dylib $dst/
cp -v $src/lib/libsherpa-onnx-jni.dylib $dst/
ls -lh $dst
rm -rf $src*
- name: Download libs ${{ matrix.os }} ${{ matrix.arch }}
if: ${{ matrix.os == 'macos-13' && matrix.arch == 'x64' }}
shell: bash
run: |
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/v$SHERPA_ONNX_VERSION/sherpa-onnx-v$SHERPA_ONNX_VERSION-osx-x86_64-jni.tar.bz2
tar xvf ./*.tar.bz2
src=sherpa-onnx-v$SHERPA_ONNX_VERSION-osx-x86_64-jni
dst=sherpa-onnx/java-api/resources/sherpa-onnx/native/osx-x64
mkdir -p $dst
cp -v $src/lib/libonnxruntime.1.17.1.dylib $dst/
cp -v $src/lib/libsherpa-onnx-jni.dylib $dst/
ls -lh $dst
rm -rf $src*
- name: Download libs ${{ matrix.os }} ${{ matrix.arch }}
if: ${{ matrix.os == 'windows-latest' && matrix.arch == 'x64' }}
shell: bash
run: |
SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/v$SHERPA_ONNX_VERSION/sherpa-onnx-v$SHERPA_ONNX_VERSION-win-x64-jni.tar.bz2
tar xvf ./*.tar.bz2
src=sherpa-onnx-v$SHERPA_ONNX_VERSION-win-x64-jni
ls -lh $src
ls -lh $src/lib
dst=sherpa-onnx/java-api/resources/sherpa-onnx/native/win-x64
mkdir -p $dst
cp -v $src/lib/onnxruntime.dll $dst/
cp -v $src/lib/sherpa-onnx-jni.dll $dst/
ls -lh $dst
rm -rf $src*
- name: Create java jar (source code)
shell: bash
run: |
cd sherpa-onnx/java-api
make
ls -lh build
- name: Create java jar (native lib)
shell: bash
run: |
SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
cd sherpa-onnx/java-api
ls -lh resources/sherpa-onnx/native
echo "--"
ls -lh resources/sherpa-onnx/native/*/
jar cfvm ./sherpa-onnx-native.jar MANIFEST.MF -C ./resources .
ls -lh *.jar
os=${{ matrix.os }}
arch=${{ matrix.arch }}
if [[ $os == "ubuntu-24.04-arm" && $arch == "arm64" ]]; then
mv -v sherpa-onnx-native.jar sherpa-onnx-native-lib-linux-aarch64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "ubuntu-latest" && $arch == "x64" ]]; then
mv -v sherpa-onnx-native.jar sherpa-onnx-native-lib-linux-x64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "macos-latest" && $arch == "arm64" ]]; then
mv -v sherpa-onnx-native.jar sherpa-onnx-native-lib-osx-aarch64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "macos-13" && $arch == "x64" ]]; then
mv -v sherpa-onnx-native.jar sherpa-onnx-native-lib-osx-x64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "windows-latest" && $arch == "x64" ]]; then
mv -v sherpa-onnx-native.jar sherpa-onnx-native-lib-win-x64-$SHERPA_ONNX_VERSION.jar
else
echo "Unknown os $os with arch $arch"
fi
- name: Show java jar (source code)
shell: bash
run: |
cd sherpa-onnx/java-api
unzip -l build/sherpa-onnx.jar
- name: Show java jar (native lib)
shell: bash
run: |
cd sherpa-onnx/java-api
unzip -l sherpa-onnx*.jar
- name: Release jar
if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: ./sherpa-onnx/java-api/sherpa-onnx-native-*.jar
- name: Release jar
if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: ./sherpa-onnx/java-api/sherpa-onnx-native-*.jar
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: v1.12.10
- name: Test KittenTTS
shell: bash
run: |
SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
os=${{ matrix.os }}
arch=${{ matrix.arch }}
if [[ $os == "ubuntu-24.04-arm" && $arch == "arm64" ]]; then
native_jar=sherpa-onnx-native-lib-linux-aarch64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "ubuntu-latest" && $arch == "x64" ]]; then
native_jar=sherpa-onnx-native-lib-linux-x64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "macos-latest" && $arch == "arm64" ]]; then
native_jar=sherpa-onnx-native-lib-osx-aarch64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "macos-13" && $arch == "x64" ]]; then
native_jar=sherpa-onnx-native-lib-osx-x64-$SHERPA_ONNX_VERSION.jar
elif [[ $os == "windows-latest" && $arch == "x64" ]]; then
native_jar=sherpa-onnx-native-lib-win-x64-$SHERPA_ONNX_VERSION.jar
else
echo "Unknown os $os with arch $arch"
fi
echo "native_jar: $native_jar"
ls -lh sherpa-onnx/java-api/$native_jar
if [[ ${{ matrix.os }} == "windows-latest" ]]; then
SEP=";"
else
SEP=":"
fi
cd java-api-examples
curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kitten-nano-en-v0_1-fp16.tar.bz2
tar xf kitten-nano-en-v0_1-fp16.tar.bz2
rm kitten-nano-en-v0_1-fp16.tar.bz2
java \
-cp "../sherpa-onnx/java-api/build/sherpa-onnx.jar${SEP}../sherpa-onnx/java-api/$native_jar" \
NonStreamingTtsKittenEn.java
... ...
... ... @@ -10,15 +10,6 @@ on:
- 'kotlin-api-examples/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/jni/*'
pull_request:
branches:
- master
paths:
- '.github/workflows/jni.yaml'
- 'cmake/**'
- 'kotlin-api-examples/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/jni/*'
workflow_dispatch:
... ...
... ... @@ -13,17 +13,6 @@ on:
- 'sherpa-onnx/c-api/*'
- 'sherpa-onnx/pascal-api/*'
- 'scripts/lazarus/*'
pull_request:
branches:
- master
paths:
- '.github/workflows/lazarus.yaml'
- 'cmake/**'
- 'lazarus-examples/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'sherpa-onnx/pascal-api/*'
- 'scripts/lazarus/*'
workflow_dispatch:
... ...
... ... @@ -18,21 +18,6 @@ on:
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'c-api-examples/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/linux-gpu.yaml'
- '.github/scripts/test-online-transducer.sh'
- '.github/scripts/test-online-paraformer.sh'
- '.github/scripts/test-offline-transducer.sh'
- '.github/scripts/test-offline-ctc.sh'
- '.github/scripts/test-online-ctc.sh'
- '.github/scripts/test-online-ctc.sh'
- '.github/scripts/test-offline-tts.sh'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
workflow_dispatch:
... ... @@ -43,13 +28,14 @@ concurrency:
jobs:
linux_gpu:
runs-on: ${{ matrix.os }}
name: ${{ matrix.build_type }}
name: ${{ matrix.build_type }} ${{ matrix.onnxruntime_version }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
# build_type: [Release, Debug]
build_type: [Release]
onnxruntime_version: ["1.17.1", "1.22.0"]
steps:
- uses: actions/checkout@v4
... ... @@ -79,6 +65,17 @@ jobs:
cd /home/runner/work/sherpa-onnx/sherpa-onnx
onnxruntime_version=${{ matrix.onnxruntime_version }}
if [[ $onnxruntime_version == "1.22.0" ]]; then
curl -SL -O https://github.com/csukuangfj/onnxruntime-libs/releases/download/v1.22.0/onnxruntime-linux-x64-gpu-1.22.0-patched.zip
unzip onnxruntime-linux-x64-gpu-1.22.0-patched.zip
export SHERPA_ONNXRUNTIME_LIB_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/lib
export SHERPA_ONNXRUNTIME_INCLUDE_DIR=$PWD/onnxruntime-linux-x64-gpu-1.22.0-patched/include
ls -lh /home/runner/work/sherpa-onnx/sherpa-onnx/onnxruntime-linux-x64-gpu-1.22.0-patched/lib/libonnxruntime.so
fi
git clone --depth 1 --branch v1.2.12 https://github.com/alsa-project/alsa-lib
pushd alsa-lib
./gitcompile
... ... @@ -115,7 +112,7 @@ jobs:
du -h -d1 .
sudo chown -R $USER ./build
ls -lh build/bin
ls -lh build/_deps/onnxruntime-src/lib/
ls -lh build/_deps/onnxruntime-src/lib/ || true
echo "strip"
strip build/bin/*
... ... @@ -135,12 +132,17 @@ jobs:
strings build/install/lib/*.so | grep "^GLIBC_"
- name: Copy files
          if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
shell: bash
run: |
SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-linux-x64-gpu
onnxruntime_version=${{ matrix.onnxruntime_version }}
if [[ $onnxruntime_version == "1.22.0" ]]; then
dst=sherpa-onnx-${SHERPA_ONNX_VERSION}-cuda-12.x-cudnn-9.x-linux-x64-gpu
fi
mkdir $dst
cp -a build/install/bin $dst/
... ... @@ -152,15 +154,23 @@ jobs:
tar cjvf ${dst}.tar.bz2 $dst
- name: Release pre-compiled binaries and libs for linux x64
if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: sherpa-onnx-*gpu.tar.bz2
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: v1.12.13
- name: Release pre-compiled binaries and libs for linux x64
if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: sherpa-onnx-*linux-x64-gpu.tar.bz2
# repo_name: k2-fsa/sherpa-onnx
# repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
# tag: v1.11.3
file: sherpa-onnx-*gpu.tar.bz2
- name: Display dependencies of sherpa-onnx for linux
shell: bash
... ...
... ... @@ -20,7 +20,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
java-version: ['8', '11', '16', '17', '18', '19', '20', '21', '22', '23', '24']
java-version: ['24']
steps:
- uses: actions/checkout@v4
... ... @@ -46,7 +46,6 @@ jobs:
du -h -d1 .
- name: Build jar ${{ matrix.java-version }}
if: matrix.java-version == '23'
shell: bash
run: |
SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
... ... @@ -57,35 +56,31 @@ jobs:
cd ../..
ls -lh *.jar
- name: Build jar ${{ matrix.java-version }}
shell: bash
run: |
SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
cd sherpa-onnx/java-api
make
ls -lh build/
cp build/sherpa-onnx.jar ../../sherpa-onnx-$SHERPA_ONNX_VERSION-java${{ matrix.java-version }}.jar
cd ../..
ls -lh *.jar
- uses: actions/upload-artifact@v4
with:
name: release-jni-linux-jar-${{ matrix.java-version }}
path: ./*.jar
- name: Release jar
if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: ./*.jar
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: v1.12.11
- name: Release jar
if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: ./*.jar
# repo_name: k2-fsa/sherpa-onnx
# repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
# tag: v1.12.1
- name: Build sherpa-onnx
if: matrix.java-version == '23'
uses: addnab/docker-run-action@v3
with:
image: quay.io/pypa/manylinux2014_x86_64
... ... @@ -151,7 +146,6 @@ jobs:
ls -lh install/bin
- name: Display dependencies of sherpa-onnx for linux
if: matrix.java-version == '23'
shell: bash
run: |
du -h -d1 .
... ... @@ -170,13 +164,11 @@ jobs:
readelf -d build/bin/sherpa-onnx
- uses: actions/upload-artifact@v4
if: matrix.java-version == '23'
with:
name: release-jni-linux-${{ matrix.java-version }}
path: build/install/*
- name: Copy files
if: matrix.java-version == '23'
shell: bash
run: |
du -h -d1 .
... ... @@ -194,8 +186,27 @@ jobs:
tar cjvf ${dst}.tar.bz2 $dst
du -h -d1 .
- name: Release pre-compiled binaries and libs for linux x64
if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: sherpa-onnx-*.tar.bz2
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: v1.12.11
- name: Release pre-compiled binaries and libs for linux x64
if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: sherpa-onnx-*.tar.bz2
- name: Publish to huggingface
if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && matrix.java-version == '23'
if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && (github.event_name == 'push' || github.event_name == 'workflow_dispatch')
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
uses: nick-fields/retry@v3
... ... @@ -215,6 +226,7 @@ jobs:
cd huggingface
dst=jni/$SHERPA_ONNX_VERSION
mkdir -p $dst
git lfs track "*.jar"
cp -v ../sherpa-onnx-*.tar.bz2 $dst/
cp -v ../*.jar $dst/
... ... @@ -227,14 +239,3 @@ jobs:
git commit -m "add more files"
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-libs main
- name: Release pre-compiled binaries and libs for linux x64
if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/') && matrix.java-version == '23'
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: sherpa-onnx-*.tar.bz2
# repo_name: k2-fsa/sherpa-onnx
# repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
# tag: v1.12.0
... ...
... ... @@ -12,15 +12,6 @@ on:
- 'mfc-examples/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
pull_request:
branches:
- master
paths:
- '.github/workflows/mfc.yaml'
- 'cmake/**'
- 'mfc-examples/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
workflow_dispatch:
... ...
... ... @@ -11,16 +11,6 @@ on:
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'sherpa-onnx/pascal-api/*'
pull_request:
branches:
- master
paths:
- '.github/workflows/pascal.yaml'
- 'cmake/**'
- 'pascal-api-examples/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/c-api/*'
- 'sherpa-onnx/pascal-api/*'
workflow_dispatch:
... ... @@ -136,6 +126,43 @@ jobs:
cp -v ../sherpa-onnx/pascal-api/*.pas ../pascal-api-examples/vad-with-non-streaming-asr
fi
- name: Run Pascal test (Streaming ASR)
shell: bash
run: |
export PATH=/c/lazarus/fpc/3.2.2/bin/x86_64-win64:$PATH
cd ./pascal-api-examples
pushd streaming-asr
./run-t-one-ctc.sh
rm -rf sherpa-onnx-*
echo "---"
./run-zipformer-transducer.sh
rm -rf sherpa-onnx-*
echo "---"
./run-nemo-transducer.sh
rm -rf sherpa-onnx-*
echo "---"
          if [[ "${{ matrix.os }}" != 'windows-latest' ]]; then
./run-paraformer.sh
rm -rf sherpa-onnx-*
echo "---"
./run-zipformer-ctc.sh
echo "---"
./run-zipformer-ctc-hlg.sh
rm -rf sherpa-onnx-*
echo "---"
fi
ls -lh
popd
- name: Run Pascal test (VAD test)
shell: bash
run: |
... ... @@ -331,36 +358,3 @@ jobs:
echo "---"
ls -lh
popd
- name: Run Pascal test (Streaming ASR)
shell: bash
run: |
export PATH=/c/lazarus/fpc/3.2.2/bin/x86_64-win64:$PATH
cd ./pascal-api-examples
pushd streaming-asr
./run-zipformer-transducer.sh
rm -rf sherpa-onnx-*
echo "---"
./run-nemo-transducer.sh
rm -rf sherpa-onnx-*
echo "---"
if [[ ${{ matrix.os }} != 'windows-latest' ]]; then
./run-paraformer.sh
rm -rf sherpa-onnx-*
echo "---"
./run-zipformer-ctc.sh
echo "---"
./run-zipformer-ctc-hlg.sh
rm -rf sherpa-onnx-*
echo "---"
fi
ls -lh
popd
... ...
... ... @@ -13,16 +13,6 @@ on:
- 'build-riscv64-linux-gnu.sh'
tags:
- 'v[0-9]+.[0-9]+.[0-9]+*'
pull_request:
branches:
- master
paths:
- '.github/workflows/riscv64-linux.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'toolchains/riscv64-linux-gnu.toolchain.cmake'
- 'sherpa-onnx/c-api/*'
- 'build-riscv64-linux-gnu.sh'
workflow_dispatch:
... ... @@ -137,66 +127,6 @@ jobs:
readelf -d build-riscv64-linux-gnu/bin/sherpa-onnx
- name: Test sherpa-onnx
shell: bash
run: |
export PATH=$GITHUB_WORKSPACE/toolchain/bin:$PATH
export PATH=$GITHUB_WORKSPACE/qemu-install/bin:$PATH
export QEMU_LD_PREFIX=$GITHUB_WORKSPACE/toolchain/sysroot
export LD_LIBRARY_PATH=$GITHUB_WORKSPACE/toolchain/sysroot/lib
ls -lh ./build-riscv64-linux-gnu/bin
echo "----------sherpa-onnx----------"
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx --help
readelf -d ./build-riscv64-linux-gnu/bin/sherpa-onnx
echo "----------sherpa-onnx-offline----------"
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline --help
readelf -d ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline
echo "----------sherpa-onnx-offline-tts----------"
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline-tts --help
readelf -d ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline-tts
- name: Test streaming speech recognition
shell: bash
run: |
export PATH=$GITHUB_WORKSPACE/toolchain/bin:$PATH
export PATH=$GITHUB_WORKSPACE/qemu-install/bin:$PATH
export QEMU_LD_PREFIX=$GITHUB_WORKSPACE/toolchain/sysroot
export LD_LIBRARY_PATH=$GITHUB_WORKSPACE/toolchain/sysroot/lib
wget -q https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23.tar.bz2
tar xvf sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23.tar.bz2
rm sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23.tar.bz2
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx \
--tokens=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/tokens.txt \
--encoder=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/encoder-epoch-99-avg-1.onnx \
--decoder=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/decoder-epoch-99-avg-1.onnx \
--joiner=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/joiner-epoch-99-avg-1.onnx \
./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/test_wavs/0.wav
- name: Test offline tts
shell: bash
run: |
export PATH=$GITHUB_WORKSPACE/toolchain/bin:$PATH
export PATH=$GITHUB_WORKSPACE/qemu-install/bin:$PATH
export QEMU_LD_PREFIX=$GITHUB_WORKSPACE/toolchain/sysroot
export LD_LIBRARY_PATH=$GITHUB_WORKSPACE/toolchain/sysroot/lib
wget -q https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-lessac-medium.tar.bz2
tar xf vits-piper-en_US-lessac-medium.tar.bz2
rm vits-piper-en_US-lessac-medium.tar.bz2
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline-tts \
--vits-model=./vits-piper-en_US-lessac-medium/en_US-lessac-medium.onnx \
--vits-data-dir=./vits-piper-en_US-lessac-medium/espeak-ng-data \
--vits-tokens=./vits-piper-en_US-lessac-medium/tokens.txt \
--output-filename=./liliana-piper-en_US-lessac-medium.wav \
'liliana, the most beautiful and lovely assistant of our team!'
- name: Copy files
shell: bash
run: |
... ... @@ -270,21 +200,92 @@ jobs:
git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-libs main
- uses: actions/upload-artifact@v4
if: matrix.lib_type == 'shared'
with:
name: wave
path: ./*.wav
- uses: actions/upload-artifact@v4
if: matrix.lib_type == 'static'
with:
name: sherpa-onnx-linux-riscv64-static
path: sherpa-onnx-*linux-riscv64-static.tar.bz2
- name: Release pre-compiled binaries and libs for riscv64 linux ${{ matrix.lib_type }}
if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: sherpa-onnx-*linux-riscv64*.tar.bz2
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: v1.12.11
- name: Release pre-compiled binaries and libs for riscv64 linux ${{ matrix.lib_type }}
if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/')
uses: svenstaro/upload-release-action@v2
with:
file_glob: true
overwrite: true
file: sherpa-onnx-*linux-riscv64*.tar.bz2
- name: Test sherpa-onnx
shell: bash
run: |
export PATH=$GITHUB_WORKSPACE/toolchain/bin:$PATH
export PATH=$GITHUB_WORKSPACE/qemu-install/bin:$PATH
export QEMU_LD_PREFIX=$GITHUB_WORKSPACE/toolchain/sysroot
export LD_LIBRARY_PATH=$GITHUB_WORKSPACE/toolchain/sysroot/lib
ls -lh ./build-riscv64-linux-gnu/bin
echo "----------sherpa-onnx----------"
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx --help
readelf -d ./build-riscv64-linux-gnu/bin/sherpa-onnx
echo "----------sherpa-onnx-offline----------"
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline --help
readelf -d ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline
echo "----------sherpa-onnx-offline-tts----------"
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline-tts --help
readelf -d ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline-tts
- name: Test streaming speech recognition
shell: bash
run: |
export PATH=$GITHUB_WORKSPACE/toolchain/bin:$PATH
export PATH=$GITHUB_WORKSPACE/qemu-install/bin:$PATH
export QEMU_LD_PREFIX=$GITHUB_WORKSPACE/toolchain/sysroot
export LD_LIBRARY_PATH=$GITHUB_WORKSPACE/toolchain/sysroot/lib
wget -q https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23.tar.bz2
tar xvf sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23.tar.bz2
rm sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23.tar.bz2
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx \
--tokens=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/tokens.txt \
--encoder=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/encoder-epoch-99-avg-1.onnx \
--decoder=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/decoder-epoch-99-avg-1.onnx \
--joiner=./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/joiner-epoch-99-avg-1.onnx \
./sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23/test_wavs/0.wav
- name: Test offline tts
shell: bash
run: |
export PATH=$GITHUB_WORKSPACE/toolchain/bin:$PATH
export PATH=$GITHUB_WORKSPACE/qemu-install/bin:$PATH
export QEMU_LD_PREFIX=$GITHUB_WORKSPACE/toolchain/sysroot
export LD_LIBRARY_PATH=$GITHUB_WORKSPACE/toolchain/sysroot/lib
wget -q https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-lessac-medium.tar.bz2
tar xf vits-piper-en_US-lessac-medium.tar.bz2
rm vits-piper-en_US-lessac-medium.tar.bz2
qemu-riscv64 ./build-riscv64-linux-gnu/bin/sherpa-onnx-offline-tts \
--vits-model=./vits-piper-en_US-lessac-medium/en_US-lessac-medium.onnx \
--vits-data-dir=./vits-piper-en_US-lessac-medium/espeak-ng-data \
--vits-tokens=./vits-piper-en_US-lessac-medium/tokens.txt \
--output-filename=./liliana-piper-en_US-lessac-medium.wav \
'liliana, the most beautiful and lovely assistant of our team!'
- uses: actions/upload-artifact@v4
if: matrix.lib_type == 'shared'
with:
name: wave
path: ./*.wav
... ...
... ... @@ -250,7 +250,7 @@ jobs:
file: sherpa-onnx-*linux-aarch64*.tar.bz2
repo_name: k2-fsa/sherpa-onnx
repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }}
tag: v1.12.0
tag: v1.12.13
- name: Test offline Moonshine
if: matrix.build_type != 'Debug'
... ...
... ... @@ -11,16 +11,7 @@ on:
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/jni/*'
- 'sherpa-onnx/java-api/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/run-java-test.yaml'
- 'cmake/**'
- 'java-api-examples/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/jni/*'
- 'sherpa-onnx/java-api/**'
workflow_dispatch:
concurrency:
... ... @@ -117,6 +108,13 @@ jobs:
cd ./java-api-examples
./run-version-test.sh
- name: Run java test (Streaming T-one)
shell: bash
run: |
cd ./java-api-examples
./run-streaming-decode-file-tone-ctc.sh
rm -rf sherpa-onnx-streaming-t-one-*
- name: Run java test (Nemo Canary)
shell: bash
run: |
... ...
... ... @@ -10,15 +10,7 @@ on:
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'python-api-examples/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/run-python-test-macos.yaml'
- '.github/scripts/test-python.sh'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'python-api-examples/**'
workflow_dispatch:
concurrency:
... ... @@ -85,7 +77,7 @@ jobs:
- name: Install Python dependencies
shell: bash
run: |
python3 -m pip install --upgrade pip numpy pypinyin sentencepiece>=0.1.96 soundfile setuptools wheel
          python3 -m pip install --upgrade pip numpy pypinyin 'sentencepiece>=0.1.96' soundfile setuptools wheel librosa
- name: Install sherpa-onnx
shell: bash
... ...
... ... @@ -80,7 +80,7 @@ jobs:
- name: Install Python dependencies
shell: bash
run: |
python3 -m pip install --upgrade pip numpy pypinyin sentencepiece>=0.1.96 soundfile
          python3 -m pip install --upgrade pip numpy pypinyin 'sentencepiece>=0.1.96' soundfile librosa
python3 -m pip install wheel twine setuptools
- uses: afoley587/setup-ffmpeg@main
... ... @@ -157,7 +157,7 @@ jobs:
mkdir t
cd t
unzip ../*.whl
readelf -d _sherpa_onnx*.so
readelf -d sherpa_onnx/lib/_sherpa_onnx*.so
echo "----"
... ... @@ -170,7 +170,7 @@ jobs:
mkdir t
cd t
unzip ../*.whl
readelf -d _sherpa_onnx*.so
readelf -d sherpa_onnx/lib/_sherpa_onnx*.so
echo "----"
... ...
... ... @@ -23,12 +23,7 @@ on:
paths:
- '.github/workflows/style_check.yaml'
- 'sherpa-onnx/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/style_check.yaml'
- 'sherpa-onnx/**'
workflow_dispatch:
concurrency:
... ...
... ... @@ -10,15 +10,6 @@ on:
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/python/**'
pull_request:
branches:
- master
paths:
- 'setup.py'
- '.github/workflows/test-build-wheel.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'sherpa-onnx/python/**'
workflow_dispatch:
... ... @@ -48,6 +39,19 @@ jobs:
- os: ubuntu-latest
python-version: "3.13"
- os: ubuntu-24.04-arm
python-version: "3.8"
- os: ubuntu-24.04-arm
python-version: "3.9"
- os: ubuntu-24.04-arm
python-version: "3.10"
- os: ubuntu-24.04-arm
python-version: "3.11"
- os: ubuntu-24.04-arm
python-version: "3.12"
- os: ubuntu-24.04-arm
python-version: "3.13"
- os: macos-13
python-version: "3.8"
... ... @@ -58,8 +62,10 @@ jobs:
- os: macos-13
python-version: "3.11"
- os: macos-14
- os: macos-latest
python-version: "3.12"
- os: macos-latest
python-version: "3.13"
- os: windows-2022
python-version: "3.7"
... ... @@ -74,6 +80,8 @@ jobs:
python-version: "3.11"
- os: windows-2022
python-version: "3.12"
- os: windows-latest
python-version: "3.13"
steps:
- uses: actions/checkout@v4
... ... @@ -129,6 +137,10 @@ jobs:
cd t
unzip ../*.whl
ls -lh sherpa_onnx/lib
file sherpa_onnx/lib/*
- name: Install wheel
shell: bash
run: |
... ... @@ -137,14 +149,5 @@ jobs:
- name: Test
shell: bash
run: |
# For windows
export PATH=/c/hostedtoolcache/windows/Python/3.7.9/x64/bin:$PATH
export PATH=/c/hostedtoolcache/windows/Python/3.8.10/x64/bin:$PATH
export PATH=/c/hostedtoolcache/windows/Python/3.9.13/x64/bin:$PATH
export PATH=/c/hostedtoolcache/windows/Python/3.10.11/x64/bin:$PATH
export PATH=/c/hostedtoolcache/windows/Python/3.11.9/x64/bin:$PATH
export PATH=/c/hostedtoolcache/windows/Python/3.12.10/x64/bin:$PATH
export PATH=/c/hostedtoolcache/windows/Python/3.13.5/x64/bin:$PATH
which sherpa-onnx
sherpa-onnx --help
... ...
... ... @@ -10,14 +10,6 @@ on:
- '.github/scripts/test-dart.sh'
- 'dart-api-examples/**'
- 'flutter/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/test-dart.yaml'
- '.github/scripts/test-dart.sh'
- 'dart-api-examples/**'
- 'flutter/**'
workflow_dispatch:
... ... @@ -135,7 +127,9 @@ jobs:
cp scripts/dart/speaker-id-pubspec.yaml dart-api-examples/speaker-identification/pubspec.yaml
cp scripts/dart/speaker-diarization-pubspec.yaml dart-api-examples/speaker-diarization/pubspec.yaml
cp scripts/dart/speech-enhancement-gtcrn-pubspec.yaml dart-api-examples/speech-enhancement-gtcrn/pubspec.yaml
cp scripts/dart/slid-pubspec.yaml dart-api-examples/spoken-language-identification/pubspec.yaml
cp scripts/dart/sherpa-onnx-pubspec.yaml flutter/sherpa_onnx/pubspec.yaml
.github/scripts/test-dart.sh
... ...
... ... @@ -11,16 +11,6 @@ on:
- 'dotnet-examples/**'
- 'scripts/dotnet/**'
pull_request:
branches:
- master
paths:
- '.github/workflows/test-dot-net.yaml'
- 'cmake/**'
- 'sherpa-onnx/csrc/*'
- 'dotnet-examples/**'
- 'scripts/dotnet/**'
workflow_dispatch:
concurrency:
... ...