Use static libraries for MFC examples (#210)
Committed by GitHub
Showing 18 changed files with 307 additions and 83 deletions
.github/workflows/mfc.yaml
0 → 100644
| 1 | +name: mfc | ||
| 2 | + | ||
| 3 | +on: | ||
| 4 | + push: | ||
| 5 | + branches: | ||
| 6 | + - master | ||
| 7 | + tags: | ||
| 8 | + - '*' | ||
| 9 | + paths: | ||
| 10 | + - '.github/workflows/mfc.yaml' | ||
| 11 | + - 'CMakeLists.txt' | ||
| 12 | + - 'cmake/**' | ||
| 13 | + - 'mfc-examples/**' | ||
| 14 | + - 'sherpa-onnx/csrc/*' | ||
| 15 | + pull_request: | ||
| 16 | + branches: | ||
| 17 | + - master | ||
| 18 | + paths: | ||
| 19 | + - '.github/workflows/mfc.yaml' | ||
| 20 | + - 'CMakeLists.txt' | ||
| 21 | + - 'cmake/**' | ||
| 22 | + - 'mfc-examples/**' | ||
| 23 | + - 'sherpa-onnx/csrc/*' | ||
| 24 | + release: | ||
| 25 | + types: | ||
| 26 | + - published | ||
| 27 | + | ||
| 28 | + workflow_dispatch: | ||
| 29 | + inputs: | ||
| 30 | + release: | ||
| 31 | + description: "Whether to release" | ||
| 32 | + type: boolean | ||
| 33 | + | ||
| 34 | +env: | ||
| 35 | + RELEASE: | ||
| 36 | + |- # Release if there is a release tag name or a release flag in workflow_dispatch | ||
| 37 | + ${{ github.event.release.tag_name != '' || github.event.inputs.release == 'true' }} | ||
| 38 | + | ||
| 39 | +concurrency: | ||
| 40 | + group: mfc-${{ github.ref }} | ||
| 41 | + cancel-in-progress: true | ||
| 42 | + | ||
| 43 | +jobs: | ||
| 44 | + mfc: | ||
| 45 | + name: MFC for ${{ matrix.arch }} | ||
| 46 | + runs-on: ${{ matrix.os }} | ||
| 47 | + strategy: | ||
| 48 | + fail-fast: false | ||
| 49 | + matrix: | ||
| 50 | + os: [windows-latest] | ||
| 51 | + # arch: [x64, Win32] | ||
| 52 | + arch: [x64] | ||
| 53 | + | ||
| 54 | + steps: | ||
| 55 | + - uses: actions/checkout@v2 | ||
| 56 | + with: | ||
| 57 | + fetch-depth: 0 | ||
| 58 | + | ||
| 59 | + - name: Display MSBuild info | ||
| 60 | + shell: cmd | ||
| 61 | + run: | | ||
| 62 | + set path="C:\Program Files\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin" | ||
| 63 | + msbuild -help | ||
| 64 | + | ||
| 65 | + - name: Configure CMake | ||
| 66 | + shell: bash | ||
| 67 | + run: | | ||
| 68 | + mkdir build | ||
| 69 | + cd build | ||
| 70 | + cmake -A ${{ matrix.arch }} -D CMAKE_BUILD_TYPE=Release -D BUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=./install .. | ||
| 71 | + | ||
| 72 | + - name: Build sherpa-onnx for windows | ||
| 73 | + shell: bash | ||
| 74 | + run: | | ||
| 75 | + cd build | ||
| 76 | + cmake --build . --config Release -- -m:2 | ||
| 77 | + cmake --build . --config Release --target install -- -m:2 | ||
| 78 | + | ||
| 79 | + ls -lh install/* | ||
| 80 | + | ||
| 81 | + ls -lh install/lib | ||
| 82 | + ls -lh install/bin | ||
| 83 | + | ||
| 84 | + - name: Build MFC | ||
| 85 | + shell: cmd | ||
| 86 | + run: | | ||
| 87 | + set path="C:\Program Files\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin" | ||
| 88 | + | ||
| 89 | + cd mfc-examples | ||
| 90 | + | ||
| 91 | + msbuild .\mfc-examples.sln /property:Configuration=Release /property:Platform=${{ matrix.arch }} | ||
| 92 | + | ||
| 93 | + - name: Copy files | ||
| 94 | + shell: bash | ||
| 95 | + run: | | ||
| 96 | + SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2) | ||
| 97 | + arch=${{ matrix.arch }} | ||
| 98 | + | ||
| 99 | + cd mfc-examples/$arch/Release | ||
| 100 | + cp StreamingSpeechRecognition.exe sherpa-onnx-streaming-${SHERPA_ONNX_VERSION}.exe | ||
| 101 | + ls -lh | ||
| 102 | + | ||
| 103 | + - name: Upload artifact | ||
| 104 | + uses: actions/upload-artifact@v2 | ||
| 105 | + with: | ||
| 106 | + name: streaming-speech-recognition-${{ matrix.arch }} | ||
| 107 | + path: ./mfc-examples/${{ matrix.arch }}/Release/StreamingSpeechRecognition.exe | ||
| 108 | + | ||
| 109 | + - name: Release pre-compiled binaries and libs for macOS | ||
| 110 | + if: env.RELEASE == 'true' | ||
| 111 | + uses: svenstaro/upload-release-action@v2 | ||
| 112 | + with: | ||
| 113 | + file_glob: true | ||
| 114 | + overwrite: true | ||
| 115 | + file: ./mfc-examples/${{ matrix.arch }}/Release/sherpa-onnx*.exe |
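For reference, the static build performed by this workflow can be reproduced locally from a Visual Studio 2022 developer prompt. This is only a sketch of the same steps (the x64 platform and the MSBuild location used above are the workflow's assumptions):

    mkdir build
    cd build
    cmake -A x64 -D CMAKE_BUILD_TYPE=Release -D BUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=./install ..
    cmake --build . --config Release --target install
    cd ..\mfc-examples
    msbuild .\mfc-examples.sln /property:Configuration=Release /property:Platform=x64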
| @@ -46,27 +46,13 @@ concurrency: | @@ -46,27 +46,13 @@ concurrency: | ||
| 46 | 46 | ||
| 47 | jobs: | 47 | jobs: |
| 48 | windows_x64: | 48 | windows_x64: |
| 49 | + name: Windows x64 | ||
| 49 | runs-on: ${{ matrix.os }} | 50 | runs-on: ${{ matrix.os }} |
| 50 | - name: ${{ matrix.vs-version }} | ||
| 51 | strategy: | 51 | strategy: |
| 52 | fail-fast: false | 52 | fail-fast: false |
| 53 | matrix: | 53 | matrix: |
| 54 | - include: | ||
| 55 | - - vs-version: vs2015 | ||
| 56 | - toolset-version: v140 | ||
| 57 | - os: windows-2019 | ||
| 58 | - | ||
| 59 | - - vs-version: vs2017 | ||
| 60 | - toolset-version: v141 | ||
| 61 | - os: windows-2019 | ||
| 62 | - | ||
| 63 | - - vs-version: vs2019 | ||
| 64 | - toolset-version: v142 | ||
| 65 | - os: windows-2022 | ||
| 66 | - | ||
| 67 | - - vs-version: vs2022 | ||
| 68 | - toolset-version: v143 | ||
| 69 | - os: windows-2022 | 54 | + os: [windows-latest] |
| 55 | + shared_lib: [ON, OFF] | ||
| 70 | 56 | ||
| 71 | steps: | 57 | steps: |
| 72 | - uses: actions/checkout@v2 | 58 | - uses: actions/checkout@v2 |
| @@ -78,7 +64,7 @@ jobs: | @@ -78,7 +64,7 @@ jobs: | ||
| 78 | run: | | 64 | run: | |
| 79 | mkdir build | 65 | mkdir build |
| 80 | cd build | 66 | cd build |
| 81 | - cmake -T ${{ matrix.toolset-version}},host=x64 -A x64 -D CMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=./install .. | 67 | + cmake -A x64 -D CMAKE_BUILD_TYPE=Release -D BUILD_SHARED_LIBS=${{ matrix.shared_lib }} -DCMAKE_INSTALL_PREFIX=./install .. |
| 82 | 68 | ||
| 83 | - name: Build sherpa-onnx for windows | 69 | - name: Build sherpa-onnx for windows |
| 84 | shell: bash | 70 | shell: bash |
| @@ -122,7 +108,7 @@ jobs: | @@ -122,7 +108,7 @@ jobs: | ||
| 122 | .github/scripts/test-online-transducer.sh | 108 | .github/scripts/test-online-transducer.sh |
| 123 | 109 | ||
| 124 | - name: Copy files | 110 | - name: Copy files |
| 125 | - if: env.RELEASE == 'true' && matrix.vs-version == 'vs2015' | 111 | + if: env.RELEASE == 'true' |
| 126 | shell: bash | 112 | shell: bash |
| 127 | run: | | 113 | run: | |
| 128 | SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2) | 114 | SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2) |
| @@ -137,7 +123,7 @@ jobs: | @@ -137,7 +123,7 @@ jobs: | ||
| 137 | tar cjvf ${dst}.tar.bz2 $dst | 123 | tar cjvf ${dst}.tar.bz2 $dst |
| 138 | 124 | ||
| 139 | - name: Release pre-compiled binaries and libs for macOS | 125 | - name: Release pre-compiled binaries and libs for macOS |
| 140 | - if: env.RELEASE == 'true' && matrix.vs-version == 'vs2015' | 126 | + if: env.RELEASE == 'true' |
| 141 | uses: svenstaro/upload-release-action@v2 | 127 | uses: svenstaro/upload-release-action@v2 |
| 142 | with: | 128 | with: |
| 143 | file_glob: true | 129 | file_glob: true |
| @@ -44,28 +44,13 @@ concurrency: | @@ -44,28 +44,13 @@ concurrency: | ||
| 44 | 44 | ||
| 45 | jobs: | 45 | jobs: |
| 46 | windows_x86: | 46 | windows_x86: |
| 47 | - if: true # disable windows x86 CI for now | 47 | + name: Windows x86 |
| 48 | runs-on: ${{ matrix.os }} | 48 | runs-on: ${{ matrix.os }} |
| 49 | - name: ${{ matrix.vs-version }} | ||
| 50 | strategy: | 49 | strategy: |
| 51 | fail-fast: false | 50 | fail-fast: false |
| 52 | matrix: | 51 | matrix: |
| 53 | - include: | ||
| 54 | - - vs-version: vs2015 | ||
| 55 | - toolset-version: v140 | ||
| 56 | - os: windows-2019 | ||
| 57 | - | ||
| 58 | - - vs-version: vs2017 | ||
| 59 | - toolset-version: v141 | ||
| 60 | - os: windows-2019 | ||
| 61 | - | ||
| 62 | - - vs-version: vs2019 | ||
| 63 | - toolset-version: v142 | ||
| 64 | - os: windows-2022 | ||
| 65 | - | ||
| 66 | - - vs-version: vs2022 | ||
| 67 | - toolset-version: v143 | ||
| 68 | - os: windows-2022 | 52 | + os: [windows-latest] |
| 53 | + shared_lib: [ON, OFF] | ||
| 69 | 54 | ||
| 70 | steps: | 55 | steps: |
| 71 | - uses: actions/checkout@v2 | 56 | - uses: actions/checkout@v2 |
| @@ -77,7 +62,7 @@ jobs: | @@ -77,7 +62,7 @@ jobs: | ||
| 77 | run: | | 62 | run: | |
| 78 | mkdir build | 63 | mkdir build |
| 79 | cd build | 64 | cd build |
| 80 | - cmake -T ${{ matrix.toolset-version}},host=x64 -A Win32 -D CMAKE_BUILD_TYPE=Release -D CMAKE_INSTALL_PREFIX=./install .. | 65 | + cmake -A Win32 -D CMAKE_BUILD_TYPE=Release -D BUILD_SHARED_LIBS=${{ matrix.shared_lib }} -D CMAKE_INSTALL_PREFIX=./install .. |
| 81 | 66 | ||
| 82 | - name: Build sherpa-onnx for windows | 67 | - name: Build sherpa-onnx for windows |
| 83 | shell: bash | 68 | shell: bash |
| 1 | cmake_minimum_required(VERSION 3.13 FATAL_ERROR) | 1 | cmake_minimum_required(VERSION 3.13 FATAL_ERROR) |
| 2 | project(sherpa-onnx) | 2 | project(sherpa-onnx) |
| 3 | 3 | ||
| 4 | -set(SHERPA_ONNX_VERSION "1.5.0") | 4 | +set(SHERPA_ONNX_VERSION "1.5.1") |
| 5 | 5 | ||
| 6 | # Disable warning about | 6 | # Disable warning about |
| 7 | # | 7 | # |
| @@ -71,6 +71,18 @@ if(BUILD_SHARED_LIBS AND MSVC) | @@ -71,6 +71,18 @@ if(BUILD_SHARED_LIBS AND MSVC) | ||
| 71 | set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) | 71 | set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) |
| 72 | endif() | 72 | endif() |
| 73 | 73 | ||
| 74 | +if(NOT BUILD_SHARED_LIBS AND MSVC) | ||
| 75 | + # see https://cmake.org/cmake/help/latest/prop_tgt/MSVC_RUNTIME_LIBRARY.html | ||
| 76 | + # https://stackoverflow.com/questions/14172856/compile-with-mt-instead-of-md-using-cmake | ||
| 77 | + if(MSVC) | ||
| 78 | + add_compile_options( | ||
| 79 | + $<$<CONFIG:>:/MT> #---------| | ||
| 80 | + $<$<CONFIG:Debug>:/MTd> #---|-- Statically link the runtime libraries | ||
| 81 | + $<$<CONFIG:Release>:/MT> #--| | ||
| 82 | + ) | ||
| 83 | + endif() | ||
| 84 | +endif() | ||
| 85 | + | ||
| 74 | message(STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}") | 86 | message(STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}") |
| 75 | message(STATUS "CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX}") | 87 | message(STATUS "CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX}") |
| 76 | message(STATUS "BUILD_SHARED_LIBS ${BUILD_SHARED_LIBS}") | 88 | message(STATUS "BUILD_SHARED_LIBS ${BUILD_SHARED_LIBS}") |
| @@ -154,3 +166,4 @@ add_subdirectory(sherpa-onnx) | @@ -154,3 +166,4 @@ add_subdirectory(sherpa-onnx) | ||
| 154 | if(SHERPA_ONNX_ENABLE_C_API) | 166 | if(SHERPA_ONNX_ENABLE_C_API) |
| 155 | add_subdirectory(c-api-examples) | 167 | add_subdirectory(c-api-examples) |
| 156 | endif() | 168 | endif() |
| 169 | +message(STATUS "CMAKE_CXX_FLAGS: ${CMAKE_CXX_FLAGS}") |
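The /MT block added above pins the static MSVC runtime through per-configuration compile options, matching BUILD_SHARED_LIBS=OFF. As a side note, on CMake 3.15 and newer the MSVC_RUNTIME_LIBRARY property referenced in the linked documentation expresses the same choice more directly; a minimal sketch of that alternative (not what this PR uses):

    # requires CMake >= 3.15 (policy CMP0091)
    cmake_policy(SET CMP0091 NEW)
    set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")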
CPPLINT.cfg
0 → 100644
| 1 | +filter=-./mfc-examples |
| 1 | function(download_kaldi_native_fbank) | 1 | function(download_kaldi_native_fbank) |
| 2 | include(FetchContent) | 2 | include(FetchContent) |
| 3 | 3 | ||
| 4 | - set(kaldi_native_fbank_URL "https://github.com/csukuangfj/kaldi-native-fbank/archive/refs/tags/v1.14.tar.gz") | ||
| 5 | - set(kaldi_native_fbank_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/kaldi-native-fbank-1.14.tar.gz") | ||
| 6 | - set(kaldi_native_fbank_HASH "SHA256=6a66638a111d3ce21fe6f29cbf9ab3dbcae2331c77391bf825927df5cbf2babe") | 4 | + set(kaldi_native_fbank_URL "https://github.com/csukuangfj/kaldi-native-fbank/archive/refs/tags/v1.17.tar.gz") |
| 5 | + set(kaldi_native_fbank_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/kaldi-native-fbank-1.17.tar.gz") | ||
| 6 | + set(kaldi_native_fbank_HASH "SHA256=300dc282d51d738e70f194ef13a50bf4cf8d54a3b2686d75f7fc2fb821f8c1e6") | ||
| 7 | 7 | ||
| 8 | set(KALDI_NATIVE_FBANK_BUILD_TESTS OFF CACHE BOOL "" FORCE) | 8 | set(KALDI_NATIVE_FBANK_BUILD_TESTS OFF CACHE BOOL "" FORCE) |
| 9 | set(KALDI_NATIVE_FBANK_BUILD_PYTHON OFF CACHE BOOL "" FORCE) | 9 | set(KALDI_NATIVE_FBANK_BUILD_PYTHON OFF CACHE BOOL "" FORCE) |
| @@ -12,11 +12,11 @@ function(download_kaldi_native_fbank) | @@ -12,11 +12,11 @@ function(download_kaldi_native_fbank) | ||
| 12 | # If you don't have access to the Internet, | 12 | # If you don't have access to the Internet, |
| 13 | # please pre-download kaldi-native-fbank | 13 | # please pre-download kaldi-native-fbank |
| 14 | set(possible_file_locations | 14 | set(possible_file_locations |
| 15 | - $ENV{HOME}/Downloads/kaldi-native-fbank-1.14.tar.gz | ||
| 16 | - ${PROJECT_SOURCE_DIR}/kaldi-native-fbank-1.14.tar.gz | ||
| 17 | - ${PROJECT_BINARY_DIR}/kaldi-native-fbank-1.14.tar.gz | ||
| 18 | - /tmp/kaldi-native-fbank-1.14.tar.gz | ||
| 19 | - /star-fj/fangjun/download/github/kaldi-native-fbank-1.14.tar.gz | 15 | + $ENV{HOME}/Downloads/kaldi-native-fbank-1.17.tar.gz |
| 16 | + ${PROJECT_SOURCE_DIR}/kaldi-native-fbank-1.17.tar.gz | ||
| 17 | + ${PROJECT_BINARY_DIR}/kaldi-native-fbank-1.17.tar.gz | ||
| 18 | + /tmp/kaldi-native-fbank-1.17.tar.gz | ||
| 19 | + /star-fj/fangjun/download/github/kaldi-native-fbank-1.17.tar.gz | ||
| 20 | ) | 20 | ) |
| 21 | 21 | ||
| 22 | foreach(f IN LISTS possible_file_locations) | 22 | foreach(f IN LISTS possible_file_locations) |
| @@ -88,6 +88,7 @@ function(download_onnxruntime) | @@ -88,6 +88,7 @@ function(download_onnxruntime) | ||
| 88 | message(STATUS "CMAKE_VS_PLATFORM_NAME: ${CMAKE_VS_PLATFORM_NAME}") | 88 | message(STATUS "CMAKE_VS_PLATFORM_NAME: ${CMAKE_VS_PLATFORM_NAME}") |
| 89 | 89 | ||
| 90 | if(CMAKE_VS_PLATFORM_NAME STREQUAL Win32 OR CMAKE_VS_PLATFORM_NAME STREQUAL win32) | 90 | if(CMAKE_VS_PLATFORM_NAME STREQUAL Win32 OR CMAKE_VS_PLATFORM_NAME STREQUAL win32) |
| 91 | + if(BUILD_SHARED_LIBS) | ||
| 91 | # If you don't have access to the Internet, | 92 | # If you don't have access to the Internet, |
| 92 | # please pre-download onnxruntime | 93 | # please pre-download onnxruntime |
| 93 | # | 94 | # |
| @@ -102,15 +103,28 @@ function(download_onnxruntime) | @@ -102,15 +103,28 @@ function(download_onnxruntime) | ||
| 102 | set(onnxruntime_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.15.1/onnxruntime-win-x86-1.15.1.zip") | 103 | set(onnxruntime_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.15.1/onnxruntime-win-x86-1.15.1.zip") |
| 103 | set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-win-x86-1.15.1.zip") | 104 | set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-win-x86-1.15.1.zip") |
| 104 | set(onnxruntime_HASH "SHA256=8de18fdf274a8adcd95272fcf58beda0fe2fb37f0cd62c02bc4bb6200429e4e2") | 105 | set(onnxruntime_HASH "SHA256=8de18fdf274a8adcd95272fcf58beda0fe2fb37f0cd62c02bc4bb6200429e4e2") |
| 106 | + else() | ||
| 107 | + set(possible_file_locations | ||
| 108 | + $ENV{HOME}/Downloads/onnxruntime-win-x86-static-1.15.1.tar.bz2 | ||
| 109 | + ${PROJECT_SOURCE_DIR}/onnxruntime-win-x86-static-1.15.1.tar.bz2 | ||
| 110 | + ${PROJECT_BINARY_DIR}/onnxruntime-win-x86-static-1.15.1.tar.bz2 | ||
| 111 | + /tmp/onnxruntime-win-x86-static-1.15.1.tar.bz2 | ||
| 112 | + ) | ||
| 113 | + | ||
| 114 | + set(onnxruntime_URL "https://huggingface.co/csukuangfj/onnxruntime-libs/resolve/main/onnxruntime-win-x86-static-1.15.1.tar.bz2") | ||
| 115 | + set(onnxruntime_URL2 "") | ||
| 116 | + set(onnxruntime_HASH "SHA256=a2b33a3e8a1f89cddf303f0a97a5a88f4202579c653cfb29158c8cf7da3734eb") | ||
| 117 | + endif() | ||
| 105 | 118 | ||
| 106 | if(SHERPA_ONNX_ENABLE_GPU) | 119 | if(SHERPA_ONNX_ENABLE_GPU) |
| 107 | message(FATAL_ERROR "GPU support for Win32 is not supported!") | 120 | message(FATAL_ERROR "GPU support for Win32 is not supported!") |
| 108 | endif() | 121 | endif() |
| 109 | else() | 122 | else() |
| 123 | + # for 64-bit windows | ||
| 124 | + | ||
| 125 | + if(BUILD_SHARED_LIBS) | ||
| 110 | # If you don't have access to the Internet, | 126 | # If you don't have access to the Internet, |
| 111 | # please pre-download onnxruntime | 127 | # please pre-download onnxruntime |
| 112 | - # | ||
| 113 | - # for 64-bit windows | ||
| 114 | set(possible_file_locations | 128 | set(possible_file_locations |
| 115 | $ENV{HOME}/Downloads/onnxruntime-win-x64-1.15.1.zip | 129 | $ENV{HOME}/Downloads/onnxruntime-win-x64-1.15.1.zip |
| 116 | ${PROJECT_SOURCE_DIR}/onnxruntime-win-x64-1.15.1.zip | 130 | ${PROJECT_SOURCE_DIR}/onnxruntime-win-x64-1.15.1.zip |
| @@ -133,6 +147,22 @@ function(download_onnxruntime) | @@ -133,6 +147,22 @@ function(download_onnxruntime) | ||
| 133 | /tmp/onnxruntime-win-x64-gpu-1.15.1.zip | 147 | /tmp/onnxruntime-win-x64-gpu-1.15.1.zip |
| 134 | ) | 148 | ) |
| 135 | endif() | 149 | endif() |
| 150 | + else() | ||
| 151 | + # static libraries for windows x64 | ||
| 152 | + message(STATUS "Use static onnxruntime libraries") | ||
| 153 | + # If you don't have access to the Internet, | ||
| 154 | + # please pre-download onnxruntime | ||
| 155 | + set(possible_file_locations | ||
| 156 | + $ENV{HOME}/Downloads/onnxruntime-win-x64-static-1.15.1.tar.bz2 | ||
| 157 | + ${PROJECT_SOURCE_DIR}/onnxruntime-win-x64-static-1.15.1.tar.bz2 | ||
| 158 | + ${PROJECT_BINARY_DIR}/onnxruntime-win-x64-static-1.15.1.tar.bz2 | ||
| 159 | + /tmp/onnxruntime-win-x64-static-1.15.1.tar.bz2 | ||
| 160 | + ) | ||
| 161 | + | ||
| 162 | + set(onnxruntime_URL "https://huggingface.co/csukuangfj/onnxruntime-libs/resolve/main/onnxruntime-win-x64-static-1.15.1.tar.bz2") | ||
| 163 | + set(onnxruntime_URL2 "") | ||
| 164 | + set(onnxruntime_HASH "SHA256=f5c19ac1fc6a61c78a231a41df10aede2586665ab397bdc3f007eb8d2c8d4a19") | ||
| 165 | + endif() | ||
| 136 | endif() | 166 | endif() |
| 137 | # After downloading, it contains: | 167 | # After downloading, it contains: |
| 138 | # ./lib/onnxruntime.{dll,lib,pdb} | 168 | # ./lib/onnxruntime.{dll,lib,pdb} |
| @@ -170,6 +200,7 @@ function(download_onnxruntime) | @@ -170,6 +200,7 @@ function(download_onnxruntime) | ||
| 170 | endif() | 200 | endif() |
| 171 | message(STATUS "onnxruntime is downloaded to ${onnxruntime_SOURCE_DIR}") | 201 | message(STATUS "onnxruntime is downloaded to ${onnxruntime_SOURCE_DIR}") |
| 172 | 202 | ||
| 203 | + if(BUILD_SHARED_LIBS OR NOT WIN32) | ||
| 173 | find_library(location_onnxruntime onnxruntime | 204 | find_library(location_onnxruntime onnxruntime |
| 174 | PATHS | 205 | PATHS |
| 175 | "${onnxruntime_SOURCE_DIR}/lib" | 206 | "${onnxruntime_SOURCE_DIR}/lib" |
| @@ -184,6 +215,7 @@ function(download_onnxruntime) | @@ -184,6 +215,7 @@ function(download_onnxruntime) | ||
| 184 | IMPORTED_LOCATION ${location_onnxruntime} | 215 | IMPORTED_LOCATION ${location_onnxruntime} |
| 185 | INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_SOURCE_DIR}/include" | 216 | INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_SOURCE_DIR}/include" |
| 186 | ) | 217 | ) |
| 218 | + endif() | ||
| 187 | 219 | ||
| 188 | if(SHERPA_ONNX_ENABLE_GPU AND NOT WIN32) | 220 | if(SHERPA_ONNX_ENABLE_GPU AND NOT WIN32) |
| 189 | find_library(location_onnxruntime_cuda_lib onnxruntime_providers_cuda | 221 | find_library(location_onnxruntime_cuda_lib onnxruntime_providers_cuda |
| @@ -198,6 +230,7 @@ function(download_onnxruntime) | @@ -198,6 +230,7 @@ function(download_onnxruntime) | ||
| 198 | endif() | 230 | endif() |
| 199 | 231 | ||
| 200 | if(WIN32) | 232 | if(WIN32) |
| 233 | + if(BUILD_SHARED_LIBS) | ||
| 201 | set_property(TARGET onnxruntime | 234 | set_property(TARGET onnxruntime |
| 202 | PROPERTY | 235 | PROPERTY |
| 203 | IMPORTED_IMPLIB "${onnxruntime_SOURCE_DIR}/lib/onnxruntime.lib" | 236 | IMPORTED_IMPLIB "${onnxruntime_SOURCE_DIR}/lib/onnxruntime.lib" |
| @@ -225,6 +258,10 @@ function(download_onnxruntime) | @@ -225,6 +258,10 @@ function(download_onnxruntime) | ||
| 225 | ${CMAKE_BINARY_DIR}/bin/${CMAKE_BUILD_TYPE} | 258 | ${CMAKE_BINARY_DIR}/bin/${CMAKE_BUILD_TYPE} |
| 226 | ) | 259 | ) |
| 227 | endif() | 260 | endif() |
| 261 | + else() | ||
| 262 | + # for static libraries, we use onnxruntime_lib_files directly below | ||
| 263 | + include_directories(${onnxruntime_SOURCE_DIR}/include) | ||
| 264 | + endif() | ||
| 228 | endif() | 265 | endif() |
| 229 | 266 | ||
| 230 | if(UNIX AND NOT APPLE) | 267 | if(UNIX AND NOT APPLE) |
| @@ -232,7 +269,12 @@ function(download_onnxruntime) | @@ -232,7 +269,12 @@ function(download_onnxruntime) | ||
| 232 | elseif(APPLE) | 269 | elseif(APPLE) |
| 233 | file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/libonnxruntime.*.*dylib") | 270 | file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/libonnxruntime.*.*dylib") |
| 234 | elseif(WIN32) | 271 | elseif(WIN32) |
| 272 | + if(BUILD_SHARED_LIBS) | ||
| 235 | file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/*.dll") | 273 | file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/*.dll") |
| 274 | + else() | ||
| 275 | + file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/*.lib") | ||
| 276 | + set(onnxruntime_lib_files ${onnxruntime_lib_files} PARENT_SCOPE) | ||
| 277 | + endif() | ||
| 236 | endif() | 278 | endif() |
| 237 | 279 | ||
| 238 | message(STATUS "onnxruntime lib files: ${onnxruntime_lib_files}") | 280 | message(STATUS "onnxruntime lib files: ${onnxruntime_lib_files}") |
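In short, this file now handles two consumption models for the prebuilt onnxruntime on Windows: with BUILD_SHARED_LIBS=ON it keeps the existing imported target backed by onnxruntime.lib/onnxruntime.dll, and with static libraries it skips the imported target, adds the include directory, and exports the globbed archives for the sherpa-onnx-core link step shown further below. A condensed sketch of the static path, assembled from the hunks above:

    # static onnxruntime: no imported target; link the archives directly
    include_directories(${onnxruntime_SOURCE_DIR}/include)
    file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/*.lib")
    set(onnxruntime_lib_files ${onnxruntime_lib_files} PARENT_SCOPE)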
| @@ -17,13 +17,12 @@ git clone https://github.com/k2-fsa/sherpa-onnx | @@ -17,13 +17,12 @@ git clone https://github.com/k2-fsa/sherpa-onnx | ||
| 17 | cd sherpa-onnx | 17 | cd sherpa-onnx |
| 18 | mkdir build | 18 | mkdir build |
| 19 | 19 | ||
| 20 | -cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX=./install .. | 20 | +cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=./install .. |
| 21 | cmake --build . --config Release --target install | 21 | cmake --build . --config Release --target install |
| 22 | 22 | ||
| 23 | cd ../mfc-examples | 23 | cd ../mfc-examples |
| 24 | 24 | ||
| 25 | msbuild ./mfc-examples.sln /property:Configuration=Release /property:Platform=x64 | 25 | msbuild ./mfc-examples.sln /property:Configuration=Release /property:Platform=x64 |
| 26 | -cp ../build/install/lib/*.dll ./x64/Release/ | ||
| 27 | 26 | ||
| 28 | # now run the program | 27 | # now run the program |
| 29 | 28 |
| @@ -3,11 +3,12 @@ | @@ -3,11 +3,12 @@ | ||
| 3 | // application. | 3 | // application. |
| 4 | // | 4 | // |
| 5 | 5 | ||
| 6 | -#include "StreamingSpeechRecognition.h" | 6 | +#include "pch.h" |
| 7 | +#include "framework.h" | ||
| 7 | 8 | ||
| 9 | +#include "StreamingSpeechRecognition.h" | ||
| 8 | #include "StreamingSpeechRecognitionDlg.h" | 10 | #include "StreamingSpeechRecognitionDlg.h" |
| 9 | -#include "framework.h" | ||
| 10 | -#include "pch.h" | 11 | + |
| 11 | 12 | ||
| 12 | #ifdef _DEBUG | 13 | #ifdef _DEBUG |
| 13 | #define new DEBUG_NEW | 14 | #define new DEBUG_NEW |
| @@ -51,7 +51,7 @@ | @@ -51,7 +51,7 @@ | ||
| 51 | <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration"> | 51 | <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration"> |
| 52 | <ConfigurationType>Application</ConfigurationType> | 52 | <ConfigurationType>Application</ConfigurationType> |
| 53 | <UseDebugLibraries>false</UseDebugLibraries> | 53 | <UseDebugLibraries>false</UseDebugLibraries> |
| 54 | - <PlatformToolset>v142</PlatformToolset> | 54 | + <PlatformToolset>v143</PlatformToolset> |
| 55 | <WholeProgramOptimization>true</WholeProgramOptimization> | 55 | <WholeProgramOptimization>true</WholeProgramOptimization> |
| 56 | <CharacterSet>Unicode</CharacterSet> | 56 | <CharacterSet>Unicode</CharacterSet> |
| 57 | <UseOfMfc>Static</UseOfMfc> | 57 | <UseOfMfc>Static</UseOfMfc> |
| 1 | 1 | ||
| 2 | // StreamingSpeechRecognitionDlg.cpp : implementation file | 2 | // StreamingSpeechRecognitionDlg.cpp : implementation file |
| 3 | // | 3 | // |
| 4 | +#include "pch.h" | ||
| 5 | +#include "framework.h" | ||
| 6 | +#include "afxdialogex.h" | ||
| 7 | + | ||
| 4 | 8 | ||
| 5 | #include "StreamingSpeechRecognitionDlg.h" | 9 | #include "StreamingSpeechRecognitionDlg.h" |
| 6 | 10 | ||
| @@ -10,9 +14,7 @@ | @@ -10,9 +14,7 @@ | ||
| 10 | #include <vector> | 14 | #include <vector> |
| 11 | 15 | ||
| 12 | #include "StreamingSpeechRecognition.h" | 16 | #include "StreamingSpeechRecognition.h" |
| 13 | -#include "afxdialogex.h" | ||
| 14 | -#include "framework.h" | ||
| 15 | -#include "pch.h" | 17 | + |
| 16 | 18 | ||
| 17 | #ifdef _DEBUG | 19 | #ifdef _DEBUG |
| 18 | #define new DEBUG_NEW | 20 | #define new DEBUG_NEW |
| @@ -131,6 +133,7 @@ static int32_t RecordCallback(const void *input_buffer, | @@ -131,6 +133,7 @@ static int32_t RecordCallback(const void *input_buffer, | ||
| 131 | void CStreamingSpeechRecognitionDlg::OnBnClickedOk() { | 133 | void CStreamingSpeechRecognitionDlg::OnBnClickedOk() { |
| 132 | if (!recognizer_) { | 134 | if (!recognizer_) { |
| 133 | AppendLineToMultilineEditCtrl("Creating recognizer..."); | 135 | AppendLineToMultilineEditCtrl("Creating recognizer..."); |
| 136 | + AppendLineToMultilineEditCtrl("It will take several seconds. Please wait"); | ||
| 134 | InitRecognizer(); | 137 | InitRecognizer(); |
| 135 | if (!recognizer_) { | 138 | if (!recognizer_) { |
| 136 | // failed to create the recognizer | 139 | // failed to create the recognizer |
| @@ -264,20 +267,56 @@ void CStreamingSpeechRecognitionDlg::InitRecognizer() { | @@ -264,20 +267,56 @@ void CStreamingSpeechRecognitionDlg::InitRecognizer() { | ||
| 264 | if (!is_ok) { | 267 | if (!is_ok) { |
| 265 | my_btn_.EnableWindow(FALSE); | 268 | my_btn_.EnableWindow(FALSE); |
| 266 | std::string msg = | 269 | std::string msg = |
| 267 | - "\r\nPlease go to " | 270 | + "\r\nPlease go to\r\n" |
| 268 | "https://k2-fsa.github.io/sherpa/onnx/pretrained_models/index.html " | 271 | "https://k2-fsa.github.io/sherpa/onnx/pretrained_models/index.html " |
| 269 | "\r\n"; | 272 | "\r\n"; |
| 270 | - msg += "to download an offline model."; | 273 | + msg += "to download a streaming model, i.e., an online model.\r\n"; |
| 274 | + msg += | ||
| 275 | + "You need to rename them to encoder.onnx, decoder.onnx, and " | ||
| 276 | + "joiner.onnx correspoondingly.\r\n\r\n"; | ||
| 271 | msg += | 277 | msg += |
| 272 | - " You need to rename them to encoder.onnx, decoder.onnx, and " | ||
| 273 | - "joiner.onnx correspoondingly"; | 278 | + "We use the following model as an example to show you how to do " |
| 279 | + "that.\r\n"; | ||
| 280 | + msg += | ||
| 281 | + "https://huggingface.co/pkufool/" | ||
| 282 | + "icefall-asr-zipformer-streaming-wenetspeech-20230615"; | ||
| 283 | + msg += "\r\n\r\n"; | ||
| 284 | + msg += | ||
| 285 | + "wget https:// " | ||
| 286 | + "huggingface.co/pkufool/" | ||
| 287 | + "icefall-asr-zipformer-streaming-wenetspeech-20230615/resolve/main/exp/" | ||
| 288 | + "encoder-epoch-12-avg-4-chunk-16-left-128.onnx\r\n"; | ||
| 289 | + msg += | ||
| 290 | + "wget https:// " | ||
| 291 | + "huggingface.co/pkufool/" | ||
| 292 | + "icefall-asr-zipformer-streaming-wenetspeech-20230615/resolve/main/exp/" | ||
| 293 | + "decoder-epoch-12-avg-4-chunk-16-left-128.onnx\r\n"; | ||
| 294 | + msg += | ||
| 295 | + "wget https:// " | ||
| 296 | + "huggingface.co/pkufool/" | ||
| 297 | + "icefall-asr-zipformer-streaming-wenetspeech-20230615/resolve/main/exp/" | ||
| 298 | + "joiner-epoch-12-avg-4-chunk-16-left-128.onnx\r\n"; | ||
| 299 | + msg += | ||
| 300 | + "wget " | ||
| 301 | + "https://huggingface.co/pkufool/" | ||
| 302 | + "icefall-asr-zipformer-streaming-wenetspeech-20230615/resolve/main/" | ||
| 303 | + "data/lang_char/tokens.txt\r\n"; | ||
| 304 | + | ||
| 305 | + msg += "\r\nNow rename them.\r\n"; | ||
| 306 | + msg += "mv encoder-epoch-12-avg-4-chunk-16-left-128.onnx encoder.onnx\r\n"; | ||
| 307 | + msg += "mv decoder-epoch-12-avg-4-chunk-16-left-128.onnx decoder.onnx\r\n"; | ||
| 308 | + msg += "mv joiner-epoch-12-avg-4-chunk-16-left-128.onnx joiner.onnx\r\n"; | ||
| 309 | + msg += "\r\n"; | ||
| 310 | + msg += "That's it!\r\n"; | ||
| 311 | + | ||
| 312 | + | ||
| 274 | AppendLineToMultilineEditCtrl(msg); | 313 | AppendLineToMultilineEditCtrl(msg); |
| 275 | return; | 314 | return; |
| 276 | } | 315 | } |
| 277 | 316 | ||
| 278 | SherpaOnnxOnlineRecognizerConfig config; | 317 | SherpaOnnxOnlineRecognizerConfig config; |
| 279 | config.model_config.debug = 0; | 318 | config.model_config.debug = 0; |
| 280 | - config.model_config.num_threads = 2; | 319 | + config.model_config.num_threads = 1; |
| 281 | config.model_config.provider = "cpu"; | 320 | config.model_config.provider = "cpu"; |
| 282 | 321 | ||
| 283 | config.decoding_method = "greedy_search"; | 322 | config.decoding_method = "greedy_search"; |
| @@ -301,7 +340,7 @@ void CStreamingSpeechRecognitionDlg::InitRecognizer() { | @@ -301,7 +340,7 @@ void CStreamingSpeechRecognitionDlg::InitRecognizer() { | ||
| 301 | 340 | ||
| 302 | // see | 341 | // see |
| 303 | // https://stackoverflow.com/questions/7153935/how-to-convert-utf-8-stdstring-to-utf-16-stdwstring | 342 | // https://stackoverflow.com/questions/7153935/how-to-convert-utf-8-stdstring-to-utf-16-stdwstring |
| 304 | -std::wstring Utf8ToUtf16(const std::string &utf8) { | 343 | +static std::wstring Utf8ToUtf16(const std::string &utf8) { |
| 305 | std::vector<unsigned long> unicode; | 344 | std::vector<unsigned long> unicode; |
| 306 | size_t i = 0; | 345 | size_t i = 0; |
| 307 | while (i < utf8.size()) { | 346 | while (i < utf8.size()) { |
| @@ -392,6 +431,7 @@ static std::string Cat(const std::vector<std::string> &results, | @@ -392,6 +431,7 @@ static std::string Cat(const std::vector<std::string> &results, | ||
| 392 | 431 | ||
| 393 | int CStreamingSpeechRecognitionDlg::RunThread() { | 432 | int CStreamingSpeechRecognitionDlg::RunThread() { |
| 394 | std::vector<std::string> results; | 433 | std::vector<std::string> results; |
| 434 | + | ||
| 395 | std::string last_text; | 435 | std::string last_text; |
| 396 | while (started_) { | 436 | while (started_) { |
| 397 | while (IsOnlineStreamReady(recognizer_, stream_)) { | 437 | while (IsOnlineStreamReady(recognizer_, stream_)) { |
| @@ -406,6 +446,8 @@ int CStreamingSpeechRecognitionDlg::RunThread() { | @@ -406,6 +446,8 @@ int CStreamingSpeechRecognitionDlg::RunThread() { | ||
| 406 | // str.Format(_T("%s"), Cat(results, text).c_str()); | 446 | // str.Format(_T("%s"), Cat(results, text).c_str()); |
| 407 | auto str = Utf8ToUtf16(Cat(results, text).c_str()); | 447 | auto str = Utf8ToUtf16(Cat(results, text).c_str()); |
| 408 | my_text_.SetWindowText(str.c_str()); | 448 | my_text_.SetWindowText(str.c_str()); |
| 449 | + my_text_.SetFocus(); | ||
| 450 | + my_text_.SetSel(-1); | ||
| 409 | last_text = text; | 451 | last_text = text; |
| 410 | } | 452 | } |
| 411 | int is_endpoint = IsEndpoint(recognizer_, stream_); | 453 | int is_endpoint = IsEndpoint(recognizer_, stream_); |
| @@ -48,7 +48,7 @@ class CStreamingSpeechRecognitionDlg : public CDialogEx { | @@ -48,7 +48,7 @@ class CStreamingSpeechRecognitionDlg : public CDialogEx { | ||
| 48 | SherpaOnnxOnlineRecognizer *recognizer_ = nullptr; | 48 | SherpaOnnxOnlineRecognizer *recognizer_ = nullptr; |
| 49 | 49 | ||
| 50 | PaStream *pa_stream_ = nullptr; | 50 | PaStream *pa_stream_ = nullptr; |
| 51 | - RecognizerThread *thread_; | 51 | + RecognizerThread *thread_ = nullptr; |
| 52 | CButton my_btn_; | 52 | CButton my_btn_; |
| 53 | CEdit my_text_; | 53 | CEdit my_text_; |
| 54 | 54 |
| @@ -5,16 +5,45 @@ | @@ -5,16 +5,45 @@ | ||
| 5 | <PropertyGroup> | 5 | <PropertyGroup> |
| 6 | <SherpaOnnxBuildDirectory>..\..\build</SherpaOnnxBuildDirectory> | 6 | <SherpaOnnxBuildDirectory>..\..\build</SherpaOnnxBuildDirectory> |
| 7 | <SherpaOnnxInstallDirectory>..\..\build\install</SherpaOnnxInstallDirectory> | 7 | <SherpaOnnxInstallDirectory>..\..\build\install</SherpaOnnxInstallDirectory> |
| 8 | - <SherpaOnnxLibraries>sherpa-onnx-portaudio.lib;sherpa-onnx-c-api.lib;sherpa-onnx-core.lib</SherpaOnnxLibraries> | 8 | + <SherpaOnnxLibraries> |
| 9 | + sherpa-onnx-portaudio_static.lib; | ||
| 10 | + sherpa-onnx-c-api.lib; | ||
| 11 | + sherpa-onnx-core.lib; | ||
| 12 | + kaldi-native-fbank-core.lib; | ||
| 13 | + absl_base.lib; | ||
| 14 | + absl_city.lib; | ||
| 15 | + absl_hash.lib; | ||
| 16 | + absl_low_level_hash.lib; | ||
| 17 | + absl_raw_hash_set.lib; | ||
| 18 | + absl_raw_logging_internal.lib; | ||
| 19 | + absl_throw_delegate.lib; | ||
| 20 | + clog.lib; | ||
| 21 | + cpuinfo.lib; | ||
| 22 | + flatbuffers.lib; | ||
| 23 | + libprotobuf-lite.lib; | ||
| 24 | + onnx.lib; | ||
| 25 | + onnx_proto.lib; | ||
| 26 | + onnxruntime_common.lib; | ||
| 27 | + onnxruntime_flatbuffers.lib; | ||
| 28 | + onnxruntime_framework.lib; | ||
| 29 | + onnxruntime_graph.lib; | ||
| 30 | + onnxruntime_mlas.lib; | ||
| 31 | + onnxruntime_optimizer.lib; | ||
| 32 | + onnxruntime_providers.lib; | ||
| 33 | + onnxruntime_session.lib; | ||
| 34 | + onnxruntime_util.lib; | ||
| 35 | + re2.lib; | ||
| 36 | + </SherpaOnnxLibraries> | ||
| 9 | </PropertyGroup> | 37 | </PropertyGroup> |
| 10 | <ItemDefinitionGroup> | 38 | <ItemDefinitionGroup> |
| 11 | <ClCompile> | 39 | <ClCompile> |
| 12 | <AdditionalIncludeDirectories> | 40 | <AdditionalIncludeDirectories> |
| 13 | - $(SherpaOnnxBuildDirectory)\_deps\portaudio-src\include;$(SherpaOnnxInstallDirectory)\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> | 41 | + $(SherpaOnnxBuildDirectory)\_deps\portaudio-src\include; |
| 42 | + $(SherpaOnnxInstallDirectory)\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> | ||
| 14 | </ClCompile> | 43 | </ClCompile> |
| 15 | <Link> | 44 | <Link> |
| 16 | <AdditionalLibraryDirectories>$(SherpaOnnxInstallDirectory)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> | 45 | <AdditionalLibraryDirectories>$(SherpaOnnxInstallDirectory)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> |
| 17 | - <AdditionalDependencies>$(SherpaOnnxLibraries)</AdditionalDependencies> | 46 | + <AdditionalDependencies>$(SherpaOnnxLibraries);</AdditionalDependencies> |
| 18 | </Link> | 47 | </Link> |
| 19 | </ItemDefinitionGroup> | 48 | </ItemDefinitionGroup> |
| 20 | <ItemGroup /> | 49 | <ItemGroup /> |
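Because the example is now linked statically, this property sheet must enumerate onnxruntime's transitive dependencies (abseil, protobuf-lite, flatbuffers, re2, cpuinfo, onnx, and the onnxruntime_* components) instead of a single import library, and the PortAudio library becomes sherpa-onnx-portaudio_static.lib. If the sherpa-onnx build tree is somewhere other than ..\..\build, the directories defined here can presumably be overridden as MSBuild global properties, for example (paths below are placeholders):

    msbuild .\mfc-examples.sln /property:Configuration=Release /property:Platform=x64 ^
      /property:SherpaOnnxBuildDirectory=C:\path\to\sherpa-onnx\build ^
      /property:SherpaOnnxInstallDirectory=C:\path\to\sherpa-onnx\build\install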
| @@ -75,10 +75,13 @@ if(ANDROID_NDK) | @@ -75,10 +75,13 @@ if(ANDROID_NDK) | ||
| 75 | target_link_libraries(sherpa-onnx-core android log) | 75 | target_link_libraries(sherpa-onnx-core android log) |
| 76 | endif() | 76 | endif() |
| 77 | 77 | ||
| 78 | -target_link_libraries(sherpa-onnx-core | ||
| 79 | - onnxruntime | ||
| 80 | - kaldi-native-fbank-core | ||
| 81 | -) | 78 | +target_link_libraries(sherpa-onnx-core kaldi-native-fbank-core) |
| 79 | + | ||
| 80 | +if(BUILD_SHARED_LIBS OR NOT WIN32) | ||
| 81 | + target_link_libraries(sherpa-onnx-core onnxruntime) | ||
| 82 | +else() | ||
| 83 | + target_link_libraries(sherpa-onnx-core ${onnxruntime_lib_files}) | ||
| 84 | +endif() | ||
| 82 | 85 | ||
| 83 | if(SHERPA_ONNX_ENABLE_GPU) | 86 | if(SHERPA_ONNX_ENABLE_GPU) |
| 84 | target_link_libraries(sherpa-onnx-core | 87 | target_link_libraries(sherpa-onnx-core |
| @@ -9,11 +9,11 @@ | @@ -9,11 +9,11 @@ | ||
| 9 | 9 | ||
| 10 | #include <algorithm> | 10 | #include <algorithm> |
| 11 | #include <memory> | 11 | #include <memory> |
| 12 | +#include <numeric> | ||
| 12 | #include <sstream> | 13 | #include <sstream> |
| 13 | #include <string> | 14 | #include <string> |
| 14 | #include <utility> | 15 | #include <utility> |
| 15 | #include <vector> | 16 | #include <vector> |
| 16 | -#include <numeric> | ||
| 17 | 17 | ||
| 18 | #if __ANDROID_API__ >= 9 | 18 | #if __ANDROID_API__ >= 9 |
| 19 | #include "android/asset_manager.h" | 19 | #include "android/asset_manager.h" |
| @@ -252,7 +252,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::StackStates( | @@ -252,7 +252,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::StackStates( | ||
| 252 | std::vector<std::vector<Ort::Value>> | 252 | std::vector<std::vector<Ort::Value>> |
| 253 | OnlineZipformer2TransducerModel::UnStackStates( | 253 | OnlineZipformer2TransducerModel::UnStackStates( |
| 254 | const std::vector<Ort::Value> &states) const { | 254 | const std::vector<Ort::Value> &states) const { |
| 255 | - int32_t m = std::accumulate(num_encoder_layers_.begin(), num_encoder_layers_.end(), 0); | 255 | + int32_t m = std::accumulate(num_encoder_layers_.begin(), |
| 256 | + num_encoder_layers_.end(), 0); | ||
| 256 | assert(states.size() == m * 6 + 2); | 257 | assert(states.size() == m * 6 + 2); |
| 257 | 258 | ||
| 258 | int32_t batch_size = states[0].GetTensorTypeAndShapeInfo().GetShape()[1]; | 259 | int32_t batch_size = states[0].GetTensorTypeAndShapeInfo().GetShape()[1]; |
| @@ -332,10 +333,12 @@ OnlineZipformer2TransducerModel::UnStackStates( | @@ -332,10 +333,12 @@ OnlineZipformer2TransducerModel::UnStackStates( | ||
| 332 | return ans; | 333 | return ans; |
| 333 | } | 334 | } |
| 334 | 335 | ||
| 335 | -std::vector<Ort::Value> OnlineZipformer2TransducerModel::GetEncoderInitStates() { | 336 | +std::vector<Ort::Value> |
| 337 | +OnlineZipformer2TransducerModel::GetEncoderInitStates() { | ||
| 336 | std::vector<Ort::Value> ans; | 338 | std::vector<Ort::Value> ans; |
| 337 | int32_t n = static_cast<int32_t>(encoder_dims_.size()); | 339 | int32_t n = static_cast<int32_t>(encoder_dims_.size()); |
| 338 | - int32_t m = std::accumulate(num_encoder_layers_.begin(), num_encoder_layers_.end(), 0); | 340 | + int32_t m = std::accumulate(num_encoder_layers_.begin(), |
| 341 | + num_encoder_layers_.end(), 0); | ||
| 339 | ans.reserve(m * 6 + 2); | 342 | ans.reserve(m * 6 + 2); |
| 340 | 343 | ||
| 341 | for (int32_t i = 0; i != n; ++i) { | 344 | for (int32_t i = 0; i != n; ++i) { |
| @@ -354,7 +357,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::GetEncoderInitStates() | @@ -354,7 +357,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::GetEncoderInitStates() | ||
| 354 | } | 357 | } |
| 355 | 358 | ||
| 356 | { | 359 | { |
| 357 | - std::array<int64_t, 4> s{1, 1, left_context_len_[i], nonlin_attn_head_dim}; | 360 | + std::array<int64_t, 4> s{1, 1, left_context_len_[i], |
| 361 | + nonlin_attn_head_dim}; | ||
| 358 | auto v = | 362 | auto v = |
| 359 | Ort::Value::CreateTensor<float>(allocator_, s.data(), s.size()); | 363 | Ort::Value::CreateTensor<float>(allocator_, s.data(), s.size()); |
| 360 | Fill(&v, 0); | 364 | Fill(&v, 0); |
| @@ -378,7 +382,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::GetEncoderInitStates() | @@ -378,7 +382,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::GetEncoderInitStates() | ||
| 378 | } | 382 | } |
| 379 | 383 | ||
| 380 | { | 384 | { |
| 381 | - std::array<int64_t, 3> s{1, encoder_dims_[i], cnn_module_kernels_[i] / 2}; | 385 | + std::array<int64_t, 3> s{1, encoder_dims_[i], |
| 386 | + cnn_module_kernels_[i] / 2}; | ||
| 382 | auto v = | 387 | auto v = |
| 383 | Ort::Value::CreateTensor<float>(allocator_, s.data(), s.size()); | 388 | Ort::Value::CreateTensor<float>(allocator_, s.data(), s.size()); |
| 384 | Fill(&v, 0); | 389 | Fill(&v, 0); |
| @@ -386,7 +391,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::GetEncoderInitStates() | @@ -386,7 +391,8 @@ std::vector<Ort::Value> OnlineZipformer2TransducerModel::GetEncoderInitStates() | ||
| 386 | } | 391 | } |
| 387 | 392 | ||
| 388 | { | 393 | { |
| 389 | - std::array<int64_t, 3> s{1, encoder_dims_[i], cnn_module_kernels_[i] / 2}; | 394 | + std::array<int64_t, 3> s{1, encoder_dims_[i], |
| 395 | + cnn_module_kernels_[i] / 2}; | ||
| 390 | auto v = | 396 | auto v = |
| 391 | Ort::Value::CreateTensor<float>(allocator_, s.data(), s.size()); | 397 | Ort::Value::CreateTensor<float>(allocator_, s.data(), s.size()); |
| 392 | Fill(&v, 0); | 398 | Fill(&v, 0); |
| @@ -17,9 +17,11 @@ static void PybindOnlineRecognizerResult(py::module *m) { | @@ -17,9 +17,11 @@ static void PybindOnlineRecognizerResult(py::module *m) { | ||
| 17 | .def_property_readonly( | 17 | .def_property_readonly( |
| 18 | "text", [](PyClass &self) -> std::string { return self.text; }) | 18 | "text", [](PyClass &self) -> std::string { return self.text; }) |
| 19 | .def_property_readonly( | 19 | .def_property_readonly( |
| 20 | - "tokens", [](PyClass &self) -> std::vector<std::string> { return self.tokens; }) | 20 | + "tokens", |
| 21 | + [](PyClass &self) -> std::vector<std::string> { return self.tokens; }) | ||
| 21 | .def_property_readonly( | 22 | .def_property_readonly( |
| 22 | - "timestamps", [](PyClass &self) -> std::vector<float> { return self.timestamps; }); | 23 | + "timestamps", |
| 24 | + [](PyClass &self) -> std::vector<float> { return self.timestamps; }); | ||
| 23 | } | 25 | } |
| 24 | 26 | ||
| 25 | static void PybindOnlineRecognizerConfig(py::module *m) { | 27 | static void PybindOnlineRecognizerConfig(py::module *m) { |