Fangjun Kuang
Committed by GitHub

Support windows (#17)

* add onnxruntime for windows
1 -name: test-linux-macos 1 +name: test-linux-macos-windows
2 2
3 on: 3 on:
4 push: 4 push:
5 branches: 5 branches:
6 - master 6 - master
7 paths: 7 paths:
8 - - '.github/workflows/test-linux-macos.yaml' 8 + - '.github/workflows/test-linux-macos-windows.yaml'
9 - 'CMakeLists.txt' 9 - 'CMakeLists.txt'
10 - 'cmake/**' 10 - 'cmake/**'
11 - 'sherpa-onnx/csrc/*' 11 - 'sherpa-onnx/csrc/*'
@@ -13,42 +13,54 @@ on: @@ -13,42 +13,54 @@ on:
13 branches: 13 branches:
14 - master 14 - master
15 paths: 15 paths:
16 - - '.github/workflows/test-linux-macos.yaml' 16 + - '.github/workflows/test-linux-macos-windows.yaml'
17 - 'CMakeLists.txt' 17 - 'CMakeLists.txt'
18 - 'cmake/**' 18 - 'cmake/**'
19 - 'sherpa-onnx/csrc/*' 19 - 'sherpa-onnx/csrc/*'
20 20
21 concurrency: 21 concurrency:
22 - group: test-linux-macos-${{ github.ref }} 22 + group: test-linux-macos-windows-${{ github.ref }}
23 cancel-in-progress: true 23 cancel-in-progress: true
24 24
25 permissions: 25 permissions:
26 contents: read 26 contents: read
27 27
28 jobs: 28 jobs:
29 - test-linux: 29 + test-linux-macos-windows:
30 runs-on: ${{ matrix.os }} 30 runs-on: ${{ matrix.os }}
31 strategy: 31 strategy:
32 fail-fast: false 32 fail-fast: false
33 matrix: 33 matrix:
34 - os: [ubuntu-latest, macos-latest] 34 + os: [ubuntu-latest, macos-latest, windows-latest]
35 35
36 steps: 36 steps:
37 - uses: actions/checkout@v2 37 - uses: actions/checkout@v2
38 with: 38 with:
39 fetch-depth: 0 39 fetch-depth: 0
40 40
  41 + # see https://github.com/microsoft/setup-msbuild
  42 + - name: Add msbuild to PATH
  43 + if: startsWith(matrix.os, 'windows')
  44 + uses: microsoft/setup-msbuild@v1.0.2
  45 +
41 - name: Download pretrained model and test-data (English) 46 - name: Download pretrained model and test-data (English)
42 shell: bash 47 shell: bash
43 run: | 48 run: |
44 git lfs install 49 git lfs install
45 - git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13 50 + GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
  51 + cd icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
  52 + ls -lh exp/onnx/*.onnx
  53 + git lfs pull --include "exp/onnx/*.onnx"
  54 + ls -lh exp/onnx/*.onnx
46 55
47 - name: Download pretrained model and test-data (Chinese) 56 - name: Download pretrained model and test-data (Chinese)
48 shell: bash 57 shell: bash
49 run: | 58 run: |
50 - git lfs install  
51 - git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2 59 + GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2
  60 + cd icefall_asr_wenetspeech_pruned_transducer_stateless2
  61 + ls -lh exp/*.onnx
  62 + git lfs pull --include "exp/*.onnx"
  63 + ls -lh exp/*.onnx
52 64
53 - name: Configure CMake 65 - name: Configure CMake
54 shell: bash 66 shell: bash
@@ -58,11 +70,21 @@ jobs: @@ -58,11 +70,21 @@ jobs:
58 cmake -D CMAKE_BUILD_TYPE=Release .. 70 cmake -D CMAKE_BUILD_TYPE=Release ..
59 71
60 - name: Build sherpa-onnx for ubuntu/macos 72 - name: Build sherpa-onnx for ubuntu/macos
  73 + if: startsWith(matrix.os, 'ubuntu') || startsWith(matrix.os, 'macos')
  74 + shell: bash
61 run: | 75 run: |
62 cd build 76 cd build
63 make VERBOSE=1 -j3 77 make VERBOSE=1 -j3
64 78
  79 + - name: Build sherpa-onnx for Windows
  80 + if: startsWith(matrix.os, 'windows')
  81 + shell: bash
  82 + run: |
  83 + cmake --build ./build --config Release
  84 +
65 - name: Run tests for ubuntu/macos (English) 85 - name: Run tests for ubuntu/macos (English)
  86 + if: startsWith(matrix.os, 'ubuntu') || startsWith(matrix.os, 'macos')
  87 + shell: bash
66 run: | 88 run: |
67 time ./build/bin/sherpa-onnx \ 89 time ./build/bin/sherpa-onnx \
68 ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \ 90 ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
@@ -91,7 +113,40 @@ jobs: @@ -91,7 +113,40 @@ jobs:
91 ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \ 113 ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
92 ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1221-135766-0002.wav 114 ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1221-135766-0002.wav
93 115
  116 + - name: Run tests for Windows (English)
  117 + if: startsWith(matrix.os, 'windows')
  118 + shell: bash
  119 + run: |
  120 + ./build/bin/Release/sherpa-onnx \
  121 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
  122 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/encoder.onnx \
  123 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/decoder.onnx \
  124 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner.onnx \
  125 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_encoder_proj.onnx \
  126 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
  127 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1089-134686-0001.wav
  128 +
  129 + ./build/bin/Release/sherpa-onnx \
  130 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
  131 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/encoder.onnx \
  132 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/decoder.onnx \
  133 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner.onnx \
  134 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_encoder_proj.onnx \
  135 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
  136 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1221-135766-0001.wav
  137 +
  138 + ./build/bin/Release/sherpa-onnx \
  139 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
  140 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/encoder.onnx \
  141 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/decoder.onnx \
  142 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner.onnx \
  143 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_encoder_proj.onnx \
  144 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
  145 + ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1221-135766-0002.wav
  146 +
94 - name: Run tests for ubuntu/macos (Chinese) 147 - name: Run tests for ubuntu/macos (Chinese)
  148 + if: startsWith(matrix.os, 'ubuntu') || startsWith(matrix.os, 'macos')
  149 + shell: bash
95 run: | 150 run: |
96 time ./build/bin/sherpa-onnx \ 151 time ./build/bin/sherpa-onnx \
97 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \ 152 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
@@ -119,3 +174,34 @@ jobs: @@ -119,3 +174,34 @@ jobs:
119 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \ 174 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
120 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \ 175 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
121 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000002.wav 176 ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000002.wav
  177 +
  178 + - name: Run tests for Windows (Chinese)
  179 + if: startsWith(matrix.os, 'windows')
  180 + shell: bash
  181 + run: |
  182 + ./build/bin/Release/sherpa-onnx \
  183 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
  184 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/encoder-epoch-10-avg-2.onnx \
  185 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/decoder-epoch-10-avg-2.onnx \
  186 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner-epoch-10-avg-2.onnx \
  187 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
  188 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
  189 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000000.wav
  190 +
  191 + ./build/bin/Release/sherpa-onnx \
  192 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
  193 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/encoder-epoch-10-avg-2.onnx \
  194 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/decoder-epoch-10-avg-2.onnx \
  195 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner-epoch-10-avg-2.onnx \
  196 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
  197 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
  198 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000001.wav
  199 +
  200 + ./build/bin/Release/sherpa-onnx \
  201 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
  202 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/encoder-epoch-10-avg-2.onnx \
  203 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/decoder-epoch-10-avg-2.onnx \
  204 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner-epoch-10-avg-2.onnx \
  205 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
  206 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
  207 + ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000002.wav
@@ -21,10 +21,6 @@ set(CMAKE_INSTALL_RPATH ${SHERPA_ONNX_RPATH_ORIGIN}) @@ -21,10 +21,6 @@ set(CMAKE_INSTALL_RPATH ${SHERPA_ONNX_RPATH_ORIGIN})
21 set(CMAKE_BUILD_RPATH ${SHERPA_ONNX_RPATH_ORIGIN}) 21 set(CMAKE_BUILD_RPATH ${SHERPA_ONNX_RPATH_ORIGIN})
22 22
23 set(BUILD_SHARED_LIBS ON) 23 set(BUILD_SHARED_LIBS ON)
24 -if(WIN32)  
25 - message(STATUS "Set BUILD_SHARED_LIBS to OFF for Windows")  
26 - set(BUILD_SHARED_LIBS OFF CACHE BOOL "" FORCE)  
27 -endif()  
28 24
29 if(NOT CMAKE_BUILD_TYPE) 25 if(NOT CMAKE_BUILD_TYPE)
30 message(STATUS "No CMAKE_BUILD_TYPE given, default to Release") 26 message(STATUS "No CMAKE_BUILD_TYPE given, default to Release")
@@ -20,25 +20,37 @@ the following links: @@ -20,25 +20,37 @@ the following links:
20 **HINT**: The script for exporting the Chinese model can be found at 20 **HINT**: The script for exporting the Chinese model can be found at
21 <https://github.com/k2-fsa/icefall/blob/master/egs/wenetspeech/ASR/pruned_transducer_stateless2/export.py> 21 <https://github.com/k2-fsa/icefall/blob/master/egs/wenetspeech/ASR/pruned_transducer_stateless2/export.py>
22 22
23 -# Usage 23 +## Build for Linux/macOS
24 24
25 ```bash 25 ```bash
26 git clone https://github.com/k2-fsa/sherpa-onnx 26 git clone https://github.com/k2-fsa/sherpa-onnx
27 cd sherpa-onnx 27 cd sherpa-onnx
28 mkdir build 28 mkdir build
29 cd build 29 cd build
30 -cmake .. 30 +cmake -DCMAKE_BUILD_TYPE=Release ..
31 make -j6 31 make -j6
32 cd .. 32 cd ..
33 ``` 33 ```
34 34
35 -## Download the pretrained model (English) 35 +## Build for Windows
36 36
37 -**Caution**: You have to run `git lfs install`. Otherwise, you will be **SAD** later. 37 +```bash
  38 +git clone https://github.com/k2-fsa/sherpa-onnx
  39 +cd sherpa-onnx
  40 +mkdir build
  41 +cd build
  42 +cmake -DCMAKE_BUILD_TYPE=Release ..
  43 +cmake --build . --config Release
  44 +cd ..
  45 +```
  46 +
  47 +## Download the pretrained model (English)
38 48
39 ```bash 49 ```bash
40 -git lfs install  
41 -git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13 50 +GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
  51 +cd icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
  52 +git lfs pull --include "exp/onnx/*.onnx"
  53 +cd ..
42 54
43 ./build/bin/sherpa-onnx --help 55 ./build/bin/sherpa-onnx --help
44 56
@@ -54,11 +66,11 @@ git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-trans @@ -54,11 +66,11 @@ git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-trans
54 66
55 ## Download the pretrained model (Chinese) 67 ## Download the pretrained model (Chinese)
56 68
57 -**Caution**: You have to run `git lfs install`. Otherwise, you will be **SAD** later.  
58 -  
59 ```bash 69 ```bash
60 -git lfs install  
61 -git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2 70 +GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2
  71 +cd icefall_asr_wenetspeech_pruned_transducer_stateless2
  72 +git lfs pull --include "exp/*.onnx"
  73 +cd ..
62 74
63 ./build/bin/sherpa-onnx --help 75 ./build/bin/sherpa-onnx --help
64 76
@@ -9,8 +9,8 @@ function(download_kaldi_native_fbank) @@ -9,8 +9,8 @@ function(download_kaldi_native_fbank)
9 9
10 include(FetchContent) 10 include(FetchContent)
11 11
12 - set(kaldi_native_fbank_URL "https://github.com/csukuangfj/kaldi-native-fbank/archive/refs/tags/v1.4.tar.gz")  
13 - set(kaldi_native_fbank_HASH "SHA256=771e08cb7edf512c828f4577d0d071a7993991d7e5415b11a843975dcf3e4d2d") 12 + set(kaldi_native_fbank_URL "https://github.com/csukuangfj/kaldi-native-fbank/archive/refs/tags/v1.5.tar.gz")
  13 + set(kaldi_native_fbank_HASH "SHA256=632c68adf8f6de831198a2a0e4c1920b31d5a1de263dcac5105be9da99f40bd5")
14 14
15 set(KALDI_NATIVE_FBANK_BUILD_TESTS OFF CACHE BOOL "" FORCE) 15 set(KALDI_NATIVE_FBANK_BUILD_TESTS OFF CACHE BOOL "" FORCE)
16 set(KALDI_NATIVE_FBANK_BUILD_PYTHON OFF CACHE BOOL "" FORCE) 16 set(KALDI_NATIVE_FBANK_BUILD_PYTHON OFF CACHE BOOL "" FORCE)
@@ -36,4 +36,4 @@ function(download_kaldi_native_fbank) @@ -36,4 +36,4 @@ function(download_kaldi_native_fbank)
36 ) 36 )
37 endfunction() 37 endfunction()
38 38
39 -download_kaldi_native_fbank()  
  39 +download_kaldi_native_fbank()
@@ -31,6 +31,15 @@ function(download_onnxruntime) @@ -31,6 +31,15 @@ function(download_onnxruntime)
31 # 31 #
32 # ./include 32 # ./include
33 # It contains all the needed header files 33 # It contains all the needed header files
  34 + elseif(WIN32)
  35 + set(onnxruntime_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.12.1/onnxruntime-win-x64-1.12.1.zip")
  36 + set(onnxruntime_HASH "SHA256=c69650ba14aeae5903b05256a82e77164fff2de992072bc695a3838c1830b85a")
  37 + # After downloading, it contains:
  38 + # ./lib/onnxruntime.{dll,lib,pdb}
  39 + # ./lib/onnxruntime_providers_shared.{dll,lib,pdb}
  40 + #
  41 + # ./include
  42 + # It contains all the needed header files
34 else() 43 else()
35 -message(FATAL_ERROR "Only support Linux and macOS at present. Will support other OSes later") 44 +message(FATAL_ERROR "Only support Linux, macOS, and Windows at present. Will support other OSes later")
36 endif() 45 endif()
@@ -59,6 +68,17 @@ function(download_onnxruntime) @@ -59,6 +68,17 @@ function(download_onnxruntime)
59 IMPORTED_LOCATION ${location_onnxruntime} 68 IMPORTED_LOCATION ${location_onnxruntime}
60 INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_SOURCE_DIR}/include" 69 INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_SOURCE_DIR}/include"
61 ) 70 )
  71 + if(WIN32)
  72 + set_property(TARGET onnxruntime
  73 + PROPERTY
  74 + IMPORTED_IMPLIB "${onnxruntime_SOURCE_DIR}/lib/onnxruntime.lib"
  75 + )
  76 +
  77 + file(COPY ${onnxruntime_SOURCE_DIR}/lib/onnxruntime.dll
  78 + DESTINATION
  79 + ${CMAKE_BINARY_DIR}/bin/${CMAKE_BUILD_TYPE}
  80 + )
  81 + endif()
62 endfunction() 82 endfunction()
63 83
64 download_onnxruntime() 84 download_onnxruntime()
@@ -21,8 +21,26 @@ @@ -21,8 +21,26 @@
21 #include <utility> 21 #include <utility>
22 #include <vector> 22 #include <vector>
23 23
  24 +#ifdef _MSC_VER
  25 +// For ToWide() below
  26 +#include <codecvt>
  27 +#include <locale>
  28 +#endif
  29 +
24 namespace sherpa_onnx { 30 namespace sherpa_onnx {
25 31
  32 +#ifdef _MSC_VER
  33 +// See
  34 +// https://stackoverflow.com/questions/2573834/c-convert-string-or-char-to-wstring-or-wchar-t
  35 +static std::wstring ToWide(const std::string &s) {
  36 + std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
  37 + return converter.from_bytes(s);
  38 +}
  39 +#define SHERPA_MAYBE_WIDE(s) ToWide(s)
  40 +#else
  41 +#define SHERPA_MAYBE_WIDE(s) s
  42 +#endif
  43 +
26 /** 44 /**
27 * Get the input names of a model. 45 * Get the input names of a model.
28 * 46 *
@@ -85,8 +103,8 @@ RnntModel::RnntModel(const std::string &encoder_filename, @@ -85,8 +103,8 @@ RnntModel::RnntModel(const std::string &encoder_filename,
85 } 103 }
86 104
87 void RnntModel::InitEncoder(const std::string &filename) { 105 void RnntModel::InitEncoder(const std::string &filename) {
88 - encoder_sess_ =  
89 - std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_); 106 + encoder_sess_ = std::make_unique<Ort::Session>(
  107 + env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);
90 GetInputNames(encoder_sess_.get(), &encoder_input_names_, 108 GetInputNames(encoder_sess_.get(), &encoder_input_names_,
91 &encoder_input_names_ptr_); 109 &encoder_input_names_ptr_);
92 110
@@ -95,8 +113,8 @@ void RnntModel::InitEncoder(const std::string &filename) { @@ -95,8 +113,8 @@ void RnntModel::InitEncoder(const std::string &filename) {
95 } 113 }
96 114
97 void RnntModel::InitDecoder(const std::string &filename) { 115 void RnntModel::InitDecoder(const std::string &filename) {
98 - decoder_sess_ =  
99 - std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_); 116 + decoder_sess_ = std::make_unique<Ort::Session>(
  117 + env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);
100 118
101 GetInputNames(decoder_sess_.get(), &decoder_input_names_, 119 GetInputNames(decoder_sess_.get(), &decoder_input_names_,
102 &decoder_input_names_ptr_); 120 &decoder_input_names_ptr_);
@@ -106,8 +124,8 @@ void RnntModel::InitDecoder(const std::string &filename) { @@ -106,8 +124,8 @@ void RnntModel::InitDecoder(const std::string &filename) {
106 } 124 }
107 125
108 void RnntModel::InitJoiner(const std::string &filename) { 126 void RnntModel::InitJoiner(const std::string &filename) {
109 - joiner_sess_ =  
110 - std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_); 127 + joiner_sess_ = std::make_unique<Ort::Session>(
  128 + env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);
111 129
112 GetInputNames(joiner_sess_.get(), &joiner_input_names_, 130 GetInputNames(joiner_sess_.get(), &joiner_input_names_,
113 &joiner_input_names_ptr_); 131 &joiner_input_names_ptr_);
@@ -117,8 +135,8 @@ void RnntModel::InitJoiner(const std::string &filename) { @@ -117,8 +135,8 @@ void RnntModel::InitJoiner(const std::string &filename) {
117 } 135 }
118 136
119 void RnntModel::InitJoinerEncoderProj(const std::string &filename) { 137 void RnntModel::InitJoinerEncoderProj(const std::string &filename) {
120 - joiner_encoder_proj_sess_ =  
121 - std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_); 138 + joiner_encoder_proj_sess_ = std::make_unique<Ort::Session>(
  139 + env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);
122 140
123 GetInputNames(joiner_encoder_proj_sess_.get(), 141 GetInputNames(joiner_encoder_proj_sess_.get(),
124 &joiner_encoder_proj_input_names_, 142 &joiner_encoder_proj_input_names_,
@@ -130,8 +148,8 @@ void RnntModel::InitJoinerEncoderProj(const std::string &filename) { @@ -130,8 +148,8 @@ void RnntModel::InitJoinerEncoderProj(const std::string &filename) {
130 } 148 }
131 149
132 void RnntModel::InitJoinerDecoderProj(const std::string &filename) { 150 void RnntModel::InitJoinerDecoderProj(const std::string &filename) {
133 - joiner_decoder_proj_sess_ =  
134 - std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_); 151 + joiner_decoder_proj_sess_ = std::make_unique<Ort::Session>(
  152 + env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);
135 153
136 GetInputNames(joiner_decoder_proj_sess_.get(), 154 GetInputNames(joiner_decoder_proj_sess_.get(),
137 &joiner_decoder_proj_input_names_, 155 &joiner_decoder_proj_input_names_,