Authored by Fangjun Kuang, 2022-10-13 17:30:30 +0800
Committed by GitHub, 2022-10-13 17:30:30 +0800
Commit 4614d02d6dc8817fba68c01b33297af0074f238c
1 parent c70f5625

Support windows (#17)
* add onnxruntime for windows
Showing 6 changed files with 168 additions and 36 deletions
.github/workflows/test-linux-macos.yaml → .github/workflows/test-linux-macos-windows.yaml
CMakeLists.txt
README.md
cmake/kaldi-native-fbank.cmake
cmake/onnxruntime.cmake
sherpa-onnx/csrc/rnnt-model.cc
.github/workflows/test-linux-macos.yaml → .github/workflows/test-linux-macos-windows.yaml
-name: test-linux-macos
+name: test-linux-macos-windows

 on:
   push:
     branches:
       - master
     paths:
-      - '.github/workflows/test-linux-macos.yaml'
+      - '.github/workflows/test-linux-macos-windows.yaml'
       - 'CMakeLists.txt'
       - 'cmake/**'
       - 'sherpa-onnx/csrc/*'
...
@@ -13,42 +13,54 @@ on:
     branches:
       - master
     paths:
-      - '.github/workflows/test-linux-macos.yaml'
+      - '.github/workflows/test-linux-macos-windows.yaml'
       - 'CMakeLists.txt'
       - 'cmake/**'
       - 'sherpa-onnx/csrc/*'

 concurrency:
-  group: test-linux-macos-${{ github.ref }}
+  group: test-linux-macos-windows-${{ github.ref }}
   cancel-in-progress: true

 permissions:
   contents: read

 jobs:
-  test-linux:
+  test-linux-macos-windows:
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest, macos-latest]
+        os: [ubuntu-latest, macos-latest, windows-latest]

     steps:
       - uses: actions/checkout@v2
         with:
           fetch-depth: 0

+      # see https://github.com/microsoft/setup-msbuild
+      - name: Add msbuild to PATH
+        if: startsWith(matrix.os, 'windows')
+        uses: microsoft/setup-msbuild@v1.0.2

       - name: Download pretrained model and test-data (English)
         shell: bash
         run: |
           git lfs install
-          git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
+          GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
+          cd icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
+          ls -lh exp/onnx/*.onnx
+          git lfs pull --include "exp/onnx/*.onnx"
+          ls -lh exp/onnx/*.onnx

       - name: Download pretrained model and test-data (Chinese)
         shell: bash
         run: |
           git lfs install
-          git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2
+          GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2
+          cd icefall_asr_wenetspeech_pruned_transducer_stateless2
+          ls -lh exp/*.onnx
+          git lfs pull --include "exp/*.onnx"
+          ls -lh exp/*.onnx

       - name: Configure CMake
         shell: bash
...
@@ -58,11 +70,21 @@ jobs:
           cmake -D CMAKE_BUILD_TYPE=Release ..

       - name: Build sherpa-onnx for ubuntu/macos
+        if: startsWith(matrix.os, 'ubuntu') || startsWith(matrix.os, 'macos')
         shell: bash
         run: |
           cd build
           make VERBOSE=1 -j3

+      - name: Build sherpa-onnx for Windows
+        if: startsWith(matrix.os, 'windows')
+        shell: bash
+        run: |
+          cmake --build ./build --config Release

       - name: Run tests for ubuntu/macos (English)
+        if: startsWith(matrix.os, 'ubuntu') || startsWith(matrix.os, 'macos')
         shell: bash
         run: |
           time ./build/bin/sherpa-onnx \
             ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
...
@@ -91,7 +113,40 @@ jobs:
             ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
             ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1221-135766-0002.wav

+      - name: Run tests for Windows (English)
+        if: startsWith(matrix.os, 'windows')
+        shell: bash
+        run: |
+          ./build/bin/Release/sherpa-onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/encoder.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/decoder.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_encoder_proj.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1089-134686-0001.wav
+
+          ./build/bin/Release/sherpa-onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/encoder.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/decoder.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_encoder_proj.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1221-135766-0001.wav
+
+          ./build/bin/Release/sherpa-onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/data/lang_bpe_500/tokens.txt \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/encoder.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/decoder.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_encoder_proj.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/exp/onnx/joiner_decoder_proj.onnx \
+            ./icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13/test_wavs/1221-135766-0002.wav

       - name: Run tests for ubuntu/macos (Chinese)
+        if: startsWith(matrix.os, 'ubuntu') || startsWith(matrix.os, 'macos')
         shell: bash
         run: |
           time ./build/bin/sherpa-onnx \
             ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
...
@@ -119,3 +174,34 @@ jobs:
             ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
             ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
             ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000002.wav

+      - name: Run tests for windows (Chinese)
+        if: startsWith(matrix.os, 'windows')
+        shell: bash
+        run: |
+          ./build/bin/Release/sherpa-onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/encoder-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/decoder-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000000.wav
+
+          ./build/bin/Release/sherpa-onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/encoder-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/decoder-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000001.wav
+
+          ./build/bin/Release/sherpa-onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/data/lang_char/tokens.txt \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/encoder-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/decoder-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_encoder_proj-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/exp/joiner_decoder_proj-epoch-10-avg-2.onnx \
+            ./icefall_asr_wenetspeech_pruned_transducer_stateless2/test_wavs/DEV_T0000000002.wav
...
CMakeLists.txt
...
@@ -21,10 +21,6 @@ set(CMAKE_INSTALL_RPATH ${SHERPA_ONNX_RPATH_ORIGIN})
 set(CMAKE_BUILD_RPATH ${SHERPA_ONNX_RPATH_ORIGIN})

 set(BUILD_SHARED_LIBS ON)
-if(WIN32)
-  message(STATUS "Set BUILD_SHARED_LIBS to OFF for Windows")
-  set(BUILD_SHARED_LIBS OFF CACHE BOOL "" FORCE)
-endif()

 if(NOT CMAKE_BUILD_TYPE)
   message(STATUS "No CMAKE_BUILD_TYPE given, default to Release")
...
README.md
...
@@ -20,25 +20,37 @@ the following links:
 **HINT**: The script for exporting the Chinese model can be found at
 <https://github.com/k2-fsa/icefall/blob/master/egs/wenetspeech/ASR/pruned_transducer_stateless2/export.py>

 # Usage

+## Build for Linux/macOS

 ```bash
 git clone https://github.com/k2-fsa/sherpa-onnx
 cd sherpa-onnx
 mkdir build
 cd build
-cmake ..
+cmake -DCMAKE_BUILD_TYPE=Release ..
 make -j6
 cd ..
 ```

-## Download the pretrained model (English)
+## Build for Windows

 **Caution**: You have to run `git lfs install`. Otherwise, you will be **SAD** later.

 ```bash
+git clone https://github.com/k2-fsa/sherpa-onnx
+cd sherpa-onnx
+mkdir build
+cd build
+cmake -DCMAKE_BUILD_TYPE=Release ..
+cmake --build . --config Release
+cd ..
+```

+## Download the pretrained model (English)

+```bash
 git lfs install
-git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
+GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
+cd icefall-asr-librispeech-pruned-transducer-stateless3-2022-05-13
+git lfs pull --include "exp/onnx/*.onnx"
 cd ..
 ./build/bin/sherpa-onnx --help
...
@@ -54,11 +66,11 @@ git clone https://huggingface.co/csukuangfj/icefall-asr-librispeech-pruned-trans
 ## Download the pretrained model (Chinese)

 **Caution**: You have to run `git lfs install`. Otherwise, you will be **SAD** later.

 ```bash
 git lfs install
-git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2
+GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/luomingshuang/icefall_asr_wenetspeech_pruned_transducer_stateless2
+cd icefall_asr_wenetspeech_pruned_transducer_stateless2
+git lfs pull --include "exp/*.onnx"
 cd ..
 ./build/bin/sherpa-onnx --help
...
cmake/kaldi-native-fbank.cmake
...
@@ -9,8 +9,8 @@ function(download_kaldi_native_fbank)
   include(FetchContent)

-  set(kaldi_native_fbank_URL "https://github.com/csukuangfj/kaldi-native-fbank/archive/refs/tags/v1.4.tar.gz")
-  set(kaldi_native_fbank_HASH "SHA256=771e08cb7edf512c828f4577d0d071a7993991d7e5415b11a843975dcf3e4d2d")
+  set(kaldi_native_fbank_URL "https://github.com/csukuangfj/kaldi-native-fbank/archive/refs/tags/v1.5.tar.gz")
+  set(kaldi_native_fbank_HASH "SHA256=632c68adf8f6de831198a2a0e4c1920b31d5a1de263dcac5105be9da99f40bd5")

   set(KALDI_NATIVE_FBANK_BUILD_TESTS OFF CACHE BOOL "" FORCE)
   set(KALDI_NATIVE_FBANK_BUILD_PYTHON OFF CACHE BOOL "" FORCE)
...
@@ -36,4 +36,4 @@ function(download_kaldi_native_fbank)
   )
 endfunction()

-download_kaldi_native_fbank()
\ No newline at end of file
+download_kaldi_native_fbank()
...
cmake/onnxruntime.cmake
...
@@ -31,6 +31,15 @@ function(download_onnxruntime)
     #
     #  ./include
     #    It contains all the needed header files
+  elseif(WIN32)
+    set(onnxruntime_URL  "https://github.com/microsoft/onnxruntime/releases/download/v1.12.1/onnxruntime-win-x64-1.12.1.zip")
+    set(onnxruntime_HASH "SHA256=c69650ba14aeae5903b05256a82e77164fff2de992072bc695a3838c1830b85a")
+    # After downloading, it contains:
+    #  ./lib/onnxruntime.{dll,lib,pdb}
+    #  ./lib/onnxruntime_providers_shared.{dll,lib,pdb}
+    #
+    #  ./include
+    #    It contains all the needed header files
   else()
     message(FATAL_ERROR "Only support Linux and macOS at present. Will support other OSes later")
   endif()
...
@@ -59,6 +68,17 @@
     IMPORTED_LOCATION ${location_onnxruntime}
     INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_SOURCE_DIR}/include"
   )

+  if(WIN32)
+    set_property(TARGET onnxruntime
+      PROPERTY
+        IMPORTED_IMPLIB "${onnxruntime_SOURCE_DIR}/lib/onnxruntime.lib"
+    )
+
+    file(COPY ${onnxruntime_SOURCE_DIR}/lib/onnxruntime.dll
+      DESTINATION ${CMAKE_BINARY_DIR}/bin/${CMAKE_BUILD_TYPE}
+    )
+  endif()
 endfunction()

 download_onnxruntime()
...
sherpa-onnx/csrc/rnnt-model.cc
...
@@ -21,8 +21,26 @@
 #include <utility>
 #include <vector>

+#ifdef _MSC_VER
+// For ToWide() below
+#include <codecvt>
+#include <locale>
+#endif

 namespace sherpa_onnx {

+#ifdef _MSC_VER
+// See
+// https://stackoverflow.com/questions/2573834/c-convert-string-or-char-to-wstring-or-wchar-t
+static std::wstring ToWide(const std::string &s) {
+  std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
+  return converter.from_bytes(s);
+}
+#define SHERPA_MAYBE_WIDE(s) ToWide(s)
+#else
+#define SHERPA_MAYBE_WIDE(s) s
+#endif

 /**
  * Get the input names of a model.
  *
...
@@ -85,8 +103,8 @@ RnntModel::RnntModel(const std::string &encoder_filename,
 }

 void RnntModel::InitEncoder(const std::string &filename) {
-  encoder_sess_ = std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_);
+  encoder_sess_ = std::make_unique<Ort::Session>(env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);

   GetInputNames(encoder_sess_.get(), &encoder_input_names_, &encoder_input_names_ptr_);
...
@@ -95,8 +113,8 @@ void RnntModel::InitEncoder(const std::string &filename) {
 }

 void RnntModel::InitDecoder(const std::string &filename) {
-  decoder_sess_ = std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_);
+  decoder_sess_ = std::make_unique<Ort::Session>(env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);

   GetInputNames(decoder_sess_.get(), &decoder_input_names_, &decoder_input_names_ptr_);
...
@@ -106,8 +124,8 @@ void RnntModel::InitDecoder(const std::string &filename) {
 }

 void RnntModel::InitJoiner(const std::string &filename) {
-  joiner_sess_ = std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_);
+  joiner_sess_ = std::make_unique<Ort::Session>(env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);

   GetInputNames(joiner_sess_.get(), &joiner_input_names_, &joiner_input_names_ptr_);
...
@@ -117,8 +135,8 @@ void RnntModel::InitJoiner(const std::string &filename) {
 }

 void RnntModel::InitJoinerEncoderProj(const std::string &filename) {
-  joiner_encoder_proj_sess_ = std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_);
+  joiner_encoder_proj_sess_ = std::make_unique<Ort::Session>(env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);

   GetInputNames(joiner_encoder_proj_sess_.get(), &joiner_encoder_proj_input_names_,
...
@@ -130,8 +148,8 @@ void RnntModel::InitJoinerEncoderProj(const std::string &filename) {
 }

 void RnntModel::InitJoinerDecoderProj(const std::string &filename) {
-  joiner_decoder_proj_sess_ = std::make_unique<Ort::Session>(env_, filename.c_str(), sess_opts_);
+  joiner_decoder_proj_sess_ = std::make_unique<Ort::Session>(env_, SHERPA_MAYBE_WIDE(filename).c_str(), sess_opts_);

   GetInputNames(joiner_decoder_proj_sess_.get(), &joiner_decoder_proj_input_names_,
...
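Why the SHERPA_MAYBE_WIDE wrapper is needed: onnxruntime's Ort::Session constructor takes the model path as ORTCHAR_T*, which is wchar_t on Windows and char elsewhere, so the UTF-8 std::string filename has to be widened before reaching the constructor when building with MSVC. The standalone sketch below is not part of this commit; ToWide() and SHERPA_MAYBE_WIDE are copied from the diff above, while the main() body, the "demo" log id and the "encoder.onnx" path are placeholders for illustration only.

// Minimal sketch of the filename-widening pattern used in rnnt-model.cc.
// Assumes onnxruntime_cxx_api.h is on the include path and that a model
// file exists at the placeholder path "encoder.onnx".
#include <onnxruntime_cxx_api.h>

#include <string>

#ifdef _MSC_VER
#include <codecvt>
#include <locale>

static std::wstring ToWide(const std::string &s) {
  std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
  return converter.from_bytes(s);
}
#define SHERPA_MAYBE_WIDE(s) ToWide(s)
#else
#define SHERPA_MAYBE_WIDE(s) s
#endif

int main() {
  Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "demo");  // "demo" is a placeholder log id
  Ort::SessionOptions opts;

  std::string filename = "encoder.onnx";  // placeholder model path

  // On Windows this passes a const wchar_t*, elsewhere a const char*,
  // matching the ORTCHAR_T type expected by Ort::Session.
  Ort::Session sess(env, SHERPA_MAYBE_WIDE(filename).c_str(), opts);
  return 0;
}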