Fix sherpa-onnx-node-version in node examples (#879)
Committed by GitHub

Showing 19 changed files with 36 additions and 56 deletions.

@@ -7,5 +7,5 @@ for usages.

 ||Method|Support multiple threads|Minimum required node version|
 |---|---|---|---|
-|this package| https://github.com/nodejs/node-addon-api | Yes | v10|
+|this package| https://github.com/nodejs/node-addon-api | Yes | v16|
 |https://www.npmjs.com/package/sherpa-onnx| WebAssembly | No | v18|

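The table above states a minimum of Node v16 for the node-addon-api based package. As an illustration only (nothing like this appears in the diff), a script could enforce that minimum at startup:

// Illustration only -- not part of this commit. Enforces the minimum Node
// version listed in the table above (v16 for the node-addon-api build).
const major = Number(process.versions.node.split('.')[0]);
if (major < 16) {
  throw new Error(
      `sherpa-onnx-node requires Node >= 16, found ${process.versions.node}`);
}
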
@@ -42,7 +42,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [macos-11, macos-14, ubuntu-20.04, ubuntu-22.04, windows-latest]
+        os: [macos-latest, macos-14, ubuntu-20.04, ubuntu-22.04, windows-latest]
         node-version: ["16", "17", "18", "19", "21", "22"]

     steps:
@@ -73,6 +73,8 @@ jobs:

         export DYLD_LIBRARY_PATH=$PWD/node_modules/sherpa-onnx-darwin-x64:$DYLD_LIBRARY_PATH
         export DYLD_LIBRARY_PATH=$PWD/node_modules/sherpa-onnx-darwin-arm64:$DYLD_LIBRARY_PATH
+        export LD_LIBRARY_PATH=$PWD/node_modules/sherpa-onnx-darwin-x64:$DYLD_LIBRARY_PATH
+        export LD_LIBRARY_PATH=$PWD/node_modules/sherpa-onnx-darwin-arm64:$DYLD_LIBRARY_PATH
         export LD_LIBRARY_PATH=$PWD/node_modules/sherpa-onnx-linux-x64:$LD_LIBRARY_PATH
         export LD_LIBRARY_PATH=$PWD/node_modules/sherpa-onnx-linux-arm64:$LD_LIBRARY_PATH

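The exports above add the platform-specific packages under node_modules to the library search path, presumably so the native addon can locate the prebuilt sherpa-onnx shared libraries at load time. As an illustration only (not part of the workflow), the same paths can be checked from Node before loading the addon; the directory names follow the ones used in the workflow:

// Illustration only -- not part of this commit. Checks whether the loader-path
// variable used in the CI workflow includes the platform-specific package directory
// (e.g. sherpa-onnx-darwin-arm64 or sherpa-onnx-linux-x64).
const path = require('path');

const pkgDir = {
  darwin: 'sherpa-onnx-darwin-' + process.arch,
  linux: 'sherpa-onnx-linux-' + process.arch,
}[process.platform];

if (pkgDir) {
  const envVar = process.platform === 'darwin' ? 'DYLD_LIBRARY_PATH' : 'LD_LIBRARY_PATH';
  const wanted = path.join(process.cwd(), 'node_modules', pkgDir);
  const entries = (process.env[envVar] || '').split(path.delimiter);
  console.log(`${envVar} contains ${wanted}:`, entries.includes(wanted));
}
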
 # Introduction

-Note: You need `Node >= 10`.
+Note: You need `Node >= 16`.

 This repo contains examples for NodeJS.
 It uses [node-addon-api](https://github.com/nodejs/node-addon-api) to wrap

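The example files changed below all follow the same pattern. As a condensed sketch (not itself part of the diff, and using only calls that appear in it), a non-streaming decode looks like this; the recognizer `config` object is elided in the hunks, so it is left as a parameter here:

// Illustration only -- a condensed version of the non-streaming decode examples
// changed in this commit, using only calls that appear in the diffs below.
// The recognizer `config` object is elided in those hunks, so it is a parameter here.
const sherpa_onnx = require('sherpa-onnx-node');

function decodeFile(config, waveFilename) {
  const recognizer = new sherpa_onnx.OfflineRecognizer(config);

  const start = Date.now();
  const stream = recognizer.createStream();
  const wave = sherpa_onnx.readWave(waveFilename);
  stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
  recognizer.decode(stream);
  const result = recognizer.getResult(stream);
  const stop = Date.now();

  // Real-time factor: processing time divided by audio duration.
  const elapsed_seconds = (stop - start) / 1000;
  const duration = wave.samples.length / wave.sampleRate;
  console.log('RTF:', (elapsed_seconds / duration).toFixed(3));
  return result;
}
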
 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -27,14 +26,14 @@ const waveFilename =

 const recognizer = new sherpa_onnx.OfflineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});

 recognizer.decode(stream);
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -26,14 +24,14 @@ const waveFilename =

 const recognizer = new sherpa_onnx.OfflineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});

 recognizer.decode(stream);
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -30,14 +28,14 @@ const waveFilename = './sherpa-onnx-zipformer-en-2023-04-01/test_wavs/1.wav';

 const recognizer = new sherpa_onnx.OfflineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});

 recognizer.decode(stream);
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -26,14 +24,14 @@ const waveFilename = './sherpa-onnx-whisper-tiny.en/test_wavs/0.wav';

 const recognizer = new sherpa_onnx.OfflineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});

 recognizer.decode(stream);
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -28,7 +26,7 @@ const waveFilename =

 const recognizer = new sherpa_onnx.OnlineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
@@ -40,7 +38,7 @@ while (recognizer.isReady(stream)) {
   recognizer.decode(stream);
 }
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -31,7 +29,7 @@ const waveFilename =

 const recognizer = new sherpa_onnx.OnlineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
@@ -43,7 +41,7 @@ while (recognizer.isReady(stream)) {
   recognizer.decode(stream);
 }
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -29,7 +27,7 @@ const waveFilename =

 const recognizer = new sherpa_onnx.OnlineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
@@ -41,7 +39,7 @@ while (recognizer.isReady(stream)) {
   recognizer.decode(stream);
 }
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/asr-models
@@ -32,7 +30,7 @@ const waveFilename =

 const recognizer = new sherpa_onnx.OnlineRecognizer(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = recognizer.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
@@ -44,7 +42,7 @@ while (recognizer.isReady(stream)) {
   recognizer.decode(stream);
 }
 result = recognizer.getResult(stream)
-let stop = performance.now();
+let stop = Date.now();
 console.log('Done')

 const elapsed_seconds = (stop - start) / 1000;

@@ -38,12 +38,12 @@ const testWaves = [
 console.log('------');

 for (let filename of testWaves) {
-  const start = performance.now();
+  const start = Date.now();
   const stream = at.createStream();
   const wave = sherpa_onnx.readWave(filename);
   stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
   const events = at.compute(stream);
-  const stop = performance.now();
+  const stop = Date.now();

   const elapsed_seconds = (stop - start) / 1000;
   const duration = wave.samples.length / wave.sampleRate;

@@ -41,12 +41,12 @@ const testWaves = [
 console.log('------');

 for (let filename of testWaves) {
-  const start = performance.now();
+  const start = Date.now();
   const stream = at.createStream();
   const wave = sherpa_onnx.readWave(filename);
   stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
   const events = at.compute(stream);
-  const stop = performance.now();
+  const stop = Date.now();

   const elapsed_seconds = (stop - start) / 1000;
   const duration = wave.samples.length / wave.sampleRate;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;
-

 // Please download test files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/kws-models
@@ -34,7 +32,7 @@ const waveFilename =

 const kws = new sherpa_onnx.KeywordSpotter(config);
 console.log('Started')
-let start = performance.now();
+let start = Date.now();
 const stream = kws.createStream();
 const wave = sherpa_onnx.readWave(waveFilename);
 stream.acceptWaveform({sampleRate: wave.sampleRate, samples: wave.samples});
@@ -50,7 +48,7 @@ while (kws.isReady(stream)) {
   }
   kws.decode(stream);
 }
-let stop = performance.now();
+let stop = Date.now();

 console.log('Done')

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;

 // please download model files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/tts-models
@@ -24,9 +23,9 @@ const tts = createOfflineTts();

 const text = 'Alles hat ein Ende, nur die Wurst hat zwei.'

-let start = performance.now();
+let start = Date.now();
 const audio = tts.generate({text: text, sid: 0, speed: 1.0});
-let stop = performance.now();
+let stop = Date.now();
 const elapsed_seconds = (stop - start) / 1000;
 const duration = audio.samples.length / audio.sampleRate;
 const real_time_factor = elapsed_seconds / duration;

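The TTS hunks above and below time `tts.generate(...)` and compute a real-time factor, but what happens to the generated samples is outside the lines shown. As an assumption (the call does not appear in this diff), the package's wave writer could be used to keep the output:

// Assumption: sherpa-onnx-node exposes writeWave(filename, {samples, sampleRate});
// `tts` and `text` are the objects defined in the example above.
const audio = tts.generate({text: text, sid: 0, speed: 1.0});
sherpa_onnx.writeWave('test-tts.wav', {samples: audio.samples, sampleRate: audio.sampleRate});
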
 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;

 // please download model files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/tts-models
@@ -27,9 +26,9 @@ const text =
     'Today as always, men fall into two groups: slaves and free men. Whoever does not have two-thirds of his day for himself, is a slave, whatever he may be: a statesman, a businessman, an official, or a scholar.'


-let start = performance.now();
+let start = Date.now();
 const audio = tts.generate({text: text, sid: 0, speed: 1.0});
-let stop = performance.now();
+let stop = Date.now();
 const elapsed_seconds = (stop - start) / 1000;
 const duration = audio.samples.length / audio.sampleRate;
 const real_time_factor = elapsed_seconds / duration;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;

 // please download model files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/tts-models
@@ -29,9 +28,9 @@ const tts = createOfflineTts();
 const text =
     '他在长沙出生,长白山长大,去过长江,现在他是一个银行的行长,主管行政工作。有困难,请拨110,或者13020240513。今天是2024年5月13号, 他上个月的工资是12345块钱。'

-let start = performance.now();
+let start = Date.now();
 const audio = tts.generate({text: text, sid: 88, speed: 1.0});
-let stop = performance.now();
+let stop = Date.now();
 const elapsed_seconds = (stop - start) / 1000;
 const duration = audio.samples.length / audio.sampleRate;
 const real_time_factor = elapsed_seconds / duration;

 // Copyright (c) 2024 Xiaomi Corporation
 const sherpa_onnx = require('sherpa-onnx-node');
-const performance = require('perf_hooks').performance;

 // please download model files from
 // https://github.com/k2-fsa/sherpa-onnx/releases/tag/tts-models
@@ -29,9 +28,9 @@ const tts = createOfflineTts();
 const text =
     '当夜幕降临,星光点点,伴随着微风拂面,我在静谧中感受着时光的流转,思念如涟漪荡漾,梦境如画卷展开,我与自然融为一体,沉静在这片宁静的美丽之中,感受着生命的奇迹与温柔。2024年5月13号,拨打110或者18920240513。123456块钱。'

-let start = performance.now();
+let start = Date.now();
 const audio = tts.generate({text: text, sid: 2, speed: 1.0});
-let stop = performance.now();
+let stop = Date.now();
 const elapsed_seconds = (stop - start) / 1000;
 const duration = audio.samples.length / audio.sampleRate;
 const real_time_factor = elapsed_seconds / duration;