Wei Kang
Committed by GitHub

format to linux file (\r\n -> \n) (#320)

ENTRY_POINT = ./
LIB_SRC_DIR := ../sherpa-onnx/java-api/src/com/k2fsa/sherpa/onnx

LIB_FILES = \
	$(LIB_SRC_DIR)/EndpointRule.java \
	$(LIB_SRC_DIR)/EndpointConfig.java \
	$(LIB_SRC_DIR)/FeatureConfig.java \
	$(LIB_SRC_DIR)/OnlineLMConfig.java \
	$(LIB_SRC_DIR)/OnlineTransducerModelConfig.java \
	$(LIB_SRC_DIR)/OnlineParaformerModelConfig.java \
	$(LIB_SRC_DIR)/OnlineModelConfig.java \
	$(LIB_SRC_DIR)/OnlineRecognizerConfig.java \
	$(LIB_SRC_DIR)/OnlineStream.java \
	$(LIB_SRC_DIR)/OnlineRecognizer.java

WEBSOCKET_DIR := ./src/websocketsrv

WEBSOCKET_FILES = \
	$(WEBSOCKET_DIR)/ConnectionData.java \
	$(WEBSOCKET_DIR)/DecoderThreadHandler.java \
	$(WEBSOCKET_DIR)/StreamThreadHandler.java \
	$(WEBSOCKET_DIR)/AsrWebsocketServer.java \
	$(WEBSOCKET_DIR)/AsrWebsocketClient.java

LIB_BUILD_DIR = ./lib
EXAMPLE_FILE = DecodeFile.java
EXAMPLE_Mic = DecodeMic.java
JAVAC = javac
BUILD_DIR = build
RUNJFLAGS = -Dfile.encoding=utf-8

vpath %.class $(BUILD_DIR)
vpath %.java src

buildfile:
	$(JAVAC) -cp lib/sherpaonnx.jar -d $(BUILD_DIR) -encoding UTF-8 src/$(EXAMPLE_FILE)

buildmic:
	$(JAVAC) -cp lib/sherpaonnx.jar -d $(BUILD_DIR) -encoding UTF-8 src/$(EXAMPLE_Mic)

rebuild: clean all

.PHONY: clean run downjar

downjar:
	wget https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar -P ./lib/
	wget https://repo1.maven.org/maven2/org/slf4j/slf4j-simple/1.7.25/slf4j-simple-1.7.25.jar -P ./lib/
	wget https://github.com/TooTallNate/Java-WebSocket/releases/download/v1.5.3/Java-WebSocket-1.5.3.jar -P ./lib/

clean:
	rm -frv $(BUILD_DIR)/*
	rm -frv $(LIB_BUILD_DIR)/*
	mkdir -p $(BUILD_DIR)
	mkdir -p ./lib

runfile:
	java -cp ./lib/sherpaonnx.jar:build $(RUNJFLAGS) DecodeFile

runmic:
	java -cp ./lib/sherpaonnx.jar:build $(RUNJFLAGS) DecodeMic

runsrv:
	java -cp $(BUILD_DIR):lib/Java-WebSocket-1.5.3.jar:lib/slf4j-simple-1.7.25.jar:lib/slf4j-api-1.7.25.jar:../lib/sherpaonnx.jar $(RUNJFLAGS) websocketsrv.AsrWebsocketServer ../build/lib/libsherpa-onnx-jni.so ./modeltest.cfg

runclient:
	java -cp $(BUILD_DIR):lib/Java-WebSocket-1.5.3.jar:lib/slf4j-simple-1.7.25.jar:lib/slf4j-api-1.7.25.jar:../lib/sherpaonnx.jar $(RUNJFLAGS) websocketsrv.AsrWebsocketClient ../build/lib/libsherpa-onnx-jni.so 127.0.0.1 8890 ./test.wav 32

buildlib: $(LIB_FILES:.java=.class)

%.class: %.java
	$(JAVAC) -cp $(BUILD_DIR) -d $(BUILD_DIR) -encoding UTF-8 $<

buildwebsocket: $(WEBSOCKET_FILES:.java=.class)

%.class: %.java
	$(JAVAC) -cp $(BUILD_DIR):lib/slf4j-simple-1.7.25.jar:lib/slf4j-api-1.7.25.jar:lib/Java-WebSocket-1.5.3.jar:../lib/sherpaonnx.jar -d $(BUILD_DIR) -encoding UTF-8 $<

packjar:
	jar cvfe lib/sherpaonnx.jar . -C $(BUILD_DIR) .

all: clean buildlib packjar buildfile buildmic downjar buildwebsocket
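With the targets above, a typical flow is: make all (clean, compile the Java API sources, pack them into lib/sherpaonnx.jar, build the two examples, download the slf4j and Java-WebSocket jars, and compile the websocket sources), followed by one of make runfile, make runmic, make runsrv or make runclient. The run targets expect libsherpa-onnx-jni.so under ../build/lib and a model config file in the current directory.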
... ...
#model config
sample_rate=16000
feature_dim=80
rule1_min_trailing_silence=2.4
rule2_min_trailing_silence=1.2
rule3_min_utterance_length=20
encoder=/sherpa/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/encoder-epoch-99-avg-1.onnx
decoder=/sherpa/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/decoder-epoch-99-avg-1.onnx
joiner=/sherpa/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/joiner-epoch-99-avg-1.onnx
tokens=/sherpa/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/tokens.txt
num_threads=4
enable_endpoint_detection=true
decoding_method=modified_beam_search
max_active_paths=4
lm_model=
lm_scale=0.5
model_type=zipformer
#websocket server config
port=8890
connection_thread_num=16
stream_thread_num=16
decoder_thread_num=16
parallel_decoder_num=16
decoder_time_idle=200
deocder_time_out=30000
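The file above is a plain key=value config; the examples simply pass its path to new OnlineRecognizer(cfgFile). The recognizer's own parser is not shown in this listing, but as a rough illustration of the format, a file like this can be previewed with java.util.Properties (the class name CfgPreview is made up for this sketch):

import java.io.FileInputStream;
import java.util.Properties;

// Hypothetical preview of a key=value config such as modeltest.cfg.
// The real parsing happens inside OnlineRecognizer(cfgFile).
public class CfgPreview {
    public static void main(String[] args) throws Exception {
        Properties cfg = new Properties();
        try (FileInputStream in = new FileInputStream("modeltest.cfg")) {
            cfg.load(in); // lines starting with '#' are treated as comments
        }
        int sampleRate = Integer.parseInt(cfg.getProperty("sample_rate", "16000"));
        boolean endpoint = Boolean.parseBoolean(cfg.getProperty("enable_endpoint_detection", "false"));
        int port = Integer.parseInt(cfg.getProperty("port", "8890"));
        System.out.println("sample_rate=" + sampleRate + " endpoint=" + endpoint + " port=" + port);
    }
}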
... ...
/*
* // Copyright 2022-2023 by zhaoming
*/
/*
Config modelconfig.cfg
sample_rate=16000
feature_dim=80
rule1_min_trailing_silence=2.4
rule2_min_trailing_silence=1.2
rule3_min_utterance_length=20
encoder=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/encoder-epoch-99-avg-1.onnx
decoder=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/decoder-epoch-99-avg-1.onnx
joiner=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/joiner-epoch-99-avg-1.onnx
tokens=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/tokens.txt
num_threads=4
enable_endpoint_detection=false
decoding_method=greedy_search
max_active_paths=4
*/
import com.k2fsa.sherpa.onnx.OnlineRecognizer;
import com.k2fsa.sherpa.onnx.OnlineStream;
import java.io.*;
import java.nio.charset.StandardCharsets;
public class DecodeFile {
OnlineRecognizer rcgOjb;
OnlineStream streamObj;
String wavfilename;
public DecodeFile(String fileName) {
wavfilename = fileName;
}
public void initModelWithPara() {
try {
String modelDir =
"/sherpa-onnx/build_old/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20";
String encoder = modelDir + "/encoder-epoch-99-avg-1.onnx";
String decoder = modelDir + "/decoder-epoch-99-avg-1.onnx";
String joiner = modelDir + "/joiner-epoch-99-avg-1.onnx";
String tokens = modelDir + "/tokens.txt";
int numThreads = 4;
int sampleRate = 16000;
int featureDim = 80;
boolean enableEndpointDetection = false;
float rule1MinTrailingSilence = 2.4F;
float rule2MinTrailingSilence = 1.2F;
float rule3MinUtteranceLength = 20F;
String decodingMethod = "greedy_search";
int maxActivePaths = 4;
String lm_model = "";
float lm_scale = 0.5F;
String modelType = "zipformer";
rcgOjb =
new OnlineRecognizer(
tokens,
encoder,
decoder,
joiner,
numThreads,
sampleRate,
featureDim,
enableEndpointDetection,
rule1MinTrailingSilence,
rule2MinTrailingSilence,
rule3MinUtteranceLength,
decodingMethod,
lm_model,
lm_scale,
maxActivePaths,
modelType);
streamObj = rcgOjb.createStream();
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
public void initModelWithCfg(String cfgFile) {
try {
// call OnlineRecognizer.setSoPath() before running this
rcgOjb = new OnlineRecognizer(cfgFile);
streamObj = rcgOjb.createStream();
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
public void simpleExample() {
try {
float[] buffer = rcgOjb.readWavFile(wavfilename); // read data from file
streamObj.acceptWaveform(buffer); // feed stream with data
streamObj.inputFinished(); // tell the engine that all data has been fed
OnlineStream ssObj[] = new OnlineStream[1];
while (rcgOjb.isReady(streamObj)) { // engine is ready for unprocessed data
ssObj[0] = streamObj;
rcgOjb.decodeStreams(ssObj); // decode multiple streams at once
// rcgOjb.decodeStream(streamObj); // decode a single stream
}
String recText = "simple:" + rcgOjb.getResult(streamObj) + "\n";
byte[] utf8Data = recText.getBytes(StandardCharsets.UTF_8);
System.out.println(new String(utf8Data));
rcgOjb.reSet(streamObj);
rcgOjb.releaseStream(streamObj); // release stream
rcgOjb.release(); // release recognizer
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
public void streamExample() {
try {
float[] buffer = rcgOjb.readWavFile(wavfilename); // read data from file
float[] chunk = new float[1600]; // feed 1600 samples (0.1 s at 16 kHz) per chunk
int chunkIndex = 0;
for (int i = 0; i < buffer.length; i++) // total wav length loop
{
chunk[chunkIndex] = buffer[i];
chunkIndex++;
if (chunkIndex >= 1600 || i == (buffer.length - 1)) {
chunkIndex = 0;
streamObj.acceptWaveform(chunk); // feed chunk
if (rcgOjb.isReady(streamObj)) {
rcgOjb.decodeStream(streamObj);
}
String partialText = rcgOjb.getResult(streamObj);
byte[] utf8Data = partialText.getBytes(StandardCharsets.UTF_8);
if (utf8Data.length > 0) {
System.out.println(Float.valueOf((float) i / 16000) + ":" + new String(utf8Data));
}
}
}
streamObj.inputFinished();
while (rcgOjb.isReady(streamObj)) {
rcgOjb.decodeStream(streamObj);
}
String recText = "stream:" + rcgOjb.getResult(streamObj) + "\n";
byte[] utf8Data = recText.getBytes(StandardCharsets.UTF_8);
System.out.println(new String(utf8Data));
rcgOjb.reSet(streamObj);
rcgOjb.releaseStream(streamObj); // release stream
rcgOjb.release(); // release recognizer
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
public static void main(String[] args) {
try {
String appDir = System.getProperty("user.dir");
System.out.println("appdir=" + appDir);
String fileName = appDir + "/test.wav";
String cfgPath = appDir + "/modeltest.cfg";
String soPath = appDir + "/../build/lib/libsherpa-onnx-jni.so";
OnlineRecognizer.setSoPath(soPath);
DecodeFile rcgDemo = new DecodeFile(fileName);
// ***************** */
rcgDemo.initModelWithCfg(cfgPath);
rcgDemo.streamExample();
// **************** */
rcgDemo.initModelWithCfg(cfgPath);
rcgDemo.simpleExample();
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
}
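For comparison with streamExample above, the same chunked feeding can be written with Arrays.copyOfRange instead of a manual copy loop. This is only a sketch that assumes the same OnlineRecognizer/OnlineStream API used in DecodeFile; it is not part of the shipped example:

import com.k2fsa.sherpa.onnx.OnlineRecognizer;
import com.k2fsa.sherpa.onnx.OnlineStream;
import java.util.Arrays;

public class DecodeFileChunked {
    // feed a 16 kHz wav file to the recognizer in 0.1 s (1600-sample) chunks
    static void decodeChunked(OnlineRecognizer rcg, OnlineStream stream, String wavPath) {
        float[] buffer = rcg.readWavFile(wavPath);
        int chunkSize = 1600;
        for (int start = 0; start < buffer.length; start += chunkSize) {
            int end = Math.min(start + chunkSize, buffer.length);
            stream.acceptWaveform(Arrays.copyOfRange(buffer, start, end)); // feed one chunk
            while (rcg.isReady(stream)) {
                rcg.decodeStream(stream); // decode whatever is ready so far
            }
        }
        stream.inputFinished(); // no more audio
        while (rcg.isReady(stream)) {
            rcg.decodeStream(stream);
        }
        System.out.println("chunked:" + rcg.getResult(stream));
    }
}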
... ...
/*
* // Copyright 2022-2023 by zhaoming
*/
/*
Real-time speech recognition from a microphone with com.k2fsa.sherpa.onnx Java API
example for cfgFile modelconfig.cfg
sample_rate=16000
feature_dim=80
rule1_min_trailing_silence=2.4
rule2_min_trailing_silence=1.2
rule3_min_utterance_length=20
encoder=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/encoder-epoch-99-avg-1.onnx
decoder=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/decoder-epoch-99-avg-1.onnx
joiner=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/joiner-epoch-99-avg-1.onnx
tokens=/sherpa-onnx/build/bin/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20/tokens.txt
num_threads=4
enable_endpoint_detection=true
decoding_method=greedy_search
max_active_paths=4
*/
import com.k2fsa.sherpa.onnx.OnlineRecognizer;
import com.k2fsa.sherpa.onnx.OnlineStream;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.nio.charset.StandardCharsets;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.TargetDataLine;
/** Microphone Example */
public class DecodeMic {
MicRcgThread micRcgThread = null; // thread handle
OnlineRecognizer rcgOjb; // the recognizer
OnlineStream streamObj; // the stream
public DecodeMic() {
micRcgThread = new MicRcgThread(); // create a new instance for MicRcgThread
}
public void open() {
micRcgThread.start(); // start to capture microphone data
}
public void close() {
micRcgThread.stop(); // close capture
}
/** init asr engine with config file */
public void initModelWithCfg(String cfgFile) {
try {
// call OnlineRecognizer.setSoPath() before running this
rcgOjb = new OnlineRecognizer(cfgFile);
streamObj = rcgOjb.createStream(); // create a stream for asr engine to feed data
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
/** read data from mic and feed to asr engine */
class MicRcgThread implements Runnable {
TargetDataLine capline; // line for capture mic data
Thread thread; // this thread
int segmentId = 0; // record the segment id when detect endpoint
String preText = ""; // decoded text
public MicRcgThread() {}
public void start() {
thread = new Thread(this);
thread.start(); // start thread
}
public void stop() {
capline.stop();
capline.close();
capline = null;
thread = null;
}
/** feed captured microphone data to asr */
public void decodeSample(byte[] samplebytes) {
try {
ByteBuffer byteBuf = ByteBuffer.wrap(samplebytes); // create a bytebuf for samples
byteBuf.order(ByteOrder.LITTLE_ENDIAN); // set bytebuf to little endian
ShortBuffer shortBuf = byteBuf.asShortBuffer(); // view the bytes as 16-bit samples
short[] arrShort = new short[shortBuf.capacity()]; // array for the short data
float[] arrFloat = new float[shortBuf.capacity()]; // array for the float data
shortBuf.get(arrShort); // copy data into arrShort
for (int i = 0; i < arrShort.length; i++) {
arrFloat[i] = arrShort[i] / 32768f; // convert 16-bit samples to floats in [-1, 1)
}
streamObj.acceptWaveform(arrFloat); // feed asr engine with float data
while (rcgOjb.isReady(streamObj)) { // if engine is ready for unprocessed data
rcgOjb.decodeStream(streamObj); // decode for this stream
}
// endpoint check; requires enable_endpoint_detection=true in the config file
boolean isEndpoint = rcgOjb.isEndpoint(streamObj);
String nowText = rcgOjb.getResult(streamObj); // get asr result
String recText = "";
byte[] utf8Data; // for converting text to UTF-8 bytes
if (isEndpoint && nowText.length() > 0) {
rcgOjb.reSet(streamObj); // reSet stream when detect endpoint
segmentId++;
preText = nowText;
recText = "text(seg_" + String.valueOf(segmentId) + "):" + nowText + "\n";
utf8Data = recText.getBytes(StandardCharsets.UTF_8);
System.out.println(new String(utf8Data));
}
if (!nowText.equals(preText)) { // print the partial result whenever it changes
preText = nowText;
recText = nowText + "\n";
utf8Data = recText.getBytes(StandardCharsets.UTF_8);
System.out.println(new String(utf8Data));
}
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
/** run mic capture thread */
public void run() {
System.out.println("Started! Please speak...");
AudioFormat.Encoding encoding = AudioFormat.Encoding.PCM_SIGNED; // the pcm format
float rate = 16000.0f; // using 16 kHz
int channels = 1; // single channel
int sampleSize = 16; // sampleSize 16bit
boolean isBigEndian = false; // using little endian
AudioFormat format =
new AudioFormat(
encoding, rate, sampleSize, channels, (sampleSize / 8) * channels, rate, isBigEndian);
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
// check system support such data format
if (!AudioSystem.isLineSupported(info)) {
System.out.println(info + " not supported.");
return;
}
// open a line for capture.
try {
capline = (TargetDataLine) AudioSystem.getLine(info);
capline.open(format, capline.getBufferSize());
} catch (Exception ex) {
System.out.println(ex);
return;
}
// the buf size for mic captured each time
int bufferLengthInBytes = capline.getBufferSize() / 8 * format.getFrameSize();
byte[] micData = new byte[bufferLengthInBytes];
int numBytesRead;
capline.start(); // start to capture mic data
while (thread != null) {
// read data from line
if ((numBytesRead = capline.read(micData, 0, bufferLengthInBytes)) == -1) {
break;
}
decodeSample(micData); // decode mic data
}
// stop and close
try {
if (capline != null) {
capline.stop();
capline.close();
capline = null;
}
} catch (Exception ex) {
System.err.println(ex);
}
}
} // end of inner class MicRcgThread
public static void main(String s[]) {
try {
String appDir = System.getProperty("user.dir");
System.out.println("appdir=" + appDir);
String cfgPath = appDir + "/modelconfig.cfg";
String soPath = appDir + "/../build/lib/libsherpa-onnx-jni.so";
OnlineRecognizer.setSoPath(soPath); // set so. lib for OnlineRecognizer
DecodeMic decodeEx = new DecodeMic();
decodeEx.initModelWithCfg(cfgPath); // init asr engine
decodeEx.open(); // open thread for mic
System.out.print("Press Enter to EXIT!\n");
char i = (char) System.in.read();
decodeEx.close();
} catch (Exception e) {
System.err.println(e);
e.printStackTrace();
}
}
}
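The byte-to-float conversion inside decodeSample is generic and can be pulled out into a standalone helper. The sketch below is not part of the shipped example; it only restates what decodeSample does with the captured bytes:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

// Convert 16-bit little-endian PCM bytes (as captured from the microphone)
// into floats in [-1, 1), the layout OnlineStream.acceptWaveform expects here.
public final class PcmUtil {
    public static float[] pcm16leToFloat(byte[] pcm) {
        ShortBuffer shorts =
            ByteBuffer.wrap(pcm).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
        float[] out = new float[shorts.remaining()];
        for (int i = 0; i < out.length; i++) {
            out[i] = shorts.get(i) / 32768f;
        }
        return out;
    }
}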
... ...
/*
* // Copyright 2022-2023 by zhaomingwork
*/
// java AsrWebsocketClient
// usage: AsrWebsocketClient soPath srvIp srvPort wavPath numThreads
package websocketsrv;
import com.k2fsa.sherpa.onnx.OnlineRecognizer;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.*;
import java.util.Map;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.drafts.Draft;
import org.java_websocket.handshake.ServerHandshake;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** This example demonstrates how to connect to the websocket server. */
public class AsrWebsocketClient extends WebSocketClient {
private static final Logger logger = LoggerFactory.getLogger(AsrWebsocketClient.class);
public AsrWebsocketClient(URI serverUri, Draft draft) {
super(serverUri, draft);
}
public AsrWebsocketClient(URI serverURI) {
super(serverURI);
}
public AsrWebsocketClient(URI serverUri, Map<String, String> httpHeaders) {
super(serverUri, httpHeaders);
}
@Override
public void onOpen(ServerHandshake handshakedata) {
float[] floats = OnlineRecognizer.readWavFile(AsrWebsocketClient.wavPath);
ByteBuffer buffer =
ByteBuffer.allocate(4 * floats.length)
.order(ByteOrder.LITTLE_ENDIAN); // a float is 4 bytes; allocate enough space
for (float f : floats) {
buffer.putFloat(f);
}
buffer.rewind();
buffer.flip();
buffer.order(ByteOrder.LITTLE_ENDIAN);
send(buffer.array()); // send buf to server
send("Done"); // send 'Done' means finished
}
@Override
public void onMessage(String message) {
logger.info("received: " + message);
}
@Override
public void onClose(int code, String reason, boolean remote) {
logger.info(
"Connection closed by "
+ (remote ? "remote peer" : "us")
+ " Code: "
+ code
+ " Reason: "
+ reason);
}
@Override
public void onError(Exception ex) {
ex.printStackTrace();
// if the error is fatal then onClose will be called additionally
}
public static OnlineRecognizer rcgobj;
public static String wavPath;
public static void main(String[] args) throws URISyntaxException {
if (args.length != 5) {
System.out.println("usage: AsrWebsocketClient soPath srvIp srvPort wavPath numThreads");
return;
}
String soPath = args[0];
String srvIp = args[1];
String srvPort = args[2];
String wavPath = args[3];
int numThreads = Integer.parseInt(args[4]);
System.out.println("serIp=" + srvIp + ",srvPort=" + srvPort + ",wavPath=" + wavPath);
class ClientThread implements Runnable {
String soPath;
String srvIp;
String srvPort;
String wavPath;
ClientThread(String soPath, String srvIp, String srvPort, String wavPath) {
this.soPath = soPath;
this.srvIp = srvIp;
this.srvPort = srvPort;
this.wavPath = wavPath;
}
public void run() {
try {
OnlineRecognizer.setSoPath(soPath);
AsrWebsocketClient.wavPath = wavPath;
String wsAddress = "ws://" + srvIp + ":" + srvPort;
AsrWebsocketClient c = new AsrWebsocketClient(new URI(wsAddress));
c.connect();
} catch (Exception e) {
e.printStackTrace();
}
}
}
for (int i = 0; i < numThreads; i++) {
System.out.println("Thread1 is running...");
Thread t = new Thread(new ClientThread(soPath, srvIp, srvPort, wavPath));
t.start();
}
}
}
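onOpen packs the samples as raw 4-byte little-endian floats and then sends the text message "Done" to mark the end of the audio. On the receiving side (AsrWebsocketServer.java, listed in the Makefile but not reproduced here) the binary frame has to be turned back into float[]; a minimal sketch of that inverse step, assuming the same wire layout, could look like:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

// Unpack a binary websocket frame of little-endian 32-bit floats,
// i.e. the inverse of what AsrWebsocketClient.onOpen sends.
public final class FrameUtil {
    public static float[] frameToFloats(ByteBuffer frame) {
        FloatBuffer floats = frame.order(ByteOrder.LITTLE_ENDIAN).asFloatBuffer();
        float[] samples = new float[floats.remaining()];
        floats.get(samples);
        return samples;
    }
}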
... ...
/*
* // Copyright 2022-2023 by zhaoming
*/
// java DecoderThreadHandler
package websocketsrv;
import com.k2fsa.sherpa.onnx.OnlineRecognizer;
import com.k2fsa.sherpa.onnx.OnlineStream;
import java.nio.*;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.*;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.LinkedBlockingQueue;
import org.java_websocket.WebSocket;
import org.java_websocket.drafts.Draft;
import org.java_websocket.framing.Framedata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DecoderThreadHandler extends Thread {
private static final Logger logger = LoggerFactory.getLogger(DecoderThreadHandler.class);
// queue of websockets waiting to be decoded
private LinkedBlockingQueue<WebSocket> decoderQueue;
// the mapping between websocket and connection data
private ConcurrentHashMap<WebSocket, ConnectionData> connMap;
private OnlineRecognizer rcgOjb = null; // recognizer object
// connection data list for this thread to decode in parallel
private List<ConnectionData> connDataList = new ArrayList<ConnectionData>();
private int parallelDecoderNum = 10; // parallel decoding number
private int deocderTimeIdle = 10; // idle time(ms) when no job
private int deocderTimeOut = 3000; // if it is timeout(ms), the connection data will be removed
public DecoderThreadHandler(
LinkedBlockingQueue<WebSocket> decoderQueue,
ConcurrentHashMap<WebSocket, ConnectionData> connMap,
OnlineRecognizer rcgOjb,
int deocderTimeIdle,
int parallelDecoderNum,
int deocderTimeOut) {
this.decoderQueue = decoderQueue;
this.connMap = connMap;
this.rcgOjb = rcgOjb;
this.deocderTimeIdle = deocderTimeIdle;
this.parallelDecoderNum = parallelDecoderNum;
this.deocderTimeOut = deocderTimeOut;
}
public void run() {
while (true) {
try {
// time(ms) idle if there is no job
Thread.sleep(deocderTimeIdle);
// clear data list for this threads
connDataList.clear();
if (rcgOjb == null) continue;
// loop for total decoder Queue
while (!decoderQueue.isEmpty()) {
// get websocket
WebSocket conn = decoderQueue.take();
// get connection data according to websocket
ConnectionData connData = connMap.get(conn);
// if the websocket closed, continue
if (connData == null) continue;
// get the stream
OnlineStream stream = connData.getStream();
// add to the decoder list if the stream is ready and the list is not yet full
if ((rcgOjb.isReady(stream) && connDataList.size() < parallelDecoderNum)) {
// add to this thread's decoder list
connDataList.add(connData);
// change the handled time for this connection data
connData.setLastHandleTime(LocalDateTime.now());
}
// break when decoder list size >= parallelDecoderNum
if (connDataList.size() >= parallelDecoderNum) {
break;
}
}
// if decoder data list for this thread >0
if (connDataList.size() > 0) {
// create a stream array for parallel decoding
OnlineStream[] arr = new OnlineStream[connDataList.size()];
for (int i = 0; i < connDataList.size(); i++) {
arr[i] = connDataList.get(i).getStream();
}
// parallel decoding
rcgOjb.decodeStreams(arr);
}
// get result for each connection
for (ConnectionData connData : connDataList) {
OnlineStream stream = connData.getStream();
WebSocket webSocket = connData.getWebSocket();
String txtResult = rcgOjb.getResult(stream);
// encode the text as UTF-8
byte[] utf8Data = txtResult.getBytes(StandardCharsets.UTF_8);
boolean isEof = (connData.getEof() == true && !rcgOjb.isReady(stream));
// result
if (utf8Data.length > 0) {
String jsonResult =
"{\"text\":\"" + txtResult + "\",\"eof\":" + String.valueOf(isEof) + "}";
if (webSocket.isOpen()) {
// create TEXT frames to send the json result back to the client
Draft draft = webSocket.getDraft();
List<Framedata> frames = draft.createFrames(jsonResult, false);
// send to client
webSocket.sendFrame(frames);
}
}
}
// loop for each connection data in this thread
for (ConnectionData connData : connDataList) {
OnlineStream stream = connData.getStream();
WebSocket webSocket = connData.getWebSocket();
// if the stream is still ready, put it to decoder Queue again for next decoding
if (rcgOjb.isReady(stream)) {
decoderQueue.put(webSocket);
}
// the duration between last handled time and now
java.time.Duration duration =
java.time.Duration.between(connData.getLastHandleTime(), LocalDateTime.now());
// close the websocket if 1) the data is done, the stream is not ready and no samples
// are queued, 2) the connection has timed out, or 3) the connection is already closed
if ((connData.getEof() == true
&& !rcgOjb.isReady(stream)
&& connData.getQueueSamples().isEmpty())
|| duration.toMillis() > deocderTimeOut
|| !connData.getWebSocket().isOpen()) {
logger.info("close websocket!!!");
// delay closing the websocket, as data may still be in processing
Timer timer = new Timer();
timer.schedule(
new TimerTask() {
public void run() {
webSocket.close();
}
},
5000); // 5 seconds
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
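Note that the JSON reply in DecoderThreadHandler is assembled by hand, so recognized text containing a double quote or backslash would produce invalid JSON. A small escaping helper, hypothetical and not part of the original sources, would make that robust:

// Minimal JSON string escaping for the hand-built reply above.
// A sketch only; a JSON library would be the more complete choice.
final class JsonText {
    static String escape(String s) {
        StringBuilder sb = new StringBuilder(s.length() + 8);
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            switch (c) {
                case '"':  sb.append("\\\""); break;
                case '\\': sb.append("\\\\"); break;
                case '\n': sb.append("\\n"); break;
                case '\r': sb.append("\\r"); break;
                case '\t': sb.append("\\t"); break;
                default:   sb.append(c);
            }
        }
        return sb.toString();
    }
}
// usage: "{\"text\":\"" + JsonText.escape(txtResult) + "\",\"eof\":" + isEof + "}"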
... ...
/*
* // Copyright 2022-2023 by zhaoming
*/
// java StreamThreadHandler
package websocketsrv;
import com.k2fsa.sherpa.onnx.OnlineStream;
import java.nio.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.LinkedBlockingQueue;
import org.java_websocket.WebSocket;
// thread for processing stream
public class StreamThreadHandler extends Thread {
// queue between the network I/O thread pool and the stream thread pool, keyed by websocket
private LinkedBlockingQueue<WebSocket> streamQueue;
// queue of connections waiting for decoding, keyed by websocket
private LinkedBlockingQueue<WebSocket> decoderQueue;
// mapping between websocket connection and connection data
private ConcurrentHashMap<WebSocket, ConnectionData> connMap;
public StreamThreadHandler(
LinkedBlockingQueue<WebSocket> streamQueue,
LinkedBlockingQueue<WebSocket> decoderQueue,
ConcurrentHashMap<WebSocket, ConnectionData> connMap) {
this.streamQueue = streamQueue;
this.decoderQueue = decoderQueue;
this.connMap = connMap;
}
public void run() {
while (true) {
try {
// fetch one websocket from queue
WebSocket conn = (WebSocket) this.streamQueue.take();
// get the connection data according to websocket
ConnectionData connData = connMap.get(conn);
OnlineStream stream = connData.getStream();
// handle received binary data
if (!connData.getQueueSamples().isEmpty()) {
// loop to put all received binary data to stream
while (!connData.getQueueSamples().isEmpty()) {
float[] samples = connData.getQueueSamples().poll();
stream.acceptWaveform(samples);
}
// if data is finished
if (connData.getEof() == true) {
stream.inputFinished();
}
// add this websocket to decoder Queue if not in the Queue
if (!decoderQueue.contains(conn)) {
decoderQueue.put(conn);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
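StreamThreadHandler and DecoderThreadHandler only make sense together with the queues and the connection map that AsrWebsocketServer.java (listed in the Makefile but not included in this excerpt) creates. The following is a rough, hypothetical sketch of that wiring, using the thread counts from modeltest.cfg as assumed values; the real setup lives in the server class itself:

package websocketsrv;

import com.k2fsa.sherpa.onnx.OnlineRecognizer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.java_websocket.WebSocket;

// Hypothetical wiring of the two handler pools; the actual server also owns
// the websocket endpoint that fills connMap and streamQueue as audio arrives.
class ServerWiringSketch {
    public static void main(String[] args) throws Exception {
        OnlineRecognizer.setSoPath("../build/lib/libsherpa-onnx-jni.so");
        OnlineRecognizer rcg = new OnlineRecognizer("./modeltest.cfg");

        LinkedBlockingQueue<WebSocket> streamQueue = new LinkedBlockingQueue<>();
        LinkedBlockingQueue<WebSocket> decoderQueue = new LinkedBlockingQueue<>();
        ConcurrentHashMap<WebSocket, ConnectionData> connMap = new ConcurrentHashMap<>();

        int streamThreadNum = 16;     // stream_thread_num in modeltest.cfg
        int decoderThreadNum = 16;    // decoder_thread_num
        int parallelDecoderNum = 16;  // parallel_decoder_num
        int decoderTimeIdle = 200;    // decoder_time_idle (ms)
        int decoderTimeOut = 30000;   // deocder_time_out (ms), as spelled in the config

        for (int i = 0; i < streamThreadNum; i++) {
            new StreamThreadHandler(streamQueue, decoderQueue, connMap).start();
        }
        for (int i = 0; i < decoderThreadNum; i++) {
            new DecoderThreadHandler(
                    decoderQueue, connMap, rcg, decoderTimeIdle, parallelDecoderNum, decoderTimeOut)
                .start();
        }
        // Network I/O threads put each connection's samples into its ConnectionData
        // and enqueue the websocket on streamQueue; StreamThreadHandler moves the
        // samples into the OnlineStream and hands the socket on to decoderQueue.
    }
}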
... ...