Fangjun Kuang
Committed by GitHub

Add microphone demo about VAD+ASR for HarmonyOS (#1581)

@@ -3,47 +3,127 @@ import worker, { MessageEvents } from '@ohos.worker'; @@ -3,47 +3,127 @@ import worker, { MessageEvents } from '@ohos.worker';
3 import { BusinessError } from '@kit.BasicServicesKit'; 3 import { BusinessError } from '@kit.BasicServicesKit';
4 import { picker } from '@kit.CoreFileKit'; 4 import { picker } from '@kit.CoreFileKit';
5 5
  6 +import { Permissions } from '@kit.AbilityKit';
  7 +import { allAllowed, requestPermissions } from './Permission';
  8 +import { audio } from '@kit.AudioKit';
  9 +
6 10
7 @Entry 11 @Entry
8 @Component 12 @Component
9 struct Index { 13 struct Index {
10 @State currentIndex: number = 0; 14 @State currentIndex: number = 0;
11 - @State resultFromFile: string = ''; 15 + @State resultForFile: string = '';
12 @State progressForFile: number = 0; 16 @State progressForFile: number = 0;
13 @State selectFileBtnEnabled: boolean = false; 17 @State selectFileBtnEnabled: boolean = false;
14 - @State message: string = 'To be implemented';  
15 @State lang: string = 'English'; 18 @State lang: string = 'English';
  19 + @State resultForMic: string = '';
  20 + @State micStarted: boolean = false;
  21 + @State message: string = 'Start recording';
  22 + @State micInitDone: boolean = false;
16 private controller: TabsController = new TabsController(); 23 private controller: TabsController = new TabsController();
17 private workerInstance?: worker.ThreadWorker 24 private workerInstance?: worker.ThreadWorker
18 private readonly scriptURL: string = 'entry/ets/workers/NonStreamingAsrWithVadWorker.ets' 25 private readonly scriptURL: string = 'entry/ets/workers/NonStreamingAsrWithVadWorker.ets'
  26 + private mic?: audio.AudioCapturer;
  27 + private sampleList: Float32Array[] = []
  28 +
  29 + flatten(samples: Float32Array[]): Float32Array {
  30 + let n = 0;
  31 + for (let i = 0; i < samples.length; ++i) {
  32 + n += samples[i].length;
  33 + }
  34 +
  35 + const ans: Float32Array = new Float32Array(n);
  36 + let offset: number = 0;
  37 + for (let i = 0; i < samples.length; ++i) {
  38 + ans.set(samples[i], offset);
  39 + offset += samples[i].length;
  40 + }
  41 +
  42 + return ans;
  43 + }
  44 +
  45 + async initMic() {
  46 + const permissions: Permissions[] = ["ohos.permission.MICROPHONE"];
  47 + let allowed: boolean = await allAllowed(permissions);
  48 + if (!allowed) {
  49 + await requestPermissions(permissions);
  50 + console.log("request to access the microphone");
  51 +
  52 + allowed = await allAllowed(permissions);
  53 + if (!allowed) {
  54 + console.error('failed to get microphone permission');
  55 + this.resultForMic = "Failed to get microphone permission. Please retry";
  56 + return;
  57 + }
  58 + } else {
  59 + console.log("allowed to access microphone");
  60 + }
  61 +
  62 + const audioStreamInfo: audio.AudioStreamInfo = {
  63 + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_16000,
  64 + channels: audio.AudioChannel.CHANNEL_1,
  65 + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  66 + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW,
  67 + };
19 68
20 - aboutToAppear(): void { 69 + const audioCapturerInfo: audio.AudioCapturerInfo = {
  70 + source: audio.SourceType.SOURCE_TYPE_MIC,
  71 + capturerFlags: 0
  72 + };
  73 +
  74 + const audioCapturerOptions: audio.AudioCapturerOptions = {
  75 + streamInfo: audioStreamInfo,
  76 + capturerInfo: audioCapturerInfo
  77 +
  78 + };
  79 + audio.createAudioCapturer(audioCapturerOptions, (err, data) => {
  80 + if (err) {
  81 + console.error(`error code is ${err.code}, error message is ${err.message}`);
  82 + this.resultForMic = 'Failed to init microphone';
  83 + } else {
  84 + console.info(`init mic successfully`);
  85 + this.mic = data;
  86 + this.mic.on('readData', this.micCallback);
  87 +
  88 + if (this.workerInstance) {
  89 + this.workerInstance.postMessage({ msgType: 'init-vad-mic', context: getContext() });
  90 + }
  91 + }
  92 + });
  93 + }
  94 +
  95 + async aboutToAppear() {
21 this.workerInstance = new worker.ThreadWorker(this.scriptURL, { 96 this.workerInstance = new worker.ThreadWorker(this.scriptURL, {
22 name: 'NonStreaming ASR worker' 97 name: 'NonStreaming ASR worker'
23 }); 98 });
24 99
25 this.workerInstance.onmessage = (e: MessageEvents) => { 100 this.workerInstance.onmessage = (e: MessageEvents) => {
26 const msgType = e.data['msgType'] as string; 101 const msgType = e.data['msgType'] as string;
27 - console.log(`received data ${msgType}`); 102 + console.log(`received msg from worker: ${msgType}`);
  103 +
  104 + if (msgType == 'init-vad-mic-done') {
  105 + this.micInitDone = true;
  106 + }
28 107
29 if (msgType == 'init-non-streaming-asr-done') { 108 if (msgType == 'init-non-streaming-asr-done') {
30 this.selectFileBtnEnabled = true; 109 this.selectFileBtnEnabled = true;
  110 + this.resultForFile = `Initializing done.\n\nPlease select a wave file of 16kHz in language ${this.lang}`;
31 } 111 }
32 112
33 if (msgType == 'non-streaming-asr-vad-decode-done') { 113 if (msgType == 'non-streaming-asr-vad-decode-done') {
34 - this.resultFromFile = e.data['text'] as string + '\n'; 114 + this.resultForFile = e.data['text'] as string + '\n';
35 } 115 }
36 116
37 if (msgType == 'non-streaming-asr-vad-decode-partial') { 117 if (msgType == 'non-streaming-asr-vad-decode-partial') {
38 - if (this.resultFromFile == '') {  
39 - this.resultFromFile = e.data['text'] as string; 118 + if (this.resultForFile == '') {
  119 + this.resultForFile = e.data['text'] as string;
40 } else { 120 } else {
41 - this.resultFromFile += '\n\n' + e.data['text'] as string; 121 + this.resultForFile += '\n\n' + e.data['text'] as string;
42 } 122 }
43 } 123 }
44 124
45 if (msgType == 'non-streaming-asr-vad-decode-error') { 125 if (msgType == 'non-streaming-asr-vad-decode-error') {
46 - this.resultFromFile = e.data['text'] as string; 126 + this.resultForFile = e.data['text'] as string;
47 } 127 }
48 128
49 if (msgType == 'non-streaming-asr-vad-decode-progress') { 129 if (msgType == 'non-streaming-asr-vad-decode-progress') {
@@ -51,11 +131,26 @@ struct Index { @@ -51,11 +131,26 @@ struct Index {
51 131
52 this.selectFileBtnEnabled = this.progressForFile >= 100; 132 this.selectFileBtnEnabled = this.progressForFile >= 100;
53 } 133 }
  134 +
  135 + if (msgType == 'non-streaming-asr-vad-mic-partial') {
  136 + if (this.resultForMic == '') {
  137 + this.resultForMic = e.data['text'] as string;
  138 + } else {
  139 + this.resultForMic += '\n\n' + e.data['text'] as string;
  140 + }
  141 + }
  142 +
  143 + if (msgType == 'non-streaming-asr-vad-mic-error') {
  144 + this.resultForMic = e.data['text'] as string;
  145 + }
54 } 146 }
55 147
56 const context = getContext(); 148 const context = getContext();
  149 + this.resultForFile = 'Initializing models';
57 this.workerInstance.postMessage({ msgType: 'init-vad', context }); 150 this.workerInstance.postMessage({ msgType: 'init-vad', context });
58 this.workerInstance.postMessage({ msgType: 'init-non-streaming-asr', context }); 151 this.workerInstance.postMessage({ msgType: 'init-non-streaming-asr', context });
  152 +
  153 + await this.initMic();
59 } 154 }
60 155
61 @Builder 156 @Builder
@@ -86,13 +181,13 @@ struct Index { @@ -86,13 +181,13 @@ struct Index {
86 .lineHeight(41) 181 .lineHeight(41)
87 .fontWeight(500) 182 .fontWeight(500)
88 183
89 - Button('Select .wav file ') 184 + Button('Select .wav file (16kHz) ')
90 .enabled(this.selectFileBtnEnabled) 185 .enabled(this.selectFileBtnEnabled)
91 .fontSize(13) 186 .fontSize(13)
92 .width(296) 187 .width(296)
93 .height(60) 188 .height(60)
94 .onClick(() => { 189 .onClick(() => {
95 - this.resultFromFile = ''; 190 + this.resultForFile = '';
96 this.progressForFile = 0; 191 this.progressForFile = 0;
97 192
98 const documentSelectOptions = new picker.DocumentSelectOptions(); 193 const documentSelectOptions = new picker.DocumentSelectOptions();
@@ -103,7 +198,7 @@ struct Index { @@ -103,7 +198,7 @@ struct Index {
103 console.log(`Result: ${result}`); 198 console.log(`Result: ${result}`);
104 199
105 if (!result[0]) { 200 if (!result[0]) {
106 - this.resultFromFile = 'Please select a file to decode'; 201 + this.resultForFile = 'Please select a file to decode';
107 this.selectFileBtnEnabled = true; 202 this.selectFileBtnEnabled = true;
108 return; 203 return;
109 } 204 }
@@ -135,7 +230,7 @@ struct Index { @@ -135,7 +230,7 @@ struct Index {
135 }.width('100%').justifyContent(FlexAlign.Center) 230 }.width('100%').justifyContent(FlexAlign.Center)
136 } 231 }
137 232
138 - TextArea({ text: this.resultFromFile }).width('100%').lineSpacing({ value: 10, unit: LengthUnit.VP }); 233 + TextArea({ text: this.resultForFile }).width('100%').lineSpacing({ value: 10, unit: LengthUnit.VP });
139 234
140 } 235 }
141 .alignItems(HorizontalAlign.Center) 236 .alignItems(HorizontalAlign.Center)
@@ -144,10 +239,50 @@ struct Index { @@ -144,10 +239,50 @@ struct Index {
144 239
145 TabContent() { 240 TabContent() {
146 Column() { 241 Column() {
147 - Text(this.message)  
148 - .fontSize(50)  
149 - .fontWeight(FontWeight.Bold); 242 + Button(this.message)
  243 + .enabled(this.micInitDone)
  244 + .onClick(() => {
  245 + console.log('clicked mic button');
  246 + this.resultForMic = '';
  247 + if (this.mic) {
  248 + if (this.micStarted) {
  249 + this.mic.stop();
  250 + this.message = "Start recording";
  251 + this.micStarted = false;
  252 + console.log('mic stopped');
  253 +
  254 + const samples = this.flatten(this.sampleList);
  255 + let s = 0;
  256 + for (let i = 0; i < samples.length; ++i) {
  257 + s += samples[i];
  258 + }
  259 + console.log(`samples ${samples.length}, sum: ${s}`);
  260 +
  261 + if (this.workerInstance) {
  262 + console.log('decode mic');
  263 + this.workerInstance.postMessage({
  264 + msgType: 'non-streaming-asr-vad-mic',
  265 + samples,
  266 + });
  267 + } else {
  268 + console.log(`this worker instance is undefined ${this.workerInstance}`);
150 } 269 }
  270 + } else {
  271 + this.sampleList = [];
  272 + this.mic.start();
  273 + this.message = "Stop recording";
  274 + this.micStarted = true;
  275 + console.log('mic started');
  276 + }
  277 + }
  278 + });
  279 +
  280 + Text(`Supported languages: ${this.lang}`)
  281 +
  282 + TextArea({ text: this.resultForMic }).width('100%').lineSpacing({ value: 10, unit: LengthUnit.VP });
  283 + }
  284 + .alignItems(HorizontalAlign.Center)
  285 + .justifyContent(FlexAlign.Start)
151 } 286 }
152 .tabBar(this.TabBuilder('From mic', 1, $r('app.media.ic_public_input_voice'), 287 .tabBar(this.TabBuilder('From mic', 1, $r('app.media.ic_public_input_voice'),
153 $r('app.media.ic_public_input_voice_default'))) 288 $r('app.media.ic_public_input_voice_default')))
@@ -170,4 +305,14 @@ struct Index { @@ -170,4 +305,14 @@ struct Index {
170 .width('100%') 305 .width('100%')
171 .justifyContent(FlexAlign.Start) 306 .justifyContent(FlexAlign.Start)
172 } 307 }
  308 +
  309 + private micCallback = (buffer: ArrayBuffer) => {
  310 + const view: Int16Array = new Int16Array(buffer);
  311 +
  312 + const samplesFloat: Float32Array = new Float32Array(view.length);
  313 + for (let i = 0; i < view.length; ++i) {
  314 + samplesFloat[i] = view[i] / 32768.0;
  315 + }
  316 + this.sampleList.push(samplesFloat);
  317 + }
173 } 318 }
@@ -229,9 +229,10 @@ export function getOfflineModelConfig(type: number): OfflineModelConfig { @@ -229,9 +229,10 @@ export function getOfflineModelConfig(type: number): OfflineModelConfig {
229 229
230 break; 230 break;
231 } 231 }
232 - }  
233 - 232 + default: {
234 console.log(`Please specify a supported type. Given type ${type}`); 233 console.log(`Please specify a supported type. Given type ${type}`);
  234 + }
  235 + }
235 236
236 return c; 237 return c;
237 } 238 }
  1 +// This file is modified from
  2 +// https://gitee.com/ukSir/hmchat2/blob/master/entry/src/main/ets/utils/permissionMananger.ets
  3 +import { abilityAccessCtrl, bundleManager, common, Permissions } from '@kit.AbilityKit';
  4 +
  5 +export function allAllowed(permissions: Permissions[]): boolean {
  6 + if (permissions.length == 0) {
  7 + return false;
  8 + }
  9 +
  10 + const mgr: abilityAccessCtrl.AtManager = abilityAccessCtrl.createAtManager();
  11 +
  12 + const bundleInfo = bundleManager.getBundleInfoForSelfSync(bundleManager.BundleFlag.GET_BUNDLE_INFO_WITH_APPLICATION);
  13 +
  14 + let tokenID: number = bundleInfo.appInfo.accessTokenId;
  15 +
  16 + return permissions.every(permission => abilityAccessCtrl.GrantStatus.PERMISSION_GRANTED ==
  17 + mgr.checkAccessTokenSync(tokenID, permission));
  18 +}
  19 +
  20 +export async function requestPermissions(permissions: Permissions[]): Promise<boolean> {
  21 + const mgr: abilityAccessCtrl.AtManager = abilityAccessCtrl.createAtManager();
  22 + const context: Context = getContext() as common.UIAbilityContext;
  23 +
  24 + const result = await mgr.requestPermissionsFromUser(context, permissions);
  25 + return result.authResults.length > 0 && result.authResults.every(authResults => authResults == 0);
  26 +}
@@ -13,11 +13,13 @@ import { @@ -13,11 +13,13 @@ import {
13 import { Context } from '@kit.AbilityKit'; 13 import { Context } from '@kit.AbilityKit';
14 import { fileIo } from '@kit.CoreFileKit'; 14 import { fileIo } from '@kit.CoreFileKit';
15 import { getOfflineModelConfig } from '../pages/NonStreamingAsrModels'; 15 import { getOfflineModelConfig } from '../pages/NonStreamingAsrModels';
  16 +import { BusinessError } from '@kit.BasicServicesKit';
16 17
17 const workerPort: ThreadWorkerGlobalScope = worker.workerPort; 18 const workerPort: ThreadWorkerGlobalScope = worker.workerPort;
18 19
19 let recognizer: OfflineRecognizer; 20 let recognizer: OfflineRecognizer;
20 let vad: Vad; // vad for decoding files 21 let vad: Vad; // vad for decoding files
  22 +let vadMic: Vad; // vad for mic
21 23
22 function initVad(context: Context): Vad { 24 function initVad(context: Context): Vad {
23 let mgr = context.resourceManager; 25 let mgr = context.resourceManager;
@@ -73,7 +75,7 @@ interface Wave { @@ -73,7 +75,7 @@ interface Wave {
73 sampleRate: number; 75 sampleRate: number;
74 } 76 }
75 77
76 -function decode(filename: string): string { 78 +function decodeFile(filename: string): string {
77 vad.reset(); 79 vad.reset();
78 80
79 const fp = fileIo.openSync(filename); 81 const fp = fileIo.openSync(filename);
@@ -83,6 +85,9 @@ function decode(filename: string): string { @@ -83,6 +85,9 @@ function decode(filename: string): string {
83 const data: Uint8Array = new Uint8Array(arrayBuffer); 85 const data: Uint8Array = new Uint8Array(arrayBuffer);
84 86
85 const wave: Wave = readWaveFromBinary(data); 87 const wave: Wave = readWaveFromBinary(data);
  88 + if (wave.sampleRate != 16000) {
  89 + return `the sample rate in ${filename} is not 16000Hz. Given: ${wave.sampleRate}Hz.\nPlease select a wav file of 16kHz.`;
  90 + }
86 91
87 console.log(`sample rate ${wave.sampleRate}`); 92 console.log(`sample rate ${wave.sampleRate}`);
88 console.log(`samples length ${wave.samples.length}`); 93 console.log(`samples length ${wave.samples.length}`);
@@ -130,6 +135,47 @@ function decode(filename: string): string { @@ -130,6 +135,47 @@ function decode(filename: string): string {
130 return resultList.join('\n\n'); 135 return resultList.join('\n\n');
131 } 136 }
132 137
  138 +function decodeMic(samples: Float32Array) {
  139 + const resultList: string[] = [];
  140 +
  141 + const windowSize: number = vadMic.config.sileroVad.windowSize;
  142 + for (let i = 0; i < samples.length; i += windowSize) {
  143 + const thisWindow: Float32Array = samples.subarray(i, i + windowSize)
  144 + vadMic.acceptWaveform(thisWindow);
  145 + if (i + windowSize >= samples.length) {
  146 + vadMic.flush();
  147 + }
  148 + while (!vadMic.isEmpty()) {
  149 + const segment: SpeechSegment = vadMic.front();
  150 + const _startTime: number = (segment.start / 16000);
  151 + const _endTime: number = _startTime + segment.samples.length / 16000;
  152 +
  153 + if (_endTime - _startTime < 0.2) {
  154 + vadMic.pop();
  155 + continue;
  156 + }
  157 +
  158 + const startTime: string = _startTime.toFixed(2);
  159 + const endTime: string = _endTime.toFixed(2);
  160 +
  161 + const stream: OfflineStream = recognizer.createStream();
  162 + stream.acceptWaveform({ samples: segment.samples, sampleRate: 16000 });
  163 + recognizer.decode(stream);
  164 + const result: OnlineRecognizerResult = recognizer.getResult(stream);
  165 +
  166 + const text: string = `${startTime} -- ${endTime} ${result.text}`
  167 + resultList.push(text);
  168 + console.log(`partial result ${text}`);
  169 +
  170 + workerPort.postMessage({ 'msgType': 'non-streaming-asr-vad-mic-partial', text });
  171 +
  172 + vadMic.pop();
  173 + }
  174 + }
  175 +
  176 + return resultList.join('\n\n');
  177 +}
  178 +
133 /** 179 /**
134 * Defines the event handler to be called when the worker thread receives a message sent by the host thread. 180 * Defines the event handler to be called when the worker thread receives a message sent by the host thread.
135 * The event handler is executed in the worker thread. 181 * The event handler is executed in the worker thread.
@@ -146,6 +192,13 @@ workerPort.onmessage = (e: MessageEvents) => { @@ -146,6 +192,13 @@ workerPort.onmessage = (e: MessageEvents) => {
146 workerPort.postMessage({ 'msgType': 'init-vad-done' }); 192 workerPort.postMessage({ 'msgType': 'init-vad-done' });
147 } 193 }
148 194
  195 + if (msgType == 'init-vad-mic' && !vadMic) {
  196 + const context = e.data['context'] as Context;
  197 + vadMic = initVad(context);
  198 + console.log('init vad mic done');
  199 + workerPort.postMessage({ 'msgType': 'init-vad-mic-done' });
  200 + }
  201 +
149 if (msgType == 'init-non-streaming-asr' && !recognizer) { 202 if (msgType == 'init-non-streaming-asr' && !recognizer) {
150 const context = e.data['context'] as Context; 203 const context = e.data['context'] as Context;
151 recognizer = initNonStreamingAsr(context); 204 recognizer = initNonStreamingAsr(context);
@@ -157,7 +210,7 @@ workerPort.onmessage = (e: MessageEvents) => { @@ -157,7 +210,7 @@ workerPort.onmessage = (e: MessageEvents) => {
157 const filename = e.data['filename'] as string; 210 const filename = e.data['filename'] as string;
158 console.log(`decoding ${filename}`); 211 console.log(`decoding ${filename}`);
159 try { 212 try {
160 - const text = decode(filename); 213 + const text = decodeFile(filename);
161 workerPort.postMessage({ msgType: 'non-streaming-asr-vad-decode-done', text }); 214 workerPort.postMessage({ msgType: 'non-streaming-asr-vad-decode-done', text });
162 } catch (e) { 215 } catch (e) {
163 workerPort.postMessage({ msgType: 'non-streaming-asr-vad-decode-error', text: `Failed to decode ${filename}` }); 216 workerPort.postMessage({ msgType: 'non-streaming-asr-vad-decode-error', text: `Failed to decode ${filename}` });
@@ -165,6 +218,17 @@ workerPort.onmessage = (e: MessageEvents) => { @@ -165,6 +218,17 @@ workerPort.onmessage = (e: MessageEvents) => {
165 218
166 workerPort.postMessage({ 'msgType': 'non-streaming-asr-vad-decode-progress', progress: 100 }); 219 workerPort.postMessage({ 'msgType': 'non-streaming-asr-vad-decode-progress', progress: 100 });
167 } 220 }
  221 +
  222 + if (msgType == 'non-streaming-asr-vad-mic') {
  223 + const samples: Float32Array = e.data['samples'] as Float32Array;
  224 + vadMic.reset();
  225 + try {
  226 + const text = decodeMic(samples);
  227 + workerPort.postMessage({ msgType: 'non-streaming-asr-vad-mic-done', text });
  228 + } catch (e) {
  229 + workerPort.postMessage({ msgType: 'non-streaming-asr-vad-mic-error', text: `Failed to decode` });
  230 + }
  231 + }
168 } 232 }
169 233
170 /** 234 /**
@@ -47,6 +47,18 @@ @@ -47,6 +47,18 @@
47 } 47 }
48 ], 48 ],
49 } 49 }
  50 + ],
  51 + "requestPermissions": [
  52 + {
  53 + "name": "ohos.permission.MICROPHONE",
  54 + "reason": "$string:mic_reason",
  55 + "usedScene": {
  56 + "abilities": [
  57 + "FormAbility",
  58 + ],
  59 + "when": "always",
  60 + }
  61 + }
50 ] 62 ]
51 } 63 }
52 } 64 }
@@ -11,6 +11,10 @@ @@ -11,6 +11,10 @@
11 { 11 {
12 "name": "EntryAbility_label", 12 "name": "EntryAbility_label",
13 "value": "VAD_ASR" 13 "value": "VAD_ASR"
  14 + },
  15 + {
  16 + "name": "mic_reason",
  17 + "value": "access the microphone for speech recognition"
14 } 18 }
15 ] 19 ]
16 } 20 }
@@ -90,7 +90,7 @@ hvigorw assembleHap --mode module -p product=default -p buildMode=release --no-d @@ -90,7 +90,7 @@ hvigorw assembleHap --mode module -p product=default -p buildMode=release --no-d
90 90
91 ls -lh ./entry/build/default/outputs/default/entry-default-unsigned.hap 91 ls -lh ./entry/build/default/outputs/default/entry-default-unsigned.hap
92 92
93 -in_file=./entry/build/default/outputs/default/entry-default-unsigned.hap 93 +in_file=$PWD/entry/build/default/outputs/default/entry-default-unsigned.hap
94 out_file=$PWD/entry/build/default/outputs/default/entry-default-signed.hap 94 out_file=$PWD/entry/build/default/outputs/default/entry-default-signed.hap
95 95
96 java -jar $jar sign-app -keyAlias "$HAP_KEY_ALIAS" -signAlg "SHA256withECDSA" -mode "localSign" \ 96 java -jar $jar sign-app -keyAlias "$HAP_KEY_ALIAS" -signAlg "SHA256withECDSA" -mode "localSign" \
@@ -100,11 +100,12 @@ java -jar $jar sign-app -keyAlias "$HAP_KEY_ALIAS" -signAlg "SHA256withECDSA" -m @@ -100,11 +100,12 @@ java -jar $jar sign-app -keyAlias "$HAP_KEY_ALIAS" -signAlg "SHA256withECDSA" -m
100 100
101 ls -l $in_file $out_file 101 ls -l $in_file $out_file
102 ls -lh $in_file $out_file 102 ls -lh $in_file $out_file
103 -rm $in_file  
104 rm -rf ./entry/src/main/resources/rawfile/$model_name 103 rm -rf ./entry/src/main/resources/rawfile/$model_name
105 popd 104 popd
106 105
107 -mv $out_file ./haps/sherpa-onnx-${SHERPA_ONNX_VERSION}-vad_asr-$lang-$short_name.hap 106 +# Use unsigned hap
  107 +mv $in_file ./haps/sherpa-onnx-${SHERPA_ONNX_VERSION}-vad_asr-$lang-$short_name.hap
  108 +# mv $out_file ./haps/sherpa-onnx-${SHERPA_ONNX_VERSION}-vad_asr-$lang-$short_name.hap
108 109
109 ls -lh haps 110 ls -lh haps
110 111