// sherpa-onnx/csrc/offline-recognizer-impl.cc
//
// Copyright (c) 2023 Xiaomi Corporation
#include "sherpa-onnx/csrc/offline-recognizer-impl.h"
#include <string>
#include "onnxruntime_cxx_api.h" // NOLINT
#include "sherpa-onnx/csrc/macros.h"
#include "sherpa-onnx/csrc/offline-recognizer-paraformer-impl.h"
#include "sherpa-onnx/csrc/offline-recognizer-transducer-impl.h"
#include "sherpa-onnx/csrc/onnx-utils.h"
#include "sherpa-onnx/csrc/text-utils.h"

namespace sherpa_onnx {

std::unique_ptr<OfflineRecognizerImpl> OfflineRecognizerImpl::Create(
    const OfflineRecognizerConfig &config) {
  Ort::Env env(ORT_LOGGING_LEVEL_ERROR);
  Ort::SessionOptions sess_opts;
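
  // Use whichever model file was provided; its metadata determines which
  // implementation to create.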
  std::string model_filename;
  if (!config.model_config.transducer.encoder_filename.empty()) {
    model_filename = config.model_config.transducer.encoder_filename;
  } else if (!config.model_config.paraformer.model.empty()) {
    model_filename = config.model_config.paraformer.model;
  } else {
    SHERPA_ONNX_LOGE("Please provide a model");
    exit(-1);
  }
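
  // Load the selected model into an onnxruntime session so that its
  // metadata can be inspected.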
  auto buf = ReadFile(model_filename);
  auto encoder_sess =
      std::make_unique<Ort::Session>(env, buf.data(), buf.size(), sess_opts);

  Ort::ModelMetadata meta_data = encoder_sess->GetModelMetadata();
  Ort::AllocatorWithDefaultOptions allocator;  // used in the macro below

  std::string model_type;
  SHERPA_ONNX_READ_META_DATA_STR(model_type, "model_type");
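
  // Dispatch on the model_type stored in the ONNX metadata; the transducer
  // models from icefall carry model_type "conformer".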
if (model_type == "conformer") {
return std::make_unique<OfflineRecognizerTransducerImpl>(config);
}
if (model_type == "paraformer") {
return std::make_unique<OfflineRecognizerParaformerImpl>(config);
}
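
  // Nothing matched: report the supported model types and abort.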
  SHERPA_ONNX_LOGE(
      "\nUnsupported model_type: %s\n"
      "We support only the following model types at present: \n"
      " - transducer models from icefall\n"
      " - Paraformer models from FunASR\n",
      model_type.c_str());

  exit(-1);
}

}  // namespace sherpa_onnx