// sherpa-onnx/csrc/offline-rnn-lm.h
//
// Copyright (c) 2023 Xiaomi Corporation

#ifndef SHERPA_ONNX_CSRC_OFFLINE_RNN_LM_H_
#define SHERPA_ONNX_CSRC_OFFLINE_RNN_LM_H_

#include <memory>

#include "onnxruntime_cxx_api.h" // NOLINT
#include "sherpa-onnx/csrc/offline-lm-config.h"
#include "sherpa-onnx/csrc/offline-lm.h"
namespace sherpa_onnx {
class OfflineRnnLM : public OfflineLM {
 public:
  ~OfflineRnnLM() override;

  explicit OfflineRnnLM(const OfflineLMConfig &config);

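  // Constructor that reads the model through a platform resource manager
  // (e.g., an Android AAssetManager); the Manager type is supplied by the
  // caller.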
  template <typename Manager>
  OfflineRnnLM(Manager *mgr, const OfflineLMConfig &config);

  /** Rescore a batch of sentences.
   *
   * @param x A 2-D tensor of shape (N, L) with data type int64.
   * @param x_lens A 1-D tensor of shape (N,) with data type int64.
   *               It contains the number of valid tokens in x before padding.
   * @return Return a 1-D tensor of shape (N,) containing the log likelihood
   *         of each utterance. Its data type is float32.
   *
   * Caution: It returns log likelihood, not negative log likelihood (nll).
   */
  Ort::Value Rescore(Ort::Value x, Ort::Value x_lens) override;

 private:
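  // Pimpl idiom: the actual implementation is hidden in the corresponding
  // .cc file.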
  class Impl;
  std::unique_ptr<Impl> impl_;
};

}  // namespace sherpa_onnx

#endif // SHERPA_ONNX_CSRC_OFFLINE_RNN_LM_H_
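
// Example: rescoring one utterance with OfflineRnnLM. This is a minimal
// usage sketch; the OfflineLMConfig field name ("model") and the onnxruntime
// tensor setup below are assumptions for illustration, not guarantees made
// by this header.
//
//   #include <array>
//   #include <cstdint>
//   #include <utility>
//
//   #include "sherpa-onnx/csrc/offline-rnn-lm.h"
//
//   void RescoreExample() {
//     sherpa_onnx::OfflineLMConfig config;
//     config.model = "rnn-lm.onnx";  // hypothetical model path
//     sherpa_onnx::OfflineRnnLM lm(config);
//
//     // One utterance (N = 1) with three valid tokens (L = 3); both inputs
//     // must be int64 as documented above.
//     Ort::AllocatorWithDefaultOptions allocator;
//     std::array<int64_t, 2> x_shape{1, 3};
//     Ort::Value x = Ort::Value::CreateTensor<int64_t>(
//         allocator, x_shape.data(), x_shape.size());
//     int64_t *x_data = x.GetTensorMutableData<int64_t>();
//     x_data[0] = 5;  // placeholder token IDs
//     x_data[1] = 9;
//     x_data[2] = 2;
//
//     std::array<int64_t, 1> lens_shape{1};
//     Ort::Value x_lens = Ort::Value::CreateTensor<int64_t>(
//         allocator, lens_shape.data(), lens_shape.size());
//     x_lens.GetTensorMutableData<int64_t>()[0] = 3;
//
//     // Returns a float32 tensor of shape (N,) holding the log likelihood
//     // of each utterance (not the negative log likelihood).
//     Ort::Value scores = lm.Rescore(std::move(x), std::move(x_lens));
//     const float *log_like = scores.GetTensorData<float>();
//     (void)log_like;
//   }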