Fangjun Kuang
Committed by GitHub

Support Android NNAPI. (#622)

... ... @@ -76,8 +76,42 @@ cmake -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake"
-DANDROID_ABI="arm64-v8a" \
-DANDROID_PLATFORM=android-21 ..
# Please use -DANDROID_PLATFORM=android-27 if you want to use Android NNAPI
# make VERBOSE=1 -j4
make -j4
make install/strip
cp -fv $onnxruntime_version/jni/arm64-v8a/libonnxruntime.so install/lib
rm -rf install/lib/pkgconfig
# To run the generated binaries on Android, please use the following steps.
#
#
# 1. Copy sherpa-onnx and its dependencies to Android
#
# cd build-android-arm64-v8a/install/lib
# adb push ./lib*.so /data/local/tmp
# cd ../bin
# adb push ./sherpa-onnx /data/local/tmp
#
# 2. Log in to Android
#
# adb shell
# cd /data/local/tmp
# ./sherpa-onnx
#
# which shows the following error log:
#
# CANNOT LINK EXECUTABLE "./sherpa-onnx": library "libsherpa-onnx-core.so" not found: needed by main executable
#
# Please run:
#
# export LD_LIBRARY_PATH=$PWD:$LD_LIBRARY_PATH
#
# and then you can run:
#
# ./sherpa-onnx
#
# It should show the help message of sherpa-onnx.
#
# Please use the above approach to copy model files to your phone.
... ...
... ... @@ -22,6 +22,8 @@ Provider StringToProvider(std::string s) {
return Provider::kCoreML;
} else if (s == "xnnpack") {
return Provider::kXnnpack;
} else if (s == "nnapi") {
return Provider::kNNAPI;
} else {
SHERPA_ONNX_LOGE("Unsupported string: %s. Fallback to cpu", s.c_str());
return Provider::kCPU;
... ...
... ... @@ -17,6 +17,7 @@ enum class Provider {
kCUDA = 1, // CUDAExecutionProvider
kCoreML = 2, // CoreMLExecutionProvider
kXnnpack = 3, // XnnpackExecutionProvider
kNNAPI = 4, // NnapiExecutionProvider
};
/**
... ...
... ... @@ -15,6 +15,10 @@
#include "coreml_provider_factory.h" // NOLINT
#endif
#if __ANDROID_API__ >= 27
#include "nnapi_provider_factory.h"
#endif
namespace sherpa_onnx {
static Ort::SessionOptions GetSessionOptionsImpl(int32_t num_threads,
... ... @@ -77,6 +81,38 @@ static Ort::SessionOptions GetSessionOptionsImpl(int32_t num_threads,
#endif
break;
}
case Provider::kNNAPI: {
#if __ANDROID_API__ >= 27
SHERPA_ONNX_LOGE("Current API level %d ", (int32_t)__ANDROID_API__);
// Please see
// https://onnxruntime.ai/docs/execution-providers/NNAPI-ExecutionProvider.html#usage
// to enable different flags
uint32_t nnapi_flags = 0;
// nnapi_flags |= NNAPI_FLAG_USE_FP16;
// nnapi_flags |= NNAPI_FLAG_CPU_DISABLED;
OrtStatus *status = OrtSessionOptionsAppendExecutionProvider_Nnapi(
sess_opts, nnapi_flags);
if (status) {
const auto &api = Ort::GetApi();
const char *msg = api.GetErrorMessage(status);
SHERPA_ONNX_LOGE(
"Failed to enable NNAPI: %s. Available providers: %s. Fallback to "
"cpu",
msg, os.str().c_str());
api.ReleaseStatus(status);
} else {
SHERPA_ONNX_LOGE("Use nnapi");
}
#else
SHERPA_ONNX_LOGE(
"Android NNAPI requires API level >= 27. Current API level %d "
"Fallback to cpu!",
(int32_t)__ANDROID_API__);
#endif
break;
}
}
return sess_opts;
... ...