Showing 4 changed files with 73 additions and 0 deletions.
@@ -76,8 +76,42 @@ cmake -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake"
   -DANDROID_ABI="arm64-v8a" \
   -DANDROID_PLATFORM=android-21 ..
 
+# Please use -DANDROID_PLATFORM=android-27 if you want to use Android NNAPI
+
 # make VERBOSE=1 -j4
 make -j4
 make install/strip
 cp -fv $onnxruntime_version/jni/arm64-v8a/libonnxruntime.so install/lib
 rm -rf install/lib/pkgconfig
+
+# To run the generated binaries on Android, please use the following steps.
+#
+#
+# 1. Copy sherpa-onnx and its dependencies to Android:
+#
+#      cd build-android-arm64-v8a/install/lib
+#      adb push ./lib*.so /data/local/tmp
+#      cd ../bin
+#      adb push ./sherpa-onnx /data/local/tmp
+#
+# 2. Log in to Android:
+#
+#      adb shell
+#      cd /data/local/tmp
+#      ./sherpa-onnx
+#
+#    which shows the following error log:
+#
+#      CANNOT LINK EXECUTABLE "./sherpa-onnx": library "libsherpa-onnx-core.so" not found: needed by main executable
+#
+#    Please run:
+#
+#      export LD_LIBRARY_PATH=$PWD:$LD_LIBRARY_PATH
+#
+#    and then you can run:
+#
+#      ./sherpa-onnx
+#
+#    It should show the help message of sherpa-onnx.
+#
+# Please use the same approach to copy model files to your phone.
@@ -22,6 +22,8 @@ Provider StringToProvider(std::string s) {
     return Provider::kCoreML;
   } else if (s == "xnnpack") {
     return Provider::kXnnpack;
+  } else if (s == "nnapi") {
+    return Provider::kNNAPI;
   } else {
     SHERPA_ONNX_LOGE("Unsupported string: %s. Fallback to cpu", s.c_str());
     return Provider::kCPU;
@@ -17,6 +17,7 @@ enum class Provider {
   kCUDA = 1,     // CUDAExecutionProvider
   kCoreML = 2,   // CoreMLExecutionProvider
   kXnnpack = 3,  // XnnpackExecutionProvider
+  kNNAPI = 4,    // NnapiExecutionProvider
 };
 
 /**
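The two hunks above add NNAPI to the provider abstraction: provider.h gains the kNNAPI enumerator and provider.cc maps the string "nnapi" onto it, while unknown strings keep falling back to the CPU provider. Below is a minimal, self-contained sketch of that pattern; it is not the actual sherpa-onnx source (branches not visible in the diff are omitted, kCPU = 0 is assumed, and SHERPA_ONNX_LOGE is replaced with fprintf here).

#include <cstdio>
#include <string>

// Simplified copy of the Provider enum; kCPU = 0 is assumed, the other
// values are as shown in the provider.h hunk above.
enum class Provider {
  kCPU = 0,
  kCUDA = 1,     // CUDAExecutionProvider
  kCoreML = 2,   // CoreMLExecutionProvider
  kXnnpack = 3,  // XnnpackExecutionProvider
  kNNAPI = 4,    // NnapiExecutionProvider
};

// Simplified StringToProvider(): only the branches visible in the diff are
// kept, and logging is replaced with fprintf for this sketch.
static Provider StringToProvider(std::string s) {
  if (s == "coreml") {
    return Provider::kCoreML;
  } else if (s == "xnnpack") {
    return Provider::kXnnpack;
  } else if (s == "nnapi") {
    return Provider::kNNAPI;
  } else {
    fprintf(stderr, "Unsupported string: %s. Fallback to cpu\n", s.c_str());
    return Provider::kCPU;
  }
}

int main() {
  // "nnapi" now resolves to the new enumerator; unknown strings fall back to CPU.
  printf("nnapi  -> %d\n", static_cast<int>(StringToProvider("nnapi")));   // prints 4
  printf("foobar -> %d\n", static_cast<int>(StringToProvider("foobar")));  // prints 0
  return 0;
}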
@@ -15,6 +15,10 @@
 #include "coreml_provider_factory.h"  // NOLINT
 #endif
 
+#if __ANDROID_API__ >= 27
+#include "nnapi_provider_factory.h"
+#endif
+
 namespace sherpa_onnx {
 
 static Ort::SessionOptions GetSessionOptionsImpl(int32_t num_threads,
@@ -77,6 +81,38 @@ static Ort::SessionOptions GetSessionOptionsImpl(int32_t num_threads,
 #endif
       break;
     }
+    case Provider::kNNAPI: {
+#if __ANDROID_API__ >= 27
+      SHERPA_ONNX_LOGE("Current API level %d ", (int32_t)__ANDROID_API__);
+
+      // Please see
+      // https://onnxruntime.ai/docs/execution-providers/NNAPI-ExecutionProvider.html#usage
+      // to enable different flags
+      uint32_t nnapi_flags = 0;
+      // nnapi_flags |= NNAPI_FLAG_USE_FP16;
+      // nnapi_flags |= NNAPI_FLAG_CPU_DISABLED;
+      OrtStatus *status = OrtSessionOptionsAppendExecutionProvider_Nnapi(
+          sess_opts, nnapi_flags);
+
+      if (status) {
+        const auto &api = Ort::GetApi();
+        const char *msg = api.GetErrorMessage(status);
+        SHERPA_ONNX_LOGE(
+            "Failed to enable NNAPI: %s. Available providers: %s. Fallback to "
+            "cpu",
+            msg, os.str().c_str());
+        api.ReleaseStatus(status);
+      } else {
+        SHERPA_ONNX_LOGE("Use nnapi");
+      }
+#else
+      SHERPA_ONNX_LOGE(
+          "Android NNAPI requires API level >= 27. Current API level %d "
+          "Fallback to cpu!",
+          (int32_t)__ANDROID_API__);
+#endif
+      break;
+    }
   }
 
   return sess_opts;
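The kNNAPI case above is guarded by __ANDROID_API__ >= 27 because nnapi_provider_factory.h and the NNAPI execution provider are only available from Android API level 27 on. For reference, here is a minimal standalone sketch (not part of the PR) of the same registration step using onnxruntime's C/C++ API; it assumes the program is built for Android against an onnxruntime library that includes the NNAPI execution provider, and it reduces error handling to printing the message.

#include <cstdint>
#include <cstdio>

#include "onnxruntime_cxx_api.h"

#if __ANDROID_API__ >= 27
#include "nnapi_provider_factory.h"
#endif

// Builds session options with the NNAPI execution provider appended,
// mirroring the kNNAPI case in the hunk above.
Ort::SessionOptions MakeNnapiSessionOptions(int32_t num_threads) {
  Ort::SessionOptions sess_opts;
  sess_opts.SetIntraOpNumThreads(num_threads);

#if __ANDROID_API__ >= 27
  // See
  // https://onnxruntime.ai/docs/execution-providers/NNAPI-ExecutionProvider.html#usage
  // for the available flags.
  uint32_t nnapi_flags = 0;
  // nnapi_flags |= NNAPI_FLAG_USE_FP16;      // allow reduced-precision fp16
  // nnapi_flags |= NNAPI_FLAG_CPU_DISABLED;  // do not use NNAPI's own CPU fallback
  OrtStatus *status =
      OrtSessionOptionsAppendExecutionProvider_Nnapi(sess_opts, nnapi_flags);
  if (status) {
    // NNAPI could not be enabled; report the error and keep the default
    // CPU execution provider.
    const auto &api = Ort::GetApi();
    fprintf(stderr, "Failed to enable NNAPI: %s. Falling back to CPU.\n",
            api.GetErrorMessage(status));
    api.ReleaseStatus(status);
  }
#else
  fprintf(stderr, "NNAPI requires __ANDROID_API__ >= 27; using CPU.\n");
#endif

  return sess_opts;
}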