xuning

Implement portrait detection

... ... @@ -55,7 +55,16 @@ class MainViewModel(application: Application) : AndroidViewModel(application) {
),
)
private val processor = OpencvVideoProcessor()
private val processor = OpencvVideoProcessor().apply {
// Load the model up front so frames get matted instead of the "unsupported" overlay
// modelId=0 (mobilenetv3), sizeId=0 (256), intraInter=1, postproc=0 (fast), cpuGpu=0 (CPU)
try {
val ok = loadModel(application.assets, 0, 0, 1, 0, 0)
android.util.Log.d("MainViewModel", "OpencvVideoProcessor.loadModel result=$ok")
} catch (t: Throwable) {
android.util.Log.e("MainViewModel", "loadModel failed", t)
}
}
private var cameraProvider: CameraCapturerUtils.CameraProvider? = null
... ...
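Loading the ncnn param/bin files from assets and building the net is blocking work, and the .apply { ... } block above runs it on the main thread while the ViewModel is constructed. A minimal alternative sketch, not part of the patch, that defers the load to a background coroutine via viewModelScope and Dispatchers.IO; the loadModel arguments are the same ones shown above, and frames that arrive before the load finishes simply get the "unsupported" overlay:

    import android.app.Application
    import android.util.Log
    import androidx.lifecycle.AndroidViewModel
    import androidx.lifecycle.viewModelScope
    import kotlinx.coroutines.Dispatchers
    import kotlinx.coroutines.launch

    class MainViewModel(application: Application) : AndroidViewModel(application) {

        private val processor = OpencvVideoProcessor()

        init {
            // Sketch: move asset I/O and ncnn graph setup off the main thread.
            viewModelScope.launch(Dispatchers.IO) {
                try {
                    val ok = processor.loadModel(application.assets, 0, 0, 1, 0, 0)
                    Log.d("MainViewModel", "OpencvVideoProcessor.loadModel result=$ok")
                } catch (t: Throwable) {
                    Log.e("MainViewModel", "loadModel failed", t)
                }
            }
        }
    }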
... ... @@ -38,6 +38,9 @@ public class OpencvVideoProcessor extends NoDropVideoProcessor {
}
}
// Loads the matting model; call it before processing to avoid the "unsupported" overlay
public native boolean loadModel(android.content.res.AssetManager mgr, int modelId, int sizeId, int intraInterId, int postprocId, int cpuGpu);
// Core native method that takes I420 in and returns I420 out, processed entirely in C++
private static native boolean processI420ToI420(
ByteBuffer y, int yStride,
... ...
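The five integer arguments are indices into tables in the native layer (modeltypes, sizetypes, and the postproc/cpuGpu branches inside loadModel below). A hedged Kotlin sketch of naming them on the caller side so call sites avoid bare magic numbers; the constant names are illustrative, only the values come from the JNI implementation:

    // Illustrative constants; the names are hypothetical, the values mirror the native loadModel.
    object RvmConfig {
        // modelId -> modeltypes[]
        const val MODEL_MOBILENETV3 = 0
        const val MODEL_RESNET50 = 1

        // sizeId -> sizetypes[] = {256, 320, 384, 448, 512, 576, 640}
        const val SIZE_256 = 0
        const val SIZE_640 = 6

        // postprocId: 0 is the "fast" mode used by MainViewModel
        const val POSTPROC_FAST = 0

        // cpuGpu: 0 = CPU, 1 = Vulkan GPU, 2 = Turnip (libvulkan_freedreno.so)
        const val BACKEND_CPU = 0
        const val BACKEND_GPU = 1
        const val BACKEND_TURNIP = 2
    }

    // e.g. loadModel(assets, RvmConfig.MODEL_MOBILENETV3, RvmConfig.SIZE_256, 1,
    //                RvmConfig.POSTPROC_FAST, RvmConfig.BACKEND_CPU)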
... ... @@ -287,6 +287,93 @@ JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_RVMNcn
return JNI_TRUE;
}
// loadModel duplicated for OpencvVideoProcessor, mirroring the RVMNcnn entry point above
JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_OpencvVideoProcessor_loadModel(JNIEnv* env, jobject thiz, jobject assetManager, jint modelid, jint sizeid, jint intrainterid, jint postprocid, jint cpugpu)
{
if (modelid < 0 || modelid > 1 || sizeid < 0 || sizeid > 6 || intrainterid < 0 || intrainterid > 1 || postprocid < 0 || postprocid > 2 || cpugpu < 0 || cpugpu > 2)
{
return JNI_FALSE;
}
AAssetManager* mgr = AAssetManager_fromJava(env, assetManager);
__android_log_print(ANDROID_LOG_DEBUG, "ncnn", "loadModel %p (OpencvVideoProcessor)", mgr);
const char* modeltypes[2] =
{
"mobilenetv3",
"resnet50"
};
const int sizetypes[7] =
{
256,
320,
384,
448,
512,
576,
640
};
std::string parampath = std::string("rvm_") + modeltypes[(int)modelid] + ".ncnn.param";
std::string modelpath = std::string("rvm_") + modeltypes[(int)modelid] + ".ncnn.bin";
bool use_gpu = (int)cpugpu == 1;
bool use_turnip = (int)cpugpu == 2;
{
ncnn::MutexLockGuard g(lock);
{
// reset the recurrent (inter-frame) features
g_feats.r1.release();
g_feats.r2.release();
g_feats.r3.release();
g_feats.r4.release();
// recreate the net only when the model or backend changes
static int old_modelid = 0;
static int old_cpugpu = 0;
if (modelid != old_modelid || cpugpu != old_cpugpu)
{
delete g_rvm;
g_rvm = 0;
}
old_modelid = modelid;
old_cpugpu = cpugpu;
// tear down any existing Vulkan instance before selecting the requested backend
ncnn::destroy_gpu_instance();
if (use_turnip)
{
ncnn::create_gpu_instance("libvulkan_freedreno.so");
}
else if (use_gpu)
{
ncnn::create_gpu_instance();
}
if (!g_rvm)
{
g_rvm = new RVM;
g_rvm->load(mgr, parampath.c_str(), modelpath.c_str(), use_gpu || use_turnip);
}
g_rvm->set_model_type((int)modelid);
g_rvm->set_target_size(sizetypes[(int)sizeid]);
g_rvm->set_intra_inter((int)intrainterid);
if (postprocid == 0)
g_rvm->set_postproc_mode(false, true, false);
if (postprocid == 1)
g_rvm->set_postproc_mode(false, false, true);
if (postprocid == 2)
g_rvm->set_postproc_mode(true, false, false);
}
}
return JNI_TRUE;
}
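Since loadModel clears the recurrent features and, when the model or backend changes, deletes the net and recreates the Vulkan instance under the mutex, it can also be called again on an already-created processor to change configuration at runtime. A hedged caller-side sketch; the helper name and the assumption that a processor and AssetManager are at hand are mine, not part of the patch:

    // Sketch: switch an existing processor to the Vulkan backend with 640 input.
    // cpuGpu = 1 picks the default Vulkan device, 2 picks Turnip (libvulkan_freedreno.so).
    fun switchToGpu(processor: OpencvVideoProcessor, assets: android.content.res.AssetManager): Boolean =
        processor.loadModel(assets, /* modelId */ 0, /* sizeId */ 6,
            /* intraInter */ 1, /* postproc */ 0, /* cpuGpu */ 1)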
// public native boolean openCamera(int facing);
JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_RVMNcnn_openCamera(JNIEnv* env, jobject thiz, jint facing)
{
... ... @@ -498,24 +585,41 @@ JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_Opencv
// Wrap the packed I420 buffer as a single-channel (H + H/2) x W Mat and convert to RGB for the model
cv::Mat i420_mat(height + height / 2, width, CV_8UC1, i420_in.data());
cv::Mat bgr;
cv::cvtColor(i420_mat, bgr, cv::COLOR_YUV2BGR_I420);
cv::Mat rgb;
cv::cvtColor(i420_mat, rgb, cv::COLOR_YUV2RGB_I420);
// Rotate to upright orientation for the model
if (rotation == 90) {
cv::rotate(rgb, rgb, cv::ROTATE_90_CLOCKWISE);
} else if (rotation == 180) {
cv::rotate(rgb, rgb, cv::ROTATE_180);
} else if (rotation == 270) {
cv::rotate(rgb, rgb, cv::ROTATE_90_COUNTERCLOCKWISE);
}
// Process with RVM
{
ncnn::MutexLockGuard g(lock);
if (g_rvm) {
cv::Mat fgr, pha, seg;
g_rvm->detect(bgr, g_feats, fgr, pha, seg);
g_rvm->draw(bgr, fgr, pha, seg);
g_rvm->detect(rgb, g_feats, fgr, pha, seg);
g_rvm->draw(rgb, fgr, pha, seg);
} else {
draw_unsupported(bgr);
draw_unsupported(rgb);
}
}
// Rotate back to the original orientation before converting back to I420
if (rotation == 90) {
cv::rotate(rgb, rgb, cv::ROTATE_90_COUNTERCLOCKWISE);
} else if (rotation == 180) {
cv::rotate(rgb, rgb, cv::ROTATE_180);
} else if (rotation == 270) {
cv::rotate(rgb, rgb, cv::ROTATE_90_CLOCKWISE);
}
// Convert back to I420
cv::Mat i420_out;
cv::cvtColor(bgr, i420_out, cv::COLOR_BGR2YUV_I420);
cv::cvtColor(rgb, i420_out, cv::COLOR_RGB2YUV_I420);
if (i420_out.empty() || i420_out.cols != width || i420_out.rows != height + height / 2)
return JNI_FALSE;
... ...
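The rotation round-trip is what keeps the output buffer valid: a 90°/270° rotation swaps width and height for the model, and the inverse rotation restores the original width x height that COLOR_RGB2YUV_I420 and the (height + height/2) x width size check expect. A small Kotlin sketch of that mapping, with illustrative helper names that are not part of the patch:

    // Size of the upright frame fed to the model.
    fun uprightSize(width: Int, height: Int, rotation: Int): Pair<Int, Int> =
        if (rotation == 90 || rotation == 270) height to width else width to height

    // Rotation that undoes the upright rotation before converting back to I420
    // (ROTATE_90_CLOCKWISE is undone by ROTATE_90_COUNTERCLOCKWISE and vice versa).
    fun inverseRotation(rotation: Int): Int = when (rotation) {
        90 -> 270
        180 -> 180
        270 -> 90
        else -> 0
    }

    // e.g. a 1280x720 frame with rotation = 90 is matted as 720x1280, then rotated
    // back so the I420 output is again (720 + 360) rows by 1280 columns.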