xuning

Resolve new issues: successfully integrate the ncnn library and implement the OpenCV-based frame inversion (output rotation unified to 180°).

--- a/MainViewModel.kt
+++ b/MainViewModel.kt
@@ -33,7 +33,7 @@ import io.livekit.android.room.track.CameraPosition
 import io.livekit.android.room.track.LocalVideoTrack
 import io.livekit.android.room.track.LocalVideoTrackOptions
 import io.livekit.android.room.track.video.CameraCapturerUtils
-import io.livekit.android.track.processing.video.RVMNcnn
+import io.livekit.android.track.processing.video.OpencvVideoProcessor
 import io.livekit.android.util.LoggingLevel
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.asExecutor
@@ -42,14 +42,12 @@ import livekit.org.webrtc.EglBase
 
 @OptIn(ExperimentalCamera2Interop::class)
 class MainViewModel(application: Application) : AndroidViewModel(application) {
-    val eglBase = EglBase.create()
-    private val processor = RVMNcnn(eglBase)
+
     init {
         LiveKit.loggingLevel = LoggingLevel.INFO
-
     }
 
-
+    val eglBase = EglBase.create()
     val room = LiveKit.create(
         application,
         overrides = LiveKitOverrides(
@@ -57,25 +55,14 @@ class MainViewModel(application: Application) : AndroidViewModel(application) {
         ),
     )
 
-    private val virtualBackground = (AppCompatResources.getDrawable(application, R.drawable.background) as BitmapDrawable).bitmap
-
-    private var blur = 16f
-
+    private val processor = OpencvVideoProcessor()
 
     private var cameraProvider: CameraCapturerUtils.CameraProvider? = null
 
-    private var imageAnalysis = ImageAnalysis.Builder()
-        .setResolutionSelector(
-            ResolutionSelector.Builder()
-                // LocalVideoTrack has default aspect ratio 16:9 VideoPreset169.H720
-                // ImageAnalysis of CameraX has default aspect ratio 4:3
-                .setAspectRatioStrategy(AspectRatioStrategy.RATIO_16_9_FALLBACK_AUTO_STRATEGY)
-                .build(),
-        )
-        .build()
+
 
     init {
-        CameraXHelper.createCameraProvider(ProcessLifecycleOwner.get(), arrayOf(imageAnalysis)).let {
+        CameraXHelper.createCameraProvider(ProcessLifecycleOwner.get(), arrayOf()).let {
             if (it.isSupported(application)) {
                 CameraCapturerUtils.registerCameraProvider(it)
                 cameraProvider = it
@@ -99,46 +86,28 @@ class MainViewModel(application: Application) : AndroidViewModel(application) {
         super.onCleared()
         track.value?.stopCapture()
         room.release()
-        processor.dispose()
+
         cameraProvider?.let {
             CameraCapturerUtils.unregisterCameraProvider(it)
         }
     }
 
     fun toggleProcessor(): Boolean {
-        val newState = !processor.enabled
-        processor.enabled = newState
-        return newState
+        // OpencvVideoProcessor has no on/off switch; return true as a placeholder.
+        return true
     }
 
     fun decreaseBlur() {
-        // RVMNcnn does not support blur adjustment; method kept for sample-UI compatibility as a logged no-op.
-        blur = maxOf(0f, blur - 5)
-        android.util.Log.e("MainViewModel", "RVMNcnn: decreaseBlur noop, current blur=$blur, enabled=${processor.enabled}")
+        // No-op: OpencvVideoProcessor does not support blur.
     }
 
     fun increaseBlur() {
-        // RVMNcnn does not support blur adjustment; method kept for sample-UI compatibility as a logged no-op.
-        blur = minOf(50f, blur + 5)
-        android.util.Log.e("MainViewModel", "RVMNcnn: increaseBlur noop, current blur=$blur, enabled=${processor.enabled}")
+        // No-op: OpencvVideoProcessor does not support blur.
     }
 
     fun toggleVirtualBackground(): Boolean {
-        // Uses RVMNcnn's background-image interface.
-        // Returns true when a background was set, false when cleared.
-        val videoTrack = track.value
-        return if (videoTrack != null) {
-            // Simple toggle: set when currently unset, clear when already set.
-            // Native state cannot be read directly here; a boolean toggle can be driven by UI state.
-            val set = processor.updateBackgroundImage(virtualBackground)
-            if (!set) {
-                processor.updateBackgroundImage(null)
-            }
-            set
-        } else {
-            // Before capture starts, the background can be set directly.
-            processor.updateBackgroundImage(virtualBackground)
-        }
+        // No-op: OpencvVideoProcessor does not support background images; return false.
+        return false
     }
 
     fun flipCamera() {
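
The toggle methods above become placeholders because OpencvVideoProcessor exposes no switches yet. If the old on/off behavior is wanted back, one option is a thin subclass that short-circuits onFrameCaptured. The sketch below is not part of this commit — ToggleableOpencvVideoProcessor and its enabled flag are hypothetical.

package io.livekit.android.track.processing.video;

import androidx.annotation.Nullable;

import livekit.org.webrtc.VideoFrame;
import livekit.org.webrtc.VideoSink;

// Hypothetical subclass; 'enabled' does not exist in the committed class.
public class ToggleableOpencvVideoProcessor extends OpencvVideoProcessor {
    public volatile boolean enabled = true;

    @Nullable
    private VideoSink sink;

    @Override
    public void setSink(@Nullable VideoSink sink) {
        this.sink = sink; // keep our own reference; the parent's field is private
        super.setSink(sink);
    }

    @Override
    public void onFrameCaptured(VideoFrame frame) {
        if (enabled) {
            super.onFrameCaptured(frame); // native processing path
        } else if (sink != null) {
            sink.onFrame(frame); // pass-through, like the removed RVMNcnn's disabled mode
        }
    }
}

With such a subclass, toggleProcessor() could flip enabled and return the new state, as the RVMNcnn version did.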
--- /dev/null
+++ b/OpencvVideoProcessor.java
@@ -0,0 +1,160 @@
+package io.livekit.android.track.processing.video;
+
+import androidx.annotation.Nullable;
+
+import java.nio.ByteBuffer;
+
+import io.livekit.android.room.track.video.NoDropVideoProcessor;
+import livekit.org.webrtc.JavaI420Buffer;
+import livekit.org.webrtc.VideoFrame;
+import livekit.org.webrtc.VideoFrame.I420Buffer;
+import livekit.org.webrtc.VideoSink;
+
+/**
+ * OpencvVideoProcessor
+ * - Extends NoDropVideoProcessor.
+ * - Delegates all pixel processing to native code (cpp) via processI420ToI420.
+ * - Java avoids OpenCV; it only handles buffers and frame plumbing.
+ * - Output frame rotation is unified to 180 degrees.
+ */
+public class OpencvVideoProcessor extends NoDropVideoProcessor {
+
+    @Nullable
+    private VideoSink targetSink;
+
+    // Reusable direct buffers for the output I420 planes.
+    private ByteBuffer outY;
+    private ByteBuffer outU;
+    private ByteBuffer outV;
+    private int outYCapacity;
+    private int outUCapacity;
+    private int outVCapacity;
+
+    static {
+        try {
+            System.loadLibrary("rvmncnn");
+            android.util.Log.d("OpencvVideoProcessor", "System.loadLibrary(rvmncnn) success");
+        } catch (Throwable t) {
+            android.util.Log.e("OpencvVideoProcessor", "System.loadLibrary(rvmncnn) failed", t);
+        }
+    }
+
+    // Core native entry point: processes I420 in/out entirely in C++.
+    private static native boolean processI420ToI420(
+            ByteBuffer y, int yStride,
+            ByteBuffer u, int uStride,
+            ByteBuffer v, int vStride,
+            int width, int height, int rotation,
+            ByteBuffer outY, int outYStride,
+            ByteBuffer outU, int outUStride,
+            ByteBuffer outV, int outVStride
+    );
+
+    @Override
+    public void setSink(@Nullable VideoSink sink) {
+        this.targetSink = sink;
+    }
+
+    @Override
+    public void onCapturerStarted(boolean started) {
+        // No GL or Surface path here.
+    }
+
+    @Override
+    public void onCapturerStopped() {
+        // No-op.
+    }
+
+    @Override
+    public void onFrameCaptured(VideoFrame frame) {
+        final VideoSink sink = targetSink;
+        if (sink == null) return;
+
+        I420Buffer i420 = frame.getBuffer().toI420();
+        try {
+            final int width = i420.getWidth();
+            final int height = i420.getHeight();
+
+            final ByteBuffer y = i420.getDataY();
+            final ByteBuffer u = i420.getDataU();
+            final ByteBuffer v = i420.getDataV();
+            final int yStride = i420.getStrideY();
+            final int uStride = i420.getStrideU();
+            final int vStride = i420.getStrideV();
+
+            // Ensure output buffer capacity (output strides match input strides).
+            final int needY = yStride * height;
+            final int needU = uStride * (height / 2);
+            final int needV = vStride * (height / 2);
+            ensureOutBuffers(needY, needU, needV);
+
+            // JNI: C++ processes the frame fully and writes into the out buffers.
+            final boolean ok = processI420ToI420(
+                    y, yStride,
+                    u, uStride,
+                    v, vStride,
+                    width, height, frame.getRotation(),
+                    outY, yStride,
+                    outU, uStride,
+                    outV, vStride
+            );
+
+            if (!ok) {
+                // Fallback: pass the original frame through untouched.
+                sink.onFrame(frame);
+                return;
+            }
+
+            // Copy the processed planes into a freshly allocated WebRTC buffer so
+            // the reusable direct buffers can be overwritten by the next frame.
+            outY.position(0);
+            outU.position(0);
+            outV.position(0);
+
+            JavaI420Buffer outBuf = JavaI420Buffer.allocate(width, height);
+            copyPlane(outY, yStride, outBuf.getDataY(), outBuf.getStrideY(), width, height);
+            copyPlane(outU, uStride, outBuf.getDataU(), outBuf.getStrideU(), width / 2, height / 2);
+            copyPlane(outV, vStride, outBuf.getDataV(), outBuf.getStrideV(), width / 2, height / 2);
+
+            // Unify rotation to 180 via frame metadata.
+            VideoFrame outFrame = new VideoFrame(outBuf, 180, frame.getTimestampNs());
+            sink.onFrame(outFrame);
+            // The sink retains the frame if it needs it; the creator must release
+            // its own reference, otherwise the native buffer leaks.
+            outFrame.release();
+        } finally {
+            i420.release();
+        }
+    }
+
+    // Copy one plane row by row, honoring source and destination strides.
+    private static void copyPlane(ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int w, int h) {
+        for (int r = 0; r < h; r++) {
+            src.limit(r * srcStride + w).position(r * srcStride);
+            dst.position(r * dstStride);
+            dst.put(src);
+        }
+    }
+
+    private void ensureOutBuffers(int needY, int needU, int needV) {
+        if (outY == null || outYCapacity < needY) {
+            outYCapacity = roundUp(needY, 64);
+            outY = ByteBuffer.allocateDirect(outYCapacity);
+        }
+        if (outU == null || outUCapacity < needU) {
+            outUCapacity = roundUp(needU, 64);
+            outU = ByteBuffer.allocateDirect(outUCapacity);
+        }
+        if (outV == null || outVCapacity < needV) {
+            outVCapacity = roundUp(needV, 64);
+            outV = ByteBuffer.allocateDirect(outVCapacity);
+        }
+        outY.limit(needY).position(0);
+        outU.limit(needU).position(0);
+        outV.limit(needV).position(0);
+    }
+
+    private static int roundUp(int x, int a) {
+        return ((x + a - 1) / a) * a;
+    }
+}
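
A quick way to sanity-check the new processor without a camera is to push one synthetic I420 frame through it and inspect what reaches the sink. A minimal on-device sketch, assuming librvmncnn.so is packaged in the APK; when no RVM model has been loaded, the native side falls back to draw_unsupported (see the JNI code below), so a frame still comes back:

package io.livekit.android.track.processing.video;

import livekit.org.webrtc.JavaI420Buffer;
import livekit.org.webrtc.VideoFrame;

public class ProcessorSmokeTest {
    // Push one blank 640x480 frame through the processor and log what arrives.
    public static void run() {
        OpencvVideoProcessor processor = new OpencvVideoProcessor();
        processor.setSink(frame ->
                // Processed frames arrive with rotation unified to 180; on the
                // JNI-failure fallback the original frame (rotation 0) passes through.
                android.util.Log.d("ProcessorSmokeTest", "frame "
                        + frame.getBuffer().getWidth() + "x" + frame.getBuffer().getHeight()
                        + " rotation=" + frame.getRotation()));

        JavaI420Buffer buffer = JavaI420Buffer.allocate(640, 480);
        VideoFrame in = new VideoFrame(buffer, 0, 0L);
        processor.onFrameCaptured(in);
        in.release(); // the caller keeps ownership of its own reference
    }
}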
--- a/RVMNcnn.java
+++ /dev/null
@@ -1,142 +0,0 @@
-package io.livekit.android.track.processing.video;
-
-import android.content.res.AssetManager;
-import android.graphics.Bitmap;
-import android.view.Surface;
-
-import io.livekit.android.room.track.video.NoDropVideoProcessor;
-import livekit.org.webrtc.EglBase;
-import livekit.org.webrtc.SurfaceTextureHelper;
-import livekit.org.webrtc.VideoFrame;
-import livekit.org.webrtc.VideoSink;
-
-/**
- * RVMNcnn processor that delegates all pixel processing to native (cpp) and
- * renders processed frames directly into a Surface provided by SurfaceTextureHelper.
- * Java does not perform any image processing.
- */
-public class RVMNcnn extends NoDropVideoProcessor {
-
-    // Native JNI hooks
-    public native boolean loadModel(AssetManager mgr, int modelid, int sizeid, int intrainterid, int postprocid, int cpugpu);
-    public native boolean openCamera(int facing);
-    public native boolean closeCamera();
-    public native boolean setOutputWindow(Surface surface);
-    public native boolean setBackgroundImage(Bitmap bitmap);
-    public native boolean processFrame();
-
-    static {
-        try {
-            System.loadLibrary("rvmncnn");
-            android.util.Log.d("RVMNcnn", "System.loadLibrary(rvmncnn) success");
-        } catch (Throwable t) {
-            android.util.Log.e("RVMNcnn", "System.loadLibrary(rvmncnn) failed", t);
-        }
-    }
-
-    private final EglBase eglBase;
-    private final SurfaceTextureHelper surfaceTextureHelper;
-    private final Surface outputSurface;
-
-    private VideoSink targetSink;
-
-    /**
-     * Controls whether the native virtual background is enabled.
-     * When enabled, native renders to outputSurface and Java forwards those frames.
-     * When disabled, incoming frames are passed through to targetSink.
-     */
-    public boolean enabled = true;
-
-    /**
-     * Facing: 0 back, 1 front. Used when starting native camera pipeline.
-     */
-    private int facing = 1;
-
-    public RVMNcnn(EglBase eglBase) {
-        this.eglBase = eglBase;
-        this.surfaceTextureHelper = SurfaceTextureHelper.create("RVMNcnn", eglBase.getEglBaseContext());
-        this.outputSurface = new Surface(surfaceTextureHelper.getSurfaceTexture());
-    }
-
-    @Override
-    public void onCapturerStarted(boolean started) {
-        if (started) {
-            surfaceTextureHelper.setTextureSize(640, 640);
-            android.util.Log.d("RVMNcnn", "onCapturerStarted: setOutputWindow + openCamera");
-            // Listen to frames produced from the output surface (rendered by native),
-            // and forward to target sink.
-            surfaceTextureHelper.stopListening();
-            surfaceTextureHelper.startListening(frame -> {
-                VideoSink sink = targetSink;
-                if (sink != null) {
-                    sink.onFrame(frame);
-                }
-            });
-
-            if (enabled) {
-                // Direct native to render into our SurfaceTextureHelper's surface
-                setOutputWindow(outputSurface);
-                // Start native camera pipeline (cpp will process and render)
-                openCamera(facing);
-            }
-        }
-    }
-
-    @Override
-    public void onCapturerStopped() {
-        // Stop Java-side listening and shutdown native pipeline
-        surfaceTextureHelper.stopListening();
-        closeCamera();
-    }
-
-    @Override
-    public void onFrameCaptured(VideoFrame frame) {
-        // If disabled, pass-through original frames.
-        if (!enabled) {
-            VideoSink sink = targetSink;
-            if (sink != null) {
-                sink.onFrame(frame);
-            }
-            return;
-        }
-        // Enabled: Java does not process pixels nor forward original frames.
-        // Native renders processed frames into outputSurface, which we already forward above.
-        // Drop the incoming frame here.
-    }
-
-    @Override
-    public void setSink(VideoSink sink) {
-        this.targetSink = sink;
-    }
-
-    /**
-     * Update facing and restart native pipeline if needed.
-     * 0 = back, 1 = front.
-     */
-    public void setFacing(int facing) {
-        this.facing = facing == 0 ? 0 : 1;
-        if (enabled) {
-            // If running, restart native camera with new facing
-            closeCamera();
-            openCamera(this.facing);
-        }
-    }
-
-    /**
-     * Update the background image used by native processor.
-     * Pass null to clear.
-     */
-    public boolean updateBackgroundImage(Bitmap bitmap) {
-        return setBackgroundImage(bitmap);
-    }
-
-    /**
-     * Call when disposing the processor.
-     */
-    public void dispose() {
-        surfaceTextureHelper.stopListening();
-        closeCamera();
-        outputSurface.release();
-        surfaceTextureHelper.dispose();
-    }
-}
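
Two things are worth noting from the removed class. First, the design difference: RVMNcnn had native code render into a Surface backed by a SurfaceTextureHelper and forwarded whatever appeared there, dropping the real camera frames, while OpencvVideoProcessor processes each captured frame synchronously over JNI — which is why MainViewModel no longer needs the EglBase constructor argument or a dispose() call. Second, the teardown ordering its dispose() encoded, shown in isolation below as a sketch:

import android.view.Surface;
import livekit.org.webrtc.SurfaceTextureHelper;

class SurfacePipelineTeardown {
    // Stop frame delivery first so no callback races the release, then drop the
    // Surface, then dispose the helper (and with it the GL thread it owns).
    static void dispose(SurfaceTextureHelper helper, Surface surface) {
        helper.stopListening();
        surface.release();
        helper.dispose();
    }
}

The C++ hunks that follow are from the JNI source that builds librvmncnn.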
@@ -23,6 +23,7 @@
 
 #include <string>
 #include <vector>
+#include <cstring>
 
 #include <platform.h>
 #include <benchmark.h>
@@ -446,4 +447,95 @@ JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_RVMNcn
 
     return JNI_FALSE;
 }
+
+// Process I420 in/out without Java-side OpenCV.
+// Signature: Java_io_livekit_android_track_processing_video_OpencvVideoProcessor_processI420ToI420
+JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_OpencvVideoProcessor_processI420ToI420(
+        JNIEnv* env, jclass,
+        jobject yBuf, jint yStride,
+        jobject uBuf, jint uStride,
+        jobject vBuf, jint vStride,
+        jint width, jint height, jint rotation,
+        jobject outYBuf, jint outYStride,
+        jobject outUBuf, jint outUStride,
+        jobject outVBuf, jint outVStride)
+{
+    if (!yBuf || !uBuf || !vBuf || !outYBuf || !outUBuf || !outVBuf || width <= 0 || height <= 0)
+        return JNI_FALSE;
+
+    uint8_t* yPtr = (uint8_t*)env->GetDirectBufferAddress(yBuf);
+    uint8_t* uPtr = (uint8_t*)env->GetDirectBufferAddress(uBuf);
+    uint8_t* vPtr = (uint8_t*)env->GetDirectBufferAddress(vBuf);
+    uint8_t* outYPtr = (uint8_t*)env->GetDirectBufferAddress(outYBuf);
+    uint8_t* outUPtr = (uint8_t*)env->GetDirectBufferAddress(outUBuf);
+    uint8_t* outVPtr = (uint8_t*)env->GetDirectBufferAddress(outVBuf);
+
+    if (!yPtr || !uPtr || !vPtr || !outYPtr || !outUPtr || !outVPtr)
+        return JNI_FALSE;
+
+    // Pack the strided input planes into a contiguous I420 buffer.
+    const int yH = height;
+    const int uvH = height / 2;
+    const int yW = width;
+    const int uvW = width / 2;
+
+    const int ySize = yW * yH;
+    const int uSize = uvW * uvH;
+    const int vSize = uvW * uvH;
+
+    std::vector<uint8_t> i420_in(ySize + uSize + vSize);
+    uint8_t* inY = i420_in.data();
+    uint8_t* inU = inY + ySize;
+    uint8_t* inV = inU + uSize;
+
+    for (int r = 0; r < yH; ++r) {
+        memcpy(inY + r * yW, yPtr + r * yStride, yW);
+    }
+    for (int r = 0; r < uvH; ++r) {
+        memcpy(inU + r * uvW, uPtr + r * uStride, uvW);
+        memcpy(inV + r * uvW, vPtr + r * vStride, uvW);
+    }
+
+    // Wrap as a single-channel (H + H/2) x W Mat and convert to BGR.
+    cv::Mat i420_mat(height + height / 2, width, CV_8UC1, i420_in.data());
+    cv::Mat bgr;
+    cv::cvtColor(i420_mat, bgr, cv::COLOR_YUV2BGR_I420);
+
+    // Process with RVM.
+    {
+        ncnn::MutexLockGuard g(lock);
+        if (g_rvm) {
+            cv::Mat fgr, pha, seg;
+            g_rvm->detect(bgr, g_feats, fgr, pha, seg);
+            g_rvm->draw(bgr, fgr, pha, seg);
+        } else {
+            draw_unsupported(bgr);
+        }
+    }
+
+    // Convert back to I420.
+    cv::Mat i420_out;
+    cv::cvtColor(bgr, i420_out, cv::COLOR_BGR2YUV_I420);
+    if (i420_out.empty() || i420_out.cols != width || i420_out.rows != height + height / 2)
+        return JNI_FALSE;
+
+    const uint8_t* outBase = i420_out.ptr<uint8_t>(0);
+    const uint8_t* srcY = outBase;
+    const uint8_t* srcU = srcY + ySize;
+    const uint8_t* srcV = srcU + uSize;
+
+    // Write back to the output planes, honoring their strides.
+    for (int r = 0; r < yH; ++r) {
+        memcpy(outYPtr + r * outYStride, srcY + r * yW, yW);
+    }
+    for (int r = 0; r < uvH; ++r) {
+        memcpy(outUPtr + r * outUStride, srcU + r * uvW, uvW);
+        memcpy(outVPtr + r * outVStride, srcV + r * uvW, uvW);
+    }
+
+    // The input 'rotation' is intentionally ignored; rotation is unified to 180 at the Java metadata level.
+    (void)rotation;
+
+    return JNI_TRUE;
+}
 }
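
The packing step above leans on I420's fixed geometry: a full-resolution Y plane followed by two quarter-resolution chroma planes, which is why cv::COLOR_YUV2BGR_I420 accepts a single contiguous (height + height/2) x width CV_8UC1 Mat. The arithmetic for a 640x480 frame, written in Java to mirror the processor side:

class I420Geometry {
    // Plane sizes for width=640, height=480, matching the JNI packing above.
    static void example() {
        int width = 640, height = 480;
        int ySize = width * height;             // 307200 bytes: full-resolution luma
        int uSize = (width / 2) * (height / 2); // 76800 bytes: quarter-resolution chroma
        int vSize = uSize;                      // 76800 bytes
        int total = ySize + uSize + vSize;      // 460800 = width * height * 3 / 2
        // OpenCV reads the same bytes as one (height + height / 2) x width Mat:
        assert total == (height + height / 2) * width; // 720 * 640 = 460800
    }
}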