Showing 4 changed files with 172 additions and 287 deletions
| @@ -33,7 +33,7 @@ import io.livekit.android.room.track.CameraPosition | @@ -33,7 +33,7 @@ import io.livekit.android.room.track.CameraPosition | ||
| 33 | import io.livekit.android.room.track.LocalVideoTrack | 33 | import io.livekit.android.room.track.LocalVideoTrack |
| 34 | import io.livekit.android.room.track.LocalVideoTrackOptions | 34 | import io.livekit.android.room.track.LocalVideoTrackOptions |
| 35 | import io.livekit.android.room.track.video.CameraCapturerUtils | 35 | import io.livekit.android.room.track.video.CameraCapturerUtils |
| 36 | -import io.livekit.android.track.processing.video.VirtualBackgroundVideoProcessor | 36 | +import io.livekit.android.track.processing.video.RVMNcnn |
| 37 | import io.livekit.android.util.LoggingLevel | 37 | import io.livekit.android.util.LoggingLevel |
| 38 | import kotlinx.coroutines.Dispatchers | 38 | import kotlinx.coroutines.Dispatchers |
| 39 | import kotlinx.coroutines.asExecutor | 39 | import kotlinx.coroutines.asExecutor |
| @@ -45,6 +45,16 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | @@ -45,6 +45,16 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | ||
| 45 | 45 | ||
| 46 | init { | 46 | init { |
| 47 | LiveKit.loggingLevel = LoggingLevel.INFO | 47 | LiveKit.loggingLevel = LoggingLevel.INFO |
| 48 | + // Load the RVM model: mobilenetv3, target size 640 (sizeid=6), intra/inter=0, postproc=1 (fast), CPU (cpugpu=0) | ||
| 49 | + // For GPU, set cpugpu=1, or 2 to use turnip | ||
| 50 | + processor.loadModel( | ||
| 51 | + application.getAssets(), | ||
| 52 | + /* modelid */ 0, | ||
| 53 | + /* sizeid */ 6, | ||
| 54 | + /* intrainterid */ 0, | ||
| 55 | + /* postprocid */ 1, | ||
| 56 | + /* cpugpu */ 0 | ||
| 57 | + ) | ||
| 48 | } | 58 | } |
| 49 | 59 | ||
| 50 | val eglBase = EglBase.create() | 60 | val eglBase = EglBase.create() |
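Review note: the magic numbers passed to `loadModel` are easy to misread. Below is a minimal Kotlin sketch of named wrappers, derived only from the comment above (modelid 0 = mobilenetv3, sizeid 6 = 640, postproc 1 = fast, cpugpu 0/1/2 = CPU/GPU/turnip); any values beyond those are assumptions to be checked against the native id tables.

```kotlin
// Named ids for RVMNcnn.loadModel(). Only the values used in this diff are
// documented in the comment above; the rest of each native table is not
// covered here and would need checking against the cpp sources.
object RvmModelConfig {
    const val MODEL_MOBILENETV3 = 0   // modelid
    const val SIZE_640 = 6            // sizeid: 640-pixel target size
    const val INTRA_INTER_DEFAULT = 0 // intrainterid
    const val POSTPROC_FAST = 1       // postprocid
    const val CPU = 0                 // cpugpu
    const val GPU = 1
    const val GPU_TURNIP = 2
}

// Equivalent to the call added in this diff:
// processor.loadModel(
//     application.assets,
//     RvmModelConfig.MODEL_MOBILENETV3,
//     RvmModelConfig.SIZE_640,
//     RvmModelConfig.INTRA_INTER_DEFAULT,
//     RvmModelConfig.POSTPROC_FAST,
//     RvmModelConfig.CPU,
// )
```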
| @@ -58,10 +68,7 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | @@ -58,10 +68,7 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | ||
| 58 | private val virtualBackground = (AppCompatResources.getDrawable(application, R.drawable.background) as BitmapDrawable).bitmap | 68 | private val virtualBackground = (AppCompatResources.getDrawable(application, R.drawable.background) as BitmapDrawable).bitmap |
| 59 | 69 | ||
| 60 | private var blur = 16f | 70 | private var blur = 16f |
| 61 | - private val processor = VirtualBackgroundVideoProcessor(eglBase, Dispatchers.IO, initialBlurRadius = blur).apply { | ||
| 62 | - // Leave the background image unset initially so the blur feature works | ||
| 63 | - // backgroundImage = virtualBackground | ||
| 64 | - } | 71 | + private val processor = RVMNcnn(eglBase) |
| 65 | 72 | ||
| 66 | private var cameraProvider: CameraCapturerUtils.CameraProvider? = null | 73 | private var cameraProvider: CameraCapturerUtils.CameraProvider? = null |
| 67 | 74 | ||
| @@ -74,7 +81,6 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | @@ -74,7 +81,6 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | ||
| 74 | .build(), | 81 | .build(), |
| 75 | ) | 82 | ) |
| 76 | .build() | 83 | .build() |
| 77 | - .apply { setAnalyzer(Dispatchers.IO.asExecutor(), processor.imageAnalyzer) } | ||
| 78 | 84 | ||
| 79 | init { | 85 | init { |
| 80 | CameraXHelper.createCameraProvider(ProcessLifecycleOwner.get(), arrayOf(imageAnalysis)).let { | 86 | CameraXHelper.createCameraProvider(ProcessLifecycleOwner.get(), arrayOf(imageAnalysis)).let { |
| @@ -114,24 +120,32 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | @@ -114,24 +120,32 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | ||
| 114 | } | 120 | } |
| 115 | 121 | ||
| 116 | fun decreaseBlur() { | 122 | fun decreaseBlur() { |
| 117 | - blur = maxOf(0f, blur - 5) // keep blur from going below 0 | ||
| 118 | - android.util.Log.e("MainViewModel", "=== DECREASING BLUR TO: $blur, processor enabled: ${processor.enabled} ===") | ||
| 119 | - processor.updateBlurRadius(blur) | 123 | + // RVMNcnn does not support blur adjustment; keep the method for sample-UI compatibility as a logged no-op |
| 124 | + blur = maxOf(0f, blur - 5) | ||
| 125 | + android.util.Log.e("MainViewModel", "RVMNcnn: decreaseBlur noop, current blur=$blur, enabled=${processor.enabled}") | ||
| 120 | } | 126 | } |
| 121 | 127 | ||
| 122 | fun increaseBlur() { | 128 | fun increaseBlur() { |
| 123 | - blur = minOf(50f, blur + 5) // cap blur at 50 to avoid over-blurring | ||
| 124 | - android.util.Log.e("MainViewModel", "=== INCREASING BLUR TO: $blur, processor enabled: ${processor.enabled} ===") | ||
| 125 | - processor.updateBlurRadius(blur) | 129 | + // RVMNcnn does not support blur adjustment; keep the method for sample-UI compatibility as a logged no-op |
| 130 | + blur = minOf(50f, blur + 5) | ||
| 131 | + android.util.Log.e("MainViewModel", "RVMNcnn: increaseBlur noop, current blur=$blur, enabled=${processor.enabled}") | ||
| 126 | } | 132 | } |
| 127 | 133 | ||
| 128 | fun toggleVirtualBackground(): Boolean { | 134 | fun toggleVirtualBackground(): Boolean { |
| 129 | - if (processor.backgroundImage != virtualBackground) { | ||
| 130 | - processor.backgroundImage = virtualBackground | ||
| 131 | - return true | 135 | + // Use RVMNcnn's background-image API |
| 136 | + // Returning true means a background was set; false means it was cleared | ||
| 137 | + val videoTrack = track.value | ||
| 138 | + return if (videoTrack != null) { | ||
| 139 | + // Simple toggle: set the background when unset, clear it when already set | ||
| 140 | + // The native state cannot be read directly here; a boolean toggle driven by UI state can stand in | ||
| 141 | + val set = processor.updateBackgroundImage(virtualBackground) | ||
| 142 | + if (!set) { | ||
| 143 | + processor.updateBackgroundImage(null) | ||
| 144 | + } | ||
| 145 | + set | ||
| 132 | } else { | 146 | } else { |
| 133 | - processor.backgroundImage = null | ||
| 134 | - return false | 147 | + // Before capture has started, the background can be set directly |
| 148 | + processor.updateBackgroundImage(virtualBackground) | ||
| 135 | } | 149 | } |
| 136 | } | 150 | } |
| 137 | 151 |
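Review note: `updateBackgroundImage` returns whether the native call succeeded, not whether a background is currently applied, so the branch above sets the background on every call rather than truly toggling. A sketch of a ViewModel-side toggle that tracks the applied state locally (uses the same `processor` and `virtualBackground` fields as above):

```kotlin
// Track the applied state in the ViewModel, since the native state is not readable.
private var backgroundApplied = false

fun toggleVirtualBackground(): Boolean {
    val wantBackground = !backgroundApplied
    // Passing null clears the background on the native side.
    val ok = processor.updateBackgroundImage(if (wantBackground) virtualBackground else null)
    if (ok) backgroundApplied = wantBackground
    return backgroundApplied
}
```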
| 1 | -// OpenCVVideoProcessor.kt | ||
| 2 | -package io.livekit.android.track.processing.video | ||
| 3 | - | ||
| 4 | -import android.graphics.Bitmap | ||
| 5 | -import android.view.Surface | ||
| 6 | -import io.livekit.android.room.track.video.NoDropVideoProcessor | ||
| 7 | -import kotlinx.coroutines.CoroutineDispatcher | ||
| 8 | -import kotlinx.coroutines.CoroutineScope | ||
| 9 | -import kotlinx.coroutines.Dispatchers | ||
| 10 | -import kotlinx.coroutines.cancel | ||
| 11 | -import kotlinx.coroutines.channels.BufferOverflow | ||
| 12 | -import kotlinx.coroutines.flow.MutableSharedFlow | ||
| 13 | -import kotlinx.coroutines.launch | ||
| 14 | -import livekit.org.webrtc.EglBase | ||
| 15 | -import livekit.org.webrtc.EglRenderer | ||
| 16 | -import livekit.org.webrtc.GlUtil | ||
| 17 | -import livekit.org.webrtc.SurfaceTextureHelper | ||
| 18 | -import livekit.org.webrtc.VideoFrame | ||
| 19 | -import livekit.org.webrtc.VideoSink | ||
| 20 | -import org.opencv.android.Utils | ||
| 21 | -import org.opencv.core.CvType | ||
| 22 | -import org.opencv.core.Mat | ||
| 23 | -import java.nio.ByteBuffer | ||
| 24 | -import java.nio.ByteOrder | ||
| 25 | -import kotlin.math.roundToInt | ||
| 26 | - | ||
| 27 | -/** | ||
| 28 | - * OpenCV-based video processor using ncnn RVM for real-time video processing. | ||
| 29 | - * Inherits from NoDropVideoProcessor to ensure frames are processed even when not published. | ||
| 30 | - */ | ||
| 31 | -class OpenCVVideoProcessor( | ||
| 32 | - private val eglBase: EglBase, | ||
| 33 | - dispatcher: CoroutineDispatcher = Dispatchers.Default, | ||
| 34 | -) : NoDropVideoProcessor() { | ||
| 35 | - | ||
| 36 | - private var targetSink: VideoSink? = null | ||
| 37 | - private val surfaceTextureHelper = SurfaceTextureHelper.create("OpenCVProcessor", eglBase.eglBaseContext) | ||
| 38 | - private val surface = Surface(surfaceTextureHelper.surfaceTexture) | ||
| 39 | - | ||
| 40 | - private val eglRenderer = EglRenderer(OpenCVVideoProcessor::class.java.simpleName) | ||
| 41 | - .apply { | ||
| 42 | - init(eglBase.eglBaseContext, EglBase.CONFIG_PLAIN, null) | ||
| 43 | - createEglSurface(surface) | ||
| 44 | - } | ||
| 45 | - | ||
| 46 | - private val scope = CoroutineScope(dispatcher) | ||
| 47 | - private val taskFlow = MutableSharedFlow<VideoFrame>( | ||
| 48 | - replay = 0, | ||
| 49 | - extraBufferCapacity = 1, | ||
| 50 | - onBufferOverflow = BufferOverflow.DROP_OLDEST, | ||
| 51 | - ) | ||
| 52 | - | ||
| 53 | - // RVM ncnn instance | ||
| 54 | - private val rvmNcnn = com.tencent.rvmncnn.RVMNcnn() | ||
| 55 | - | ||
| 56 | - private var lastRotation = 0 | ||
| 57 | - private var lastWidth = 0 | ||
| 58 | - private var lastHeight = 0 | ||
| 59 | - | ||
| 60 | - /** | ||
| 61 | - * Enable or disable RVM processing | ||
| 62 | - */ | ||
| 63 | - var enabled: Boolean = true | ||
| 64 | - | ||
| 65 | - /** | ||
| 66 | - * Background image to use for virtual background | ||
| 67 | - * If null, will use default background | ||
| 68 | - */ | ||
| 69 | - var backgroundImage: Bitmap? = null | ||
| 70 | - | ||
| 71 | - init { | ||
| 72 | - // Initialize processing pipeline | ||
| 73 | - scope.launch { | ||
| 74 | - taskFlow.collect { frame -> | ||
| 75 | - processFrame(frame) | ||
| 76 | - frame.release() | ||
| 77 | - } | ||
| 78 | - } | ||
| 79 | - } | ||
| 80 | - | ||
| 81 | - override fun onCapturerStarted(started: Boolean) { | ||
| 82 | - if (started) { | ||
| 83 | - surfaceTextureHelper.stopListening() | ||
| 84 | - surfaceTextureHelper.startListening { frame -> | ||
| 85 | - targetSink?.onFrame(frame) | ||
| 86 | - } | ||
| 87 | - } | ||
| 88 | - } | ||
| 89 | - | ||
| 90 | - override fun onCapturerStopped() { | ||
| 91 | - surfaceTextureHelper.stopListening() | ||
| 92 | - } | ||
| 93 | - | ||
| 94 | - override fun onFrameCaptured(frame: VideoFrame) { | ||
| 95 | - // If disabled, just pass through | ||
| 96 | - if (!enabled) { | ||
| 97 | - targetSink?.onFrame(frame) | ||
| 98 | - return | ||
| 99 | - } | ||
| 100 | - | ||
| 101 | - try { | ||
| 102 | - frame.retain() | ||
| 103 | - } catch (e: Exception) { | ||
| 104 | - return | ||
| 105 | - } | ||
| 106 | - | ||
| 107 | - // Submit frame for processing | ||
| 108 | - if (!taskFlow.tryEmit(frame)) { | ||
| 109 | - frame.release() | ||
| 110 | - } | ||
| 111 | - } | ||
| 112 | - | ||
| 113 | - override fun setSink(sink: VideoSink?) { | ||
| 114 | - targetSink = sink | ||
| 115 | - } | ||
| 116 | - | ||
| 117 | - private fun processFrame(frame: VideoFrame) { | ||
| 118 | - if (lastRotation != frame.rotation || | ||
| 119 | - lastWidth != frame.rotatedWidth || | ||
| 120 | - lastHeight != frame.rotatedHeight) { | ||
| 121 | - | ||
| 122 | - surfaceTextureHelper.setTextureSize(frame.rotatedWidth, frame.rotatedHeight) | ||
| 123 | - lastRotation = frame.rotation | ||
| 124 | - lastWidth = frame.rotatedWidth | ||
| 125 | - lastHeight = frame.rotatedHeight | ||
| 126 | - } | ||
| 127 | - | ||
| 128 | - frame.retain() | ||
| 129 | - surfaceTextureHelper.handler.post { | ||
| 130 | - try { | ||
| 131 | - // Convert VideoFrame to OpenCV Mat | ||
| 132 | - val rgbaMat = videoFrameToMat(frame) | ||
| 133 | - | ||
| 134 | - if (rgbaMat != null && !rgbaMat.empty()) { | ||
| 135 | - // Process with RVM | ||
| 136 | - val success = rvmNcnn.processFrame( | ||
| 137 | - rgbaMat.nativeObjAddr, | ||
| 138 | - rgbaMat.cols(), | ||
| 139 | - rgbaMat.rows(), | ||
| 140 | - frame.rotation | ||
| 141 | - ) | ||
| 142 | - | ||
| 143 | - if (success) { | ||
| 144 | - // Convert processed Mat back to texture and render | ||
| 145 | - val processedFrame = matToVideoFrame(rgbaMat, frame) | ||
| 146 | - eglRenderer.onFrame(processedFrame) | ||
| 147 | - processedFrame.release() | ||
| 148 | - } else { | ||
| 149 | - // If processing failed, pass through original frame | ||
| 150 | - eglRenderer.onFrame(frame) | ||
| 151 | - } | ||
| 152 | - | ||
| 153 | - rgbaMat.release() | ||
| 154 | - } else { | ||
| 155 | - eglRenderer.onFrame(frame) | ||
| 156 | - } | ||
| 157 | - } catch (e: Exception) { | ||
| 158 | - // Fallback to original frame on error | ||
| 159 | - eglRenderer.onFrame(frame) | ||
| 160 | - } | ||
| 161 | - frame.release() | ||
| 162 | - } | ||
| 163 | - } | ||
| 164 | - | ||
| 165 | - private fun videoFrameToMat(frame: VideoFrame): Mat? { | ||
| 166 | - return try { | ||
| 167 | - val i420Buffer = frame.buffer.toI420() | ||
| 168 | - val yPlane = i420Buffer.dataY | ||
| 169 | - val uPlane = i420Buffer.dataU | ||
| 170 | - val vPlane = i420Buffer.dataV | ||
| 171 | - | ||
| 172 | - val yRowStride = i420Buffer.strideY | ||
| 173 | - val uvRowStride = i420Buffer.strideU | ||
| 174 | - val uvPixelStride = i420Buffer.strideU // Simplified | ||
| 175 | - | ||
| 176 | - val width = i420Buffer.width | ||
| 177 | - val height = i420Buffer.height | ||
| 178 | - | ||
| 179 | - // Convert I420 to RGBA | ||
| 180 | - val rgbaMat = Mat(height, width, CvType.CV_8UC4) | ||
| 181 | - | ||
| 182 | - // This is a simplified conversion - in production you'd want a proper YUV to RGBA conversion | ||
| 183 | - // For now, we'll create a placeholder implementation | ||
| 184 | - convertI420ToRGBA(yPlane, uPlane, vPlane, yRowStride, uvRowStride, uvPixelStride, width, height, rgbaMat) | ||
| 185 | - | ||
| 186 | - i420Buffer.release() | ||
| 187 | - rgbaMat | ||
| 188 | - } catch (e: Exception) { | ||
| 189 | - null | ||
| 190 | - } | ||
| 191 | - } | ||
| 192 | - | ||
| 193 | - private fun convertI420ToRGBA( | ||
| 194 | - yPlane: ByteBuffer, | ||
| 195 | - uPlane: ByteBuffer, | ||
| 196 | - vPlane: ByteBuffer, | ||
| 197 | - yRowStride: Int, | ||
| 198 | - uvRowStride: Int, | ||
| 199 | - uvPixelStride: Int, | ||
| 200 | - width: Int, | ||
| 201 | - height: Int, | ||
| 202 | - rgbaMat: Mat | ||
| 203 | - ) { | ||
| 204 | - // Placeholder implementation - you'd need proper YUV to RGBA conversion | ||
| 205 | - // This is a simplified version that just creates a test pattern | ||
| 206 | - val rgbaData = ByteArray(width * height * 4) | ||
| 207 | - var index = 0 | ||
| 208 | - | ||
| 209 | - for (y in 0 until height) { | ||
| 210 | - for (x in 0 until width) { | ||
| 211 | - val yIndex = (y * yRowStride) + x | ||
| 212 | - val uvIndex = ((y / 2) * uvRowStride) + ((x / 2) * uvPixelStride) | ||
| 213 | - | ||
| 214 | - val yValue = yPlane[yIndex].toInt() and 0xFF | ||
| 215 | - val uValue = uPlane[uvIndex].toInt() and 0xFF | ||
| 216 | - val vValue = vPlane[uvIndex].toInt() and 0xFF | ||
| 217 | - | ||
| 218 | - // Simple YUV to RGB conversion (simplified) | ||
| 219 | - val r = (1.164 * (yValue - 16) + 1.596 * (vValue - 128)).toInt().coerceIn(0, 255) | ||
| 220 | - val g = (1.164 * (yValue - 16) - 0.813 * (vValue - 128) - 0.391 * (uValue - 128)).toInt().coerceIn(0, 255) | ||
| 221 | - val b = (1.164 * (yValue - 16) + 2.018 * (uValue - 128)).toInt().coerceIn(0, 255) | ||
| 222 | - | ||
| 223 | - rgbaData[index++] = b.toByte() | ||
| 224 | - rgbaData[index++] = g.toByte() | ||
| 225 | - rgbaData[index++] = r.toByte() | ||
| 226 | - rgbaData[index++] = 255.toByte() // Alpha | ||
| 227 | - } | ||
| 228 | - } | ||
| 229 | - | ||
| 230 | - rgbaMat.put(0, 0, rgbaData) | ||
| 231 | - } | ||
| 232 | - | ||
| 233 | - private fun matToVideoFrame(mat: Mat, originalFrame: VideoFrame): VideoFrame { | ||
| 234 | - // Convert RGBA Mat back to I420 buffer | ||
| 235 | - // This is a simplified implementation - you'd need proper RGBA to I420 conversion | ||
| 236 | - val i420Buffer = originalFrame.buffer // Reuse original buffer format for simplicity | ||
| 237 | - | ||
| 238 | - // In production, you'd convert the RGBA mat back to I420 format | ||
| 239 | - // and create a new VideoFrame with the processed data | ||
| 240 | - | ||
| 241 | - return originalFrame // Placeholder - return original frame | ||
| 242 | - } | ||
| 243 | - | ||
| 244 | - /** | ||
| 245 | - * Load RVM model | ||
| 246 | - */ | ||
| 247 | - fun loadModel( | ||
| 248 | - assetManager: android.content.res.AssetManager, | ||
| 249 | - modelId: Int = 0, | ||
| 250 | - sizeId: Int = 2, | ||
| 251 | - intraInterId: Int = 0, | ||
| 252 | - postProcId: Int = 1, | ||
| 253 | - cpuGpu: Int = 0 | ||
| 254 | - ): Boolean { | ||
| 255 | - return rvmNcnn.loadModel(assetManager, modelId, sizeId, intraInterId, postProcId, cpuGpu) | ||
| 256 | - } | ||
| 257 | - | ||
| 258 | - fun dispose() { | ||
| 259 | - scope.cancel() | ||
| 260 | - surfaceTextureHelper.stopListening() | ||
| 261 | - surfaceTextureHelper.dispose() | ||
| 262 | - surface.release() | ||
| 263 | - eglRenderer.release() | ||
| 264 | - GlUtil.checkNoGLES2Error("OpenCVVideoProcessor.dispose") | ||
| 265 | - } | ||
| 266 | -} |
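Review note: the deleted `convertI420ToRGBA` above was an explicit placeholder; its per-pixel loop also wrote B,G,R,A byte order despite the RGBA name. For reference, a minimal sketch of the conversion via OpenCV's built-in `cvtColor`, assuming even frame dimensions and OpenCV's Java bindings on the classpath:

```kotlin
import livekit.org.webrtc.VideoFrame
import org.opencv.core.CvType
import org.opencv.core.Mat
import org.opencv.imgproc.Imgproc
import java.nio.ByteBuffer

// Convert a WebRTC I420 buffer to an RGBA Mat. Assumes even width/height.
fun i420ToRgba(i420: VideoFrame.I420Buffer): Mat {
    val w = i420.width
    val h = i420.height
    // Pack the Y, U and V planes contiguously (I420 layout), dropping row padding.
    val packed = ByteArray(w * h * 3 / 2)
    var offset = copyPlane(i420.dataY, i420.strideY, w, h, packed, 0)
    offset = copyPlane(i420.dataU, i420.strideU, w / 2, h / 2, packed, offset)
    copyPlane(i420.dataV, i420.strideV, w / 2, h / 2, packed, offset)

    val yuv = Mat(h * 3 / 2, w, CvType.CV_8UC1)
    yuv.put(0, 0, packed)
    val rgba = Mat()
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGBA_I420)
    yuv.release()
    return rgba
}

private fun copyPlane(src: ByteBuffer, stride: Int, width: Int, height: Int, dst: ByteArray, dstOffset: Int): Int {
    var out = dstOffset
    for (r in 0 until height) {
        src.position(r * stride) // skip any row padding in the source plane
        src.get(dst, out, width)
        out += width
    }
    return out
}
```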
| 1 | -package io.livekit.android.track.processing.video;// RVMNcnn.java | 1 | +package io.livekit.android.track.processing.video; |
| 2 | 2 | ||
| 3 | import android.content.res.AssetManager; | 3 | import android.content.res.AssetManager; |
| 4 | +import android.graphics.Bitmap; | ||
| 4 | import android.view.Surface; | 5 | import android.view.Surface; |
| 5 | 6 | ||
| 6 | -public class RVMNcnn | ||
| 7 | -{ | 7 | +import io.livekit.android.room.track.video.NoDropVideoProcessor; |
| 8 | +import livekit.org.webrtc.EglBase; | ||
| 9 | +import livekit.org.webrtc.SurfaceTextureHelper; | ||
| 10 | +import livekit.org.webrtc.VideoFrame; | ||
| 11 | +import livekit.org.webrtc.VideoSink; | ||
| 12 | + | ||
| 13 | +/** | ||
| 14 | + * RVMNcnn processor that delegates all pixel processing to native (cpp) and | ||
| 15 | + * renders processed frames directly into a Surface provided by SurfaceTextureHelper. | ||
| 16 | + * Java does not perform any image processing. | ||
| 17 | + */ | ||
| 18 | +public class RVMNcnn extends NoDropVideoProcessor { | ||
| 19 | + | ||
| 20 | + // Native JNI hooks | ||
| 8 | public native boolean loadModel(AssetManager mgr, int modelid, int sizeid, int intrainterid, int postprocid, int cpugpu); | 21 | public native boolean loadModel(AssetManager mgr, int modelid, int sizeid, int intrainterid, int postprocid, int cpugpu); |
| 9 | public native boolean openCamera(int facing); | 22 | public native boolean openCamera(int facing); |
| 10 | public native boolean closeCamera(); | 23 | public native boolean closeCamera(); |
| 11 | public native boolean setOutputWindow(Surface surface); | 24 | public native boolean setOutputWindow(Surface surface); |
| 12 | - public native boolean processFrame(long rgbaAddr, int width, int height, int rotation); | 25 | + public native boolean setBackgroundImage(Bitmap bitmap); |
| 13 | 26 | ||
| 14 | static { | 27 | static { |
| 15 | System.loadLibrary("rvmncnn"); | 28 | System.loadLibrary("rvmncnn"); |
| 16 | } | 29 | } |
| 30 | + | ||
| 31 | + private final EglBase eglBase; | ||
| 32 | + private final SurfaceTextureHelper surfaceTextureHelper; | ||
| 33 | + private final Surface outputSurface; | ||
| 34 | + | ||
| 35 | + private VideoSink targetSink; | ||
| 36 | + | ||
| 37 | + /** | ||
| 38 | + * Controls whether the native virtual background is enabled. | ||
| 39 | + * When enabled, native renders to outputSurface and Java forwards those frames. | ||
| 40 | + * When disabled, incoming frames are passed through to targetSink. | ||
| 41 | + */ | ||
| 42 | + public boolean enabled = true; | ||
| 43 | + | ||
| 44 | + /** | ||
| 45 | + * Facing: 0 back, 1 front. Used when starting native camera pipeline. | ||
| 46 | + */ | ||
| 47 | + private int facing = 1; | ||
| 48 | + | ||
| 49 | + public RVMNcnn(EglBase eglBase) { | ||
| 50 | + this.eglBase = eglBase; | ||
| 51 | + this.surfaceTextureHelper = SurfaceTextureHelper.create("RVMNcnn", eglBase.getEglBaseContext()); | ||
| 52 | + this.outputSurface = new Surface(surfaceTextureHelper.getSurfaceTexture()); | ||
| 53 | + } | ||
| 54 | + | ||
| 55 | + @Override | ||
| 56 | + public void onCapturerStarted(boolean started) { | ||
| 57 | + if (started) { | ||
| 58 | + // Listen to frames produced from the output surface (rendered by native), | ||
| 59 | + // and forward to target sink. | ||
| 60 | + surfaceTextureHelper.stopListening(); | ||
| 61 | + surfaceTextureHelper.startListening(frame -> { | ||
| 62 | + VideoSink sink = targetSink; | ||
| 63 | + if (sink != null) { | ||
| 64 | + sink.onFrame(frame); | ||
| 65 | + } | ||
| 66 | + }); | ||
| 67 | + | ||
| 68 | + if (enabled) { | ||
| 69 | + // Direct native to render into our SurfaceTextureHelper's surface | ||
| 70 | + setOutputWindow(outputSurface); | ||
| 71 | + // Start native camera pipeline (cpp will process and render) | ||
| 72 | + openCamera(facing); | ||
| 73 | + } | ||
| 74 | + } | ||
| 75 | + } | ||
| 76 | + | ||
| 77 | + @Override | ||
| 78 | + public void onCapturerStopped() { | ||
| 79 | + // Stop Java-side listening and shutdown native pipeline | ||
| 80 | + surfaceTextureHelper.stopListening(); | ||
| 81 | + closeCamera(); | ||
| 82 | + } | ||
| 83 | + | ||
| 84 | + @Override | ||
| 85 | + public void onFrameCaptured(VideoFrame frame) { | ||
| 86 | + // If disabled, pass-through original frames. | ||
| 87 | + if (!enabled) { | ||
| 88 | + VideoSink sink = targetSink; | ||
| 89 | + if (sink != null) { | ||
| 90 | + sink.onFrame(frame); | ||
| 91 | + } | ||
| 92 | + return; | ||
| 93 | + } | ||
| 94 | + // Enabled: Java does not process pixels nor forward original frames. | ||
| 95 | + // Native renders processed frames into outputSurface, which we already forward above. | ||
| 96 | + // Drop the incoming frame here. | ||
| 97 | + } | ||
| 98 | + | ||
| 99 | + @Override | ||
| 100 | + public void setSink(VideoSink sink) { | ||
| 101 | + this.targetSink = sink; | ||
| 102 | + } | ||
| 103 | + | ||
| 104 | + /** | ||
| 105 | + * Update facing and restart native pipeline if needed. | ||
| 106 | + * 0 = back, 1 = front. | ||
| 107 | + */ | ||
| 108 | + public void setFacing(int facing) { | ||
| 109 | + this.facing = facing == 0 ? 0 : 1; | ||
| 110 | + if (enabled) { | ||
| 111 | + // If running, restart native camera with new facing | ||
| 112 | + closeCamera(); | ||
| 113 | + openCamera(this.facing); | ||
| 114 | + } | ||
| 115 | + } | ||
| 116 | + | ||
| 117 | + /** | ||
| 118 | + * Update the background image used by native processor. | ||
| 119 | + * Pass null to clear. | ||
| 120 | + */ | ||
| 121 | + public boolean updateBackgroundImage(Bitmap bitmap) { | ||
| 122 | + return setBackgroundImage(bitmap); | ||
| 123 | + } | ||
| 124 | + | ||
| 125 | + /** | ||
| 126 | + * Call when disposing the processor. | ||
| 127 | + */ | ||
| 128 | + public void dispose() { | ||
| 129 | + surfaceTextureHelper.stopListening(); | ||
| 130 | + closeCamera(); | ||
| 131 | + outputSurface.release(); | ||
| 132 | + surfaceTextureHelper.dispose(); | ||
| 133 | + } | ||
| 17 | } | 134 | } |
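Review note: `RVMNcnn` now owns a `SurfaceTextureHelper`, an output `Surface`, and a native camera pipeline, so `dispose()` must be called when its owner goes away. A sketch of the matching teardown in `MainViewModel` (`onCleared` is the standard `AndroidViewModel` hook; the `eglBase` field is from the unchanged part of that file):

```kotlin
override fun onCleared() {
    super.onCleared()
    // Stops the native camera and releases the Surface and
    // SurfaceTextureHelper owned by the processor.
    processor.dispose()
    eglBase.release()
}
```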
| @@ -142,7 +142,24 @@ void MyNdkCamera::on_image_render(cv::Mat& rgb) const | @@ -142,7 +142,24 @@ void MyNdkCamera::on_image_render(cv::Mat& rgb) const | ||
| 142 | } | 142 | } |
| 143 | } | 143 | } |
| 144 | 144 | ||
| 145 | + // overlay fps | ||
| 145 | draw_fps(rgb); | 146 | draw_fps(rgb); |
| 147 | + | ||
| 148 | + // enforce target output resolution 640x640 and 180-degree rotation | ||
| 149 | + { | ||
| 150 | + // resize to 640x640 if needed | ||
| 151 | + if (rgb.cols != 640 || rgb.rows != 640) | ||
| 152 | + { | ||
| 153 | + cv::Mat resized; | ||
| 154 | + cv::resize(rgb, resized, cv::Size(640, 640), 0, 0, cv::INTER_LINEAR); | ||
| 155 | + resized.copyTo(rgb); | ||
| 156 | + } | ||
| 157 | + | ||
| 158 | + // rotate 180 degrees | ||
| 159 | + cv::Mat rotated; | ||
| 160 | + cv::rotate(rgb, rotated, cv::ROTATE_180); | ||
| 161 | + rotated.copyTo(rgb); | ||
| 162 | + } | ||
| 146 | } | 163 | } |
| 147 | 164 | ||
| 148 | static MyNdkCamera* g_camera = 0; | 165 | static MyNdkCamera* g_camera = 0; |
| @@ -299,6 +316,9 @@ JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_RVMNcn | @@ -299,6 +316,9 @@ JNIEXPORT jboolean JNICALL Java_io_livekit_android_track_processing_video_RVMNcn | ||
| 299 | 316 | ||
| 300 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "setOutputWindow %p", win); | 317 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "setOutputWindow %p", win); |
| 301 | 318 | ||
| 319 | + // Set buffer geometry to 640x640, keep current format (0) | ||
| 320 | + ANativeWindow_setBuffersGeometry(win, 640, 640, 0); | ||
| 321 | + | ||
| 302 | g_camera->set_window(win); | 322 | g_camera->set_window(win); |
| 303 | 323 | ||
| 304 | return JNI_TRUE; | 324 | return JNI_TRUE; |
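Review note: the native side now fixes the output buffers at 640x640, but on the Java side `SurfaceTextureHelper` only delivers frames once a texture size has been set, and `RVMNcnn` never calls it. Assuming stock WebRTC behavior, the constructor (or `onCapturerStarted`) likely needs a matching call, sketched here in Kotlin:

```kotlin
// Match the 640x640 geometry set by ANativeWindow_setBuffersGeometry above.
// In stock WebRTC builds, SurfaceTextureHelper refuses to deliver frames
// while its texture size is still zero.
surfaceTextureHelper.setTextureSize(640, 640)
```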