Committed by
GitHub
VirtualBackgroundVideoProcessor and track-processors package (#660)
* save * port js shaders * save * fix downsampler * save - trying to get blur shader workging * fix wrong height to downsample * expose specific floats arguments for coordinate matrixes * constant vertex shader * properly set sizes for framebuffer to draw into * working blur shader * Rename DownSamplerShader to ResamplerShader * save * Virtual background and track-processors module * Delete experimental files * Clean up example * Clean up documentation * Delete unused file * revert change to module.md * cleanup documentation * revert unwanted change * changeset * spotless * spotless
正在显示
55 个修改的文件
包含
1598 行增加
和
356 行删除
.changeset/dirty-chairs-fry.md
0 → 100644
| @@ -3,6 +3,7 @@ | @@ -3,6 +3,7 @@ | ||
| 3 | <component name="CompilerConfiguration"> | 3 | <component name="CompilerConfiguration"> |
| 4 | <bytecodeTargetLevel target="1.8"> | 4 | <bytecodeTargetLevel target="1.8"> |
| 5 | <module name="livekit-android.examples.screenshareaudio" target="17" /> | 5 | <module name="livekit-android.examples.screenshareaudio" target="17" /> |
| 6 | + <module name="livekit-android.examples.selfie-segmentation" target="17" /> | ||
| 6 | </bytecodeTargetLevel> | 7 | </bytecodeTargetLevel> |
| 7 | </component> | 8 | </component> |
| 8 | </project> | 9 | </project> |
| 1 | <component name="InspectionProjectProfileManager"> | 1 | <component name="InspectionProjectProfileManager"> |
| 2 | <profile version="1.0"> | 2 | <profile version="1.0"> |
| 3 | <option name="myName" value="Project Default" /> | 3 | <option name="myName" value="Project Default" /> |
| 4 | - <inspection_tool class="AndroidLintVisibleForTests" enabled="true" level="WARNING" enabled_by_default="true"> | ||
| 5 | - <scope name="Library Projects" level="WARNING" enabled="false" /> | ||
| 6 | - </inspection_tool> | ||
| 7 | <inspection_tool class="MemberVisibilityCanBePrivate" enabled="true" level="WEAK WARNING" enabled_by_default="true"> | 4 | <inspection_tool class="MemberVisibilityCanBePrivate" enabled="true" level="WEAK WARNING" enabled_by_default="true"> |
| 8 | <scope name="Library Projects" level="WEAK WARNING" enabled="false" /> | 5 | <scope name="Library Projects" level="WEAK WARNING" enabled="false" /> |
| 9 | </inspection_tool> | 6 | </inspection_tool> |
| @@ -60,3 +60,39 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | @@ -60,3 +60,39 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
| 60 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | 60 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 61 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | 61 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
| 62 | SOFTWARE. | 62 | SOFTWARE. |
| 63 | + | ||
| 64 | +##################################################################################### | ||
| 65 | + | ||
| 66 | +Parts of this source code come from the WebRTC project, following a BSD-style license | ||
| 67 | + | ||
| 68 | +https://webrtc.googlesource.com/src | ||
| 69 | + | ||
| 70 | +Copyright (c) 2011, The WebRTC project authors. All rights reserved. | ||
| 71 | + | ||
| 72 | +Redistribution and use in source and binary forms, with or without | ||
| 73 | +modification, are permitted provided that the following conditions are | ||
| 74 | +met: | ||
| 75 | + | ||
| 76 | + * Redistributions of source code must retain the above copyright | ||
| 77 | + notice, this list of conditions and the following disclaimer. | ||
| 78 | + | ||
| 79 | + * Redistributions in binary form must reproduce the above copyright | ||
| 80 | + notice, this list of conditions and the following disclaimer in | ||
| 81 | + the documentation and/or other materials provided with the | ||
| 82 | + distribution. | ||
| 83 | + | ||
| 84 | + * Neither the name of Google nor the names of its contributors may | ||
| 85 | + be used to endorse or promote products derived from this software | ||
| 86 | + without specific prior written permission. | ||
| 87 | + | ||
| 88 | +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||
| 89 | +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||
| 90 | +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||
| 91 | +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||
| 92 | +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||
| 93 | +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||
| 94 | +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||
| 95 | +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||
| 96 | +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||
| 97 | +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
| 98 | +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 1 | -/* | ||
| 2 | - * Copyright 2024 LiveKit, Inc. | ||
| 3 | - * | ||
| 4 | - * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | - * you may not use this file except in compliance with the License. | ||
| 6 | - * You may obtain a copy of the License at | ||
| 7 | - * | ||
| 8 | - * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | - * | ||
| 10 | - * Unless required by applicable law or agreed to in writing, software | ||
| 11 | - * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | - * See the License for the specific language governing permissions and | ||
| 14 | - * limitations under the License. | ||
| 15 | - */ | ||
| 16 | - | ||
| 17 | -package io.livekit.android.selfie | ||
| 18 | - | ||
| 19 | -import android.graphics.BitmapFactory | ||
| 20 | -import android.graphics.Color | ||
| 21 | -import android.graphics.ImageFormat | ||
| 22 | -import android.graphics.Matrix | ||
| 23 | -import android.graphics.Paint | ||
| 24 | -import android.graphics.Rect | ||
| 25 | -import android.graphics.YuvImage | ||
| 26 | -import android.os.Build | ||
| 27 | -import android.util.Log | ||
| 28 | -import android.view.Surface | ||
| 29 | -import androidx.core.graphics.set | ||
| 30 | -import com.google.mlkit.vision.common.InputImage | ||
| 31 | -import com.google.mlkit.vision.segmentation.Segmentation | ||
| 32 | -import com.google.mlkit.vision.segmentation.Segmenter | ||
| 33 | -import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions | ||
| 34 | -import io.livekit.android.room.track.video.NoDropVideoProcessor | ||
| 35 | -import kotlinx.coroutines.CoroutineDispatcher | ||
| 36 | -import kotlinx.coroutines.CoroutineScope | ||
| 37 | -import kotlinx.coroutines.cancel | ||
| 38 | -import kotlinx.coroutines.channels.BufferOverflow | ||
| 39 | -import kotlinx.coroutines.flow.MutableSharedFlow | ||
| 40 | -import kotlinx.coroutines.launch | ||
| 41 | -import kotlinx.coroutines.sync.Mutex | ||
| 42 | -import livekit.org.webrtc.EglBase | ||
| 43 | -import livekit.org.webrtc.SurfaceTextureHelper | ||
| 44 | -import livekit.org.webrtc.VideoFrame | ||
| 45 | -import livekit.org.webrtc.VideoSink | ||
| 46 | -import livekit.org.webrtc.YuvHelper | ||
| 47 | -import java.io.ByteArrayOutputStream | ||
| 48 | -import java.nio.ByteBuffer | ||
| 49 | - | ||
| 50 | -class SelfieBitmapVideoProcessor(eglBase: EglBase, dispatcher: CoroutineDispatcher) : NoDropVideoProcessor() { | ||
| 51 | - | ||
| 52 | - private var targetSink: VideoSink? = null | ||
| 53 | - private val segmenter: Segmenter | ||
| 54 | - | ||
| 55 | - private var lastRotation = 0 | ||
| 56 | - private var lastWidth = 0 | ||
| 57 | - private var lastHeight = 0 | ||
| 58 | - private val surfaceTextureHelper = SurfaceTextureHelper.create("BitmapToYUV", eglBase.eglBaseContext) | ||
| 59 | - private val surface = Surface(surfaceTextureHelper.surfaceTexture) | ||
| 60 | - | ||
| 61 | - private val scope = CoroutineScope(dispatcher) | ||
| 62 | - private val taskFlow = MutableSharedFlow<VideoFrame>( | ||
| 63 | - replay = 0, | ||
| 64 | - extraBufferCapacity = 1, | ||
| 65 | - onBufferOverflow = BufferOverflow.SUSPEND, | ||
| 66 | - ) | ||
| 67 | - | ||
| 68 | - init { | ||
| 69 | - val options = | ||
| 70 | - SelfieSegmenterOptions.Builder() | ||
| 71 | - .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE) | ||
| 72 | - .build() | ||
| 73 | - segmenter = Segmentation.getClient(options) | ||
| 74 | - | ||
| 75 | - // Funnel processing into a single flow that won't buffer, | ||
| 76 | - // since processing will be slower than video capture | ||
| 77 | - scope.launch { | ||
| 78 | - taskFlow.collect { frame -> | ||
| 79 | - processFrame(frame) | ||
| 80 | - } | ||
| 81 | - } | ||
| 82 | - } | ||
| 83 | - | ||
| 84 | - override fun onCapturerStarted(started: Boolean) { | ||
| 85 | - if (started) { | ||
| 86 | - surfaceTextureHelper.startListening { frame -> | ||
| 87 | - targetSink?.onFrame(frame) | ||
| 88 | - } | ||
| 89 | - } | ||
| 90 | - } | ||
| 91 | - | ||
| 92 | - override fun onCapturerStopped() { | ||
| 93 | - surfaceTextureHelper.stopListening() | ||
| 94 | - } | ||
| 95 | - | ||
| 96 | - override fun onFrameCaptured(frame: VideoFrame) { | ||
| 97 | - if (taskFlow.tryEmit(frame)) { | ||
| 98 | - frame.retain() | ||
| 99 | - } | ||
| 100 | - } | ||
| 101 | - | ||
| 102 | - suspend fun processFrame(frame: VideoFrame) { | ||
| 103 | - // toI420 causes a retain, so a corresponding frameBuffer.release is needed when done. | ||
| 104 | - val frameBuffer = frame.buffer.toI420() ?: return | ||
| 105 | - val rotationDegrees = frame.rotation | ||
| 106 | - | ||
| 107 | - val dataY = frameBuffer.dataY | ||
| 108 | - val dataU = frameBuffer.dataU | ||
| 109 | - val dataV = frameBuffer.dataV | ||
| 110 | - val nv12Buffer = ByteBuffer.allocateDirect(dataY.limit() + dataU.limit() + dataV.limit()) | ||
| 111 | - | ||
| 112 | - // For some reason, I420ToNV12 actually expects YV12 | ||
| 113 | - YuvHelper.I420ToNV12( | ||
| 114 | - frameBuffer.dataY, | ||
| 115 | - frameBuffer.strideY, | ||
| 116 | - frameBuffer.dataV, | ||
| 117 | - frameBuffer.strideV, | ||
| 118 | - frameBuffer.dataU, | ||
| 119 | - frameBuffer.strideU, | ||
| 120 | - nv12Buffer, | ||
| 121 | - frameBuffer.width, | ||
| 122 | - frameBuffer.height, | ||
| 123 | - ) | ||
| 124 | - | ||
| 125 | - // Use YuvImage to convert to bitmap | ||
| 126 | - val yuvImage = YuvImage(nv12Buffer.array(), ImageFormat.NV21, frameBuffer.width, frameBuffer.height, null) | ||
| 127 | - val stream = ByteArrayOutputStream() | ||
| 128 | - yuvImage.compressToJpeg(Rect(0, 0, frameBuffer.width, frameBuffer.height), 100, stream) | ||
| 129 | - | ||
| 130 | - val bitmap = BitmapFactory.decodeByteArray( | ||
| 131 | - stream.toByteArray(), | ||
| 132 | - 0, | ||
| 133 | - stream.size(), | ||
| 134 | - BitmapFactory.Options().apply { inMutable = true }, | ||
| 135 | - ) | ||
| 136 | - | ||
| 137 | - // No longer need the original frame buffer any more. | ||
| 138 | - frameBuffer.release() | ||
| 139 | - frame.release() | ||
| 140 | - | ||
| 141 | - // Ready for segementation processing. | ||
| 142 | - val inputImage = InputImage.fromBitmap(bitmap, 0) | ||
| 143 | - val task = segmenter.process(inputImage) | ||
| 144 | - | ||
| 145 | - val latch = Mutex(true) | ||
| 146 | - task.addOnSuccessListener { segmentationMask -> | ||
| 147 | - val mask = segmentationMask.buffer | ||
| 148 | - | ||
| 149 | - // Do some image processing | ||
| 150 | - for (y in 0 until segmentationMask.height) { | ||
| 151 | - for (x in 0 until segmentationMask.width) { | ||
| 152 | - val backgroundConfidence = 1 - mask.float | ||
| 153 | - | ||
| 154 | - if (backgroundConfidence > 0.8f) { | ||
| 155 | - bitmap[x, y] = Color.GREEN // Color off the background | ||
| 156 | - } | ||
| 157 | - } | ||
| 158 | - } | ||
| 159 | - | ||
| 160 | - // Prepare for creating the processed video frame. | ||
| 161 | - if (lastRotation != rotationDegrees) { | ||
| 162 | - surfaceTextureHelper?.setFrameRotation(rotationDegrees) | ||
| 163 | - lastRotation = rotationDegrees | ||
| 164 | - } | ||
| 165 | - | ||
| 166 | - if (lastWidth != bitmap.width || lastHeight != bitmap.height) { | ||
| 167 | - surfaceTextureHelper?.setTextureSize(bitmap.width, bitmap.height) | ||
| 168 | - lastWidth = bitmap.width | ||
| 169 | - lastHeight = bitmap.height | ||
| 170 | - } | ||
| 171 | - | ||
| 172 | - surfaceTextureHelper?.handler?.post { | ||
| 173 | - val canvas = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { | ||
| 174 | - surface.lockHardwareCanvas() | ||
| 175 | - } else { | ||
| 176 | - surface.lockCanvas(null) | ||
| 177 | - } | ||
| 178 | - | ||
| 179 | - if (canvas != null) { | ||
| 180 | - // Create the video frame. | ||
| 181 | - canvas.drawBitmap(bitmap, Matrix(), Paint()) | ||
| 182 | - surface.unlockCanvasAndPost(canvas) | ||
| 183 | - } | ||
| 184 | - bitmap.recycle() | ||
| 185 | - latch.unlock() | ||
| 186 | - } | ||
| 187 | - }.addOnFailureListener { | ||
| 188 | - Log.e("SelfieVideoProcessor", "failed to process frame!") | ||
| 189 | - } | ||
| 190 | - latch.lock() | ||
| 191 | - } | ||
| 192 | - | ||
| 193 | - override fun setSink(sink: VideoSink?) { | ||
| 194 | - targetSink = sink | ||
| 195 | - } | ||
| 196 | - | ||
| 197 | - fun dispose() { | ||
| 198 | - segmenter.close() | ||
| 199 | - surfaceTextureHelper.stopListening() | ||
| 200 | - surfaceTextureHelper.dispose() | ||
| 201 | - scope.cancel() | ||
| 202 | - } | ||
| 203 | -} |
examples/selfie-segmentation/src/main/java/io/livekit/android/selfie/SelfieVideoProcessor.kt
已删除
100644 → 0
| 1 | -/* | ||
| 2 | - * Copyright 2024 LiveKit, Inc. | ||
| 3 | - * | ||
| 4 | - * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | - * you may not use this file except in compliance with the License. | ||
| 6 | - * You may obtain a copy of the License at | ||
| 7 | - * | ||
| 8 | - * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | - * | ||
| 10 | - * Unless required by applicable law or agreed to in writing, software | ||
| 11 | - * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | - * See the License for the specific language governing permissions and | ||
| 14 | - * limitations under the License. | ||
| 15 | - */ | ||
| 16 | - | ||
| 17 | -package io.livekit.android.selfie | ||
| 18 | - | ||
| 19 | -import android.util.Log | ||
| 20 | -import com.google.mlkit.vision.common.InputImage | ||
| 21 | -import com.google.mlkit.vision.segmentation.Segmentation | ||
| 22 | -import com.google.mlkit.vision.segmentation.Segmenter | ||
| 23 | -import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions | ||
| 24 | -import io.livekit.android.room.track.video.NoDropVideoProcessor | ||
| 25 | -import kotlinx.coroutines.CoroutineDispatcher | ||
| 26 | -import kotlinx.coroutines.CoroutineScope | ||
| 27 | -import kotlinx.coroutines.channels.BufferOverflow | ||
| 28 | -import kotlinx.coroutines.flow.MutableSharedFlow | ||
| 29 | -import kotlinx.coroutines.launch | ||
| 30 | -import livekit.org.webrtc.VideoFrame | ||
| 31 | -import livekit.org.webrtc.VideoSink | ||
| 32 | -import java.nio.ByteBuffer | ||
| 33 | - | ||
| 34 | -class SelfieVideoProcessor(dispatcher: CoroutineDispatcher) : NoDropVideoProcessor() { | ||
| 35 | - | ||
| 36 | - private var targetSink: VideoSink? = null | ||
| 37 | - private val segmenter: Segmenter | ||
| 38 | - | ||
| 39 | - private val scope = CoroutineScope(dispatcher) | ||
| 40 | - private val taskFlow = MutableSharedFlow<VideoFrame>( | ||
| 41 | - replay = 0, | ||
| 42 | - extraBufferCapacity = 1, | ||
| 43 | - onBufferOverflow = BufferOverflow.SUSPEND, | ||
| 44 | - ) | ||
| 45 | - | ||
| 46 | - init { | ||
| 47 | - val options = | ||
| 48 | - SelfieSegmenterOptions.Builder() | ||
| 49 | - .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE) | ||
| 50 | - .build() | ||
| 51 | - segmenter = Segmentation.getClient(options) | ||
| 52 | - | ||
| 53 | - // Funnel processing into a single flow that won't buffer, | ||
| 54 | - // since processing will be slower than video capture | ||
| 55 | - scope.launch { | ||
| 56 | - taskFlow.collect { frame -> | ||
| 57 | - processFrame(frame) | ||
| 58 | - } | ||
| 59 | - } | ||
| 60 | - } | ||
| 61 | - | ||
| 62 | - override fun onCapturerStarted(started: Boolean) { | ||
| 63 | - } | ||
| 64 | - | ||
| 65 | - override fun onCapturerStopped() { | ||
| 66 | - } | ||
| 67 | - | ||
| 68 | - override fun onFrameCaptured(frame: VideoFrame) { | ||
| 69 | - if (taskFlow.tryEmit(frame)) { | ||
| 70 | - frame.retain() | ||
| 71 | - } | ||
| 72 | - } | ||
| 73 | - | ||
| 74 | - fun processFrame(frame: VideoFrame) { | ||
| 75 | - // toI420 causes a retain, so a corresponding frameBuffer.release is needed when done. | ||
| 76 | - val frameBuffer = frame.buffer.toI420() ?: return | ||
| 77 | - val byteBuffer = ByteBuffer.allocateDirect(frameBuffer.dataY.limit() + frameBuffer.dataV.limit() + frameBuffer.dataU.limit()) | ||
| 78 | - // YV12 is exactly like I420, but the order of the U and V planes is reversed. | ||
| 79 | - // In the name, "YV" refers to the plane order: Y, then V (then U). | ||
| 80 | - .put(frameBuffer.dataY) | ||
| 81 | - .put(frameBuffer.dataV) | ||
| 82 | - .put(frameBuffer.dataU) | ||
| 83 | - | ||
| 84 | - val image = InputImage.fromByteBuffer( | ||
| 85 | - byteBuffer, | ||
| 86 | - frameBuffer.width, | ||
| 87 | - frameBuffer.height, | ||
| 88 | - 0, | ||
| 89 | - InputImage.IMAGE_FORMAT_YV12, | ||
| 90 | - ) | ||
| 91 | - | ||
| 92 | - val task = segmenter.process(image) | ||
| 93 | - task.addOnSuccessListener { segmentationMask -> | ||
| 94 | - val mask = segmentationMask.buffer | ||
| 95 | - | ||
| 96 | - val dataY = frameBuffer.dataY | ||
| 97 | - | ||
| 98 | - // Do some image processing | ||
| 99 | - for (i in 0 until segmentationMask.height) { | ||
| 100 | - for (j in 0 until segmentationMask.width) { | ||
| 101 | - val backgroundConfidence = 1 - mask.float | ||
| 102 | - | ||
| 103 | - if (backgroundConfidence > 0.8f) { | ||
| 104 | - val position = dataY.position() | ||
| 105 | - val yValue = 0x80.toByte() | ||
| 106 | - dataY.position(position) | ||
| 107 | - dataY.put(yValue) | ||
| 108 | - } else { | ||
| 109 | - dataY.position(dataY.position() + 1) | ||
| 110 | - } | ||
| 111 | - } | ||
| 112 | - } | ||
| 113 | - | ||
| 114 | - // Send the final frame off to the sink. | ||
| 115 | - targetSink?.onFrame(VideoFrame(frameBuffer, frame.rotation, frame.timestampNs)) | ||
| 116 | - | ||
| 117 | - // Release any remaining resources | ||
| 118 | - frameBuffer.release() | ||
| 119 | - frame.release() | ||
| 120 | - }.addOnFailureListener { | ||
| 121 | - Log.e("SelfieVideoProcessor", "failed to process frame!") | ||
| 122 | - } | ||
| 123 | - } | ||
| 124 | - | ||
| 125 | - override fun setSink(sink: VideoSink?) { | ||
| 126 | - targetSink = sink | ||
| 127 | - } | ||
| 128 | - | ||
| 129 | - fun dispose() { | ||
| 130 | - segmenter.close() | ||
| 131 | - } | ||
| 132 | -} |
| @@ -33,9 +33,9 @@ android { | @@ -33,9 +33,9 @@ android { | ||
| 33 | } | 33 | } |
| 34 | 34 | ||
| 35 | dependencies { | 35 | dependencies { |
| 36 | - implementation 'com.google.mlkit:segmentation-selfie:16.0.0-beta4' | ||
| 37 | 36 | ||
| 38 | api project(":livekit-android-sdk") | 37 | api project(":livekit-android-sdk") |
| 38 | + api project(":livekit-android-track-processors") | ||
| 39 | 39 | ||
| 40 | api "androidx.core:core-ktx:${libs.versions.androidx.core.get()}" | 40 | api "androidx.core:core-ktx:${libs.versions.androidx.core.get()}" |
| 41 | implementation 'androidx.appcompat:appcompat:1.6.1' | 41 | implementation 'androidx.appcompat:appcompat:1.6.1' |
| @@ -44,6 +44,8 @@ dependencies { | @@ -44,6 +44,8 @@ dependencies { | ||
| 44 | api "androidx.lifecycle:lifecycle-runtime-ktx:${libs.versions.androidx.lifecycle.get()}" | 44 | api "androidx.lifecycle:lifecycle-runtime-ktx:${libs.versions.androidx.lifecycle.get()}" |
| 45 | api "androidx.lifecycle:lifecycle-viewmodel-ktx:${libs.versions.androidx.lifecycle.get()}" | 45 | api "androidx.lifecycle:lifecycle-viewmodel-ktx:${libs.versions.androidx.lifecycle.get()}" |
| 46 | api "androidx.lifecycle:lifecycle-common-java8:${libs.versions.androidx.lifecycle.get()}" | 46 | api "androidx.lifecycle:lifecycle-common-java8:${libs.versions.androidx.lifecycle.get()}" |
| 47 | + implementation project(':livekit-android-camerax') | ||
| 48 | + implementation libs.lifecycle.process | ||
| 47 | testImplementation 'junit:junit:4.13.2' | 49 | testImplementation 'junit:junit:4.13.2' |
| 48 | androidTestImplementation 'androidx.test.ext:junit:1.1.5' | 50 | androidTestImplementation 'androidx.test.ext:junit:1.1.5' |
| 49 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1' | 51 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1' |
| 1 | /* | 1 | /* |
| 2 | - * Copyright 2024 LiveKit, Inc. | 2 | + * Copyright 2024-2025 LiveKit, Inc. |
| 3 | * | 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. | 5 | * you may not use this file except in compliance with the License. |
| 1 | /* | 1 | /* |
| 2 | - * Copyright 2024 LiveKit, Inc. | 2 | + * Copyright 2024-2025 LiveKit, Inc. |
| 3 | * | 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. | 5 | * you may not use this file except in compliance with the License. |
| @@ -19,6 +19,7 @@ package io.livekit.android.selfie | @@ -19,6 +19,7 @@ package io.livekit.android.selfie | ||
| 19 | import android.Manifest | 19 | import android.Manifest |
| 20 | import android.content.pm.PackageManager | 20 | import android.content.pm.PackageManager |
| 21 | import android.os.Bundle | 21 | import android.os.Bundle |
| 22 | +import android.widget.Button | ||
| 22 | import android.widget.Toast | 23 | import android.widget.Toast |
| 23 | import androidx.activity.ComponentActivity | 24 | import androidx.activity.ComponentActivity |
| 24 | import androidx.activity.result.contract.ActivityResultContracts | 25 | import androidx.activity.result.contract.ActivityResultContracts |
| @@ -37,6 +38,13 @@ class MainActivity : AppCompatActivity() { | @@ -37,6 +38,13 @@ class MainActivity : AppCompatActivity() { | ||
| 37 | 38 | ||
| 38 | viewModel = ViewModelProvider(this)[MainViewModel::class.java] | 39 | viewModel = ViewModelProvider(this)[MainViewModel::class.java] |
| 39 | 40 | ||
| 41 | + val enableButton = findViewById<Button>(R.id.button) | ||
| 42 | + enableButton.setOnClickListener { | ||
| 43 | + val state = viewModel.toggleProcessor() | ||
| 44 | + | ||
| 45 | + enableButton.setText(if (state) "Disable" else "Enable") | ||
| 46 | + } | ||
| 47 | + | ||
| 40 | val renderer = findViewById<TextureViewRenderer>(R.id.renderer) | 48 | val renderer = findViewById<TextureViewRenderer>(R.id.renderer) |
| 41 | viewModel.room.initVideoRenderer(renderer) | 49 | viewModel.room.initVideoRenderer(renderer) |
| 42 | viewModel.track.observe(this) { track -> | 50 | viewModel.track.observe(this) { track -> |
| 1 | /* | 1 | /* |
| 2 | - * Copyright 2024 LiveKit, Inc. | 2 | + * Copyright 2024-2025 LiveKit, Inc. |
| 3 | * | 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. | 5 | * you may not use this file except in compliance with the License. |
| @@ -17,18 +17,34 @@ | @@ -17,18 +17,34 @@ | ||
| 17 | package io.livekit.android.selfie | 17 | package io.livekit.android.selfie |
| 18 | 18 | ||
| 19 | import android.app.Application | 19 | import android.app.Application |
| 20 | +import android.graphics.drawable.BitmapDrawable | ||
| 21 | +import androidx.annotation.OptIn | ||
| 22 | +import androidx.appcompat.content.res.AppCompatResources | ||
| 23 | +import androidx.camera.camera2.interop.ExperimentalCamera2Interop | ||
| 24 | +import androidx.camera.core.ImageAnalysis | ||
| 20 | import androidx.lifecycle.AndroidViewModel | 25 | import androidx.lifecycle.AndroidViewModel |
| 21 | import androidx.lifecycle.MutableLiveData | 26 | import androidx.lifecycle.MutableLiveData |
| 27 | +import androidx.lifecycle.ProcessLifecycleOwner | ||
| 22 | import io.livekit.android.LiveKit | 28 | import io.livekit.android.LiveKit |
| 23 | import io.livekit.android.LiveKitOverrides | 29 | import io.livekit.android.LiveKitOverrides |
| 24 | import io.livekit.android.room.track.CameraPosition | 30 | import io.livekit.android.room.track.CameraPosition |
| 25 | import io.livekit.android.room.track.LocalVideoTrack | 31 | import io.livekit.android.room.track.LocalVideoTrack |
| 26 | import io.livekit.android.room.track.LocalVideoTrackOptions | 32 | import io.livekit.android.room.track.LocalVideoTrackOptions |
| 33 | +import io.livekit.android.room.track.video.CameraCapturerUtils | ||
| 34 | +import io.livekit.android.track.processing.video.VirtualBackgroundVideoProcessor | ||
| 35 | +import io.livekit.android.util.LoggingLevel | ||
| 27 | import kotlinx.coroutines.Dispatchers | 36 | import kotlinx.coroutines.Dispatchers |
| 37 | +import kotlinx.coroutines.asExecutor | ||
| 38 | +import livekit.org.webrtc.CameraXHelper | ||
| 28 | import livekit.org.webrtc.EglBase | 39 | import livekit.org.webrtc.EglBase |
| 29 | 40 | ||
| 41 | +@OptIn(ExperimentalCamera2Interop::class) | ||
| 30 | class MainViewModel(application: Application) : AndroidViewModel(application) { | 42 | class MainViewModel(application: Application) : AndroidViewModel(application) { |
| 31 | 43 | ||
| 44 | + init { | ||
| 45 | + LiveKit.loggingLevel = LoggingLevel.INFO | ||
| 46 | + } | ||
| 47 | + | ||
| 32 | val eglBase = EglBase.create() | 48 | val eglBase = EglBase.create() |
| 33 | val room = LiveKit.create( | 49 | val room = LiveKit.create( |
| 34 | application, | 50 | application, |
| @@ -37,20 +53,35 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | @@ -37,20 +53,35 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | ||
| 37 | ), | 53 | ), |
| 38 | ) | 54 | ) |
| 39 | 55 | ||
| 40 | - val track = MutableLiveData<LocalVideoTrack?>(null) | 56 | + val processor = VirtualBackgroundVideoProcessor(eglBase, Dispatchers.IO).apply { |
| 57 | + val drawable = AppCompatResources.getDrawable(application, R.drawable.background) as BitmapDrawable | ||
| 58 | + backgroundImage = drawable.bitmap | ||
| 59 | + } | ||
| 60 | + | ||
| 61 | + private var cameraProvider: CameraCapturerUtils.CameraProvider? = null | ||
| 41 | 62 | ||
| 42 | - // For direct I420 processing: | ||
| 43 | - // val processor = SelfieVideoProcessor(Dispatchers.IO) | ||
| 44 | - val processor = SelfieBitmapVideoProcessor(eglBase, Dispatchers.IO) | 63 | + private var imageAnalysis = ImageAnalysis.Builder().build() |
| 64 | + .apply { setAnalyzer(Dispatchers.IO.asExecutor(), processor.imageAnalyzer) } | ||
| 65 | + | ||
| 66 | + init { | ||
| 67 | + CameraXHelper.createCameraProvider(ProcessLifecycleOwner.get(), arrayOf(imageAnalysis)).let { | ||
| 68 | + if (it.isSupported(application)) { | ||
| 69 | + CameraCapturerUtils.registerCameraProvider(it) | ||
| 70 | + cameraProvider = it | ||
| 71 | + } | ||
| 72 | + } | ||
| 73 | + } | ||
| 74 | + | ||
| 75 | + val track = MutableLiveData<LocalVideoTrack?>(null) | ||
| 45 | 76 | ||
| 46 | fun startCapture() { | 77 | fun startCapture() { |
| 47 | - val selfieVideoTrack = room.localParticipant.createVideoTrack( | 78 | + val videoTrack = room.localParticipant.createVideoTrack( |
| 48 | options = LocalVideoTrackOptions(position = CameraPosition.FRONT), | 79 | options = LocalVideoTrackOptions(position = CameraPosition.FRONT), |
| 49 | videoProcessor = processor, | 80 | videoProcessor = processor, |
| 50 | ) | 81 | ) |
| 51 | 82 | ||
| 52 | - selfieVideoTrack.startCapture() | ||
| 53 | - track.postValue(selfieVideoTrack) | 83 | + videoTrack.startCapture() |
| 84 | + track.postValue(videoTrack) | ||
| 54 | } | 85 | } |
| 55 | 86 | ||
| 56 | override fun onCleared() { | 87 | override fun onCleared() { |
| @@ -58,5 +89,14 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | @@ -58,5 +89,14 @@ class MainViewModel(application: Application) : AndroidViewModel(application) { | ||
| 58 | track.value?.stopCapture() | 89 | track.value?.stopCapture() |
| 59 | room.release() | 90 | room.release() |
| 60 | processor.dispose() | 91 | processor.dispose() |
| 92 | + cameraProvider?.let { | ||
| 93 | + CameraCapturerUtils.unregisterCameraProvider(it) | ||
| 94 | + } | ||
| 95 | + } | ||
| 96 | + | ||
| 97 | + fun toggleProcessor(): Boolean { | ||
| 98 | + val newState = !processor.enabled | ||
| 99 | + processor.enabled = newState | ||
| 100 | + return newState | ||
| 61 | } | 101 | } |
| 62 | } | 102 | } |
不能预览此文件类型
| @@ -3,7 +3,6 @@ | @@ -3,7 +3,6 @@ | ||
| 3 | xmlns:tools="http://schemas.android.com/tools" | 3 | xmlns:tools="http://schemas.android.com/tools" |
| 4 | android:layout_width="match_parent" | 4 | android:layout_width="match_parent" |
| 5 | android:layout_height="match_parent" | 5 | android:layout_height="match_parent" |
| 6 | - android:background="#F00" | ||
| 7 | tools:context=".MainActivity"> | 6 | tools:context=".MainActivity"> |
| 8 | 7 | ||
| 9 | <io.livekit.android.renderer.TextureViewRenderer | 8 | <io.livekit.android.renderer.TextureViewRenderer |
| @@ -11,4 +10,11 @@ | @@ -11,4 +10,11 @@ | ||
| 11 | android:layout_width="match_parent" | 10 | android:layout_width="match_parent" |
| 12 | android:layout_height="match_parent" /> | 11 | android:layout_height="match_parent" /> |
| 13 | 12 | ||
| 13 | + <Button | ||
| 14 | + android:id="@+id/button" | ||
| 15 | + android:layout_width="wrap_content" | ||
| 16 | + android:layout_height="wrap_content" | ||
| 17 | + android:layout_margin="10dp" | ||
| 18 | + android:text="Disable" /> | ||
| 19 | + | ||
| 14 | </FrameLayout> | 20 | </FrameLayout> |
| 1 | /* | 1 | /* |
| 2 | - * Copyright 2024 LiveKit, Inc. | 2 | + * Copyright 2024-2025 LiveKit, Inc. |
| 3 | * | 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. | 5 | * you may not use this file except in compliance with the License. |
| @@ -21,11 +21,13 @@ okhttp = "4.12.0" | @@ -21,11 +21,13 @@ okhttp = "4.12.0" | ||
| 21 | preferenceKtx = "1.2.1" | 21 | preferenceKtx = "1.2.1" |
| 22 | protobuf = "3.22.0" | 22 | protobuf = "3.22.0" |
| 23 | protobufJavalite = "3.22.0" | 23 | protobufJavalite = "3.22.0" |
| 24 | +segmentationSelfie = "16.0.0-beta6" | ||
| 24 | semver4j = "3.1.0" | 25 | semver4j = "3.1.0" |
| 25 | appcompat = "1.6.1" | 26 | appcompat = "1.6.1" |
| 26 | material = "1.12.0" | 27 | material = "1.12.0" |
| 27 | viewpager2 = "1.0.0" | 28 | viewpager2 = "1.0.0" |
| 28 | noise = "2.0.0" | 29 | noise = "2.0.0" |
| 30 | +lifecycleProcess = "2.8.7" | ||
| 29 | 31 | ||
| 30 | [libraries] | 32 | [libraries] |
| 31 | android-jain-sip-ri = { module = "javax.sip:android-jain-sip-ri", version.ref = "androidJainSipRi" } | 33 | android-jain-sip-ri = { module = "javax.sip:android-jain-sip-ri", version.ref = "androidJainSipRi" } |
| @@ -56,6 +58,7 @@ leakcanary-android = { module = "com.squareup.leakcanary:leakcanary-android", ve | @@ -56,6 +58,7 @@ leakcanary-android = { module = "com.squareup.leakcanary:leakcanary-android", ve | ||
| 56 | okhttp-lib = { module = "com.squareup.okhttp3:okhttp", version.ref = "okhttp" } | 58 | okhttp-lib = { module = "com.squareup.okhttp3:okhttp", version.ref = "okhttp" } |
| 57 | okhttp-coroutines = { module = "com.squareup.okhttp3:okhttp", version.ref = "okhttp" } | 59 | okhttp-coroutines = { module = "com.squareup.okhttp3:okhttp", version.ref = "okhttp" } |
| 58 | protobuf-javalite = { module = "com.google.protobuf:protobuf-javalite", version.ref = "protobufJavalite" } | 60 | protobuf-javalite = { module = "com.google.protobuf:protobuf-javalite", version.ref = "protobufJavalite" } |
| 61 | +segmentation-selfie = { module = "com.google.mlkit:segmentation-selfie", version.ref = "segmentationSelfie" } | ||
| 59 | semver4j = { module = "com.vdurmont:semver4j", version.ref = "semver4j" } | 62 | semver4j = { module = "com.vdurmont:semver4j", version.ref = "semver4j" } |
| 60 | webrtc = { module = "io.github.webrtc-sdk:android-prefixed", version.ref = "webrtc" } | 63 | webrtc = { module = "io.github.webrtc-sdk:android-prefixed", version.ref = "webrtc" } |
| 61 | 64 | ||
| @@ -103,6 +106,7 @@ androidx-ui-tooling-preview = { group = "androidx.compose.ui", name = "ui-toolin | @@ -103,6 +106,7 @@ androidx-ui-tooling-preview = { group = "androidx.compose.ui", name = "ui-toolin | ||
| 103 | androidx-ui-test-manifest = { group = "androidx.compose.ui", name = "ui-test-manifest" } | 106 | androidx-ui-test-manifest = { group = "androidx.compose.ui", name = "ui-test-manifest" } |
| 104 | androidx-ui-test-junit4 = { group = "androidx.compose.ui", name = "ui-test-junit4" } | 107 | androidx-ui-test-junit4 = { group = "androidx.compose.ui", name = "ui-test-junit4" } |
| 105 | androidx-material3 = { group = "androidx.compose.material3", name = "material3" } | 108 | androidx-material3 = { group = "androidx.compose.material3", name = "material3" } |
| 109 | +lifecycle-process = { group = "androidx.lifecycle", name = "lifecycle-process", version.ref = "lifecycleProcess" } | ||
| 106 | 110 | ||
| 107 | [plugins] | 111 | [plugins] |
| 108 | 112 |
| @@ -12,7 +12,8 @@ Utilities and composables for use with Jetpack Compose. | @@ -12,7 +12,8 @@ Utilities and composables for use with Jetpack Compose. | ||
| 12 | 12 | ||
| 13 | # Package io.livekit.android.room | 13 | # Package io.livekit.android.room |
| 14 | 14 | ||
| 15 | -Room is the primary class that manages the connection to the LiveKit Room. It exposes listeners that lets you hook into room events. | 15 | +Room is the primary class that manages the connection to the LiveKit Room. It exposes listeners that |
| 16 | +let you hook into room events. ||
| 16 | 17 | ||
| 17 | # Package io.livekit.android.room.track | 18 | # Package io.livekit.android.room.track |
| 18 | 19 |
livekit-android-track-processors/.gitignore
0 → 100644
| 1 | +/build |
livekit-android-track-processors/README.md
0 → 100644
| 1 | +# Track Processors for LiveKit Android SDK | ||
| 2 | + | ||
| 3 | +[](https://maven-badges.herokuapp.com/maven-central/io.livekit/livekit-android-camerax) | ||
| 4 | + | ||
| 5 | +This library provides track processors for use with the Android LiveKit SDK. | ||
| 6 | + | ||
| 7 | +## Installation | ||
| 8 | + | ||
| 9 | +```groovy title="build.gradle" | ||
| 10 | +implementation "io.livekit:livekit-android-track-processors:<current livekit sdk release>" | ||
| 11 | +``` | ||
| 12 | + | ||
| 13 | +See our [release page](https://github.com/livekit/client-sdk-android/releases) for details on the | ||
| 14 | +current release version. | ||
| 15 | + | ||
| 16 | +## Usage of prebuilt processors | ||
| 17 | + | ||
| 18 | +This package exposes `VirtualBackgroundVideoProcessor` as a pre-prepared video processor. | ||
| 19 | + | ||
| 20 | +``` | ||
| 21 | +val processor = VirtualBackgroundVideoProcessor(eglBase).apply { | ||
| 22 | + // Optionally set a background image. | ||
| 23 | + // Will blur the background of the video if none is set. | ||
| 24 | + val drawable = AppCompatResources.getDrawable(application, R.drawable.background) as BitmapDrawable | ||
| 25 | + backgroundImage = drawable.bitmap | ||
| 26 | +} | ||
| 27 | +``` | ||
| 28 | + | ||
| 29 | +### Register the image analyzer in the CameraProvider | ||
| 30 | + | ||
| 31 | +`VirtualBackgroundVideoProcessor` requires the use of our CameraX provider. | ||
| 32 | + | ||
| 33 | +``` | ||
| 34 | +val imageAnalysis = ImageAnalysis.Builder().build() | ||
| 35 | + .apply { setAnalyzer(Dispatchers.IO.asExecutor(), processor.imageAnalyzer) } | ||
| 36 | + | ||
| 37 | +CameraXHelper.createCameraProvider(ProcessLifecycleOwner.get(), arrayOf(imageAnalysis)).let { | ||
| 38 | + if (it.isSupported(application)) { | ||
| 39 | + CameraCapturerUtils.registerCameraProvider(it) | ||
| 40 | + } | ||
| 41 | +} | ||
| 42 | +``` | ||
| 43 | + | ||
| 44 | +### Create and publish the video track | ||
| 45 | + | ||
| 46 | +``` | ||
| 47 | +val videoTrack = room.localParticipant.createVideoTrack( | ||
| 48 | + options = LocalVideoTrackOptions(position = CameraPosition.FRONT), | ||
| 49 | + videoProcessor = processor, | ||
| 50 | +) | ||
| 51 | + | ||
| 52 | +videoTrack.startCapture() | ||
| 53 | +room.localParticipant.publishVideoTrack(videoTrack) | ||
| 54 | +``` | ||
| 55 | + | ||
| 56 | +You can find an offline example of the `VirtualBackgroundVideoProcessor` in | ||
| 57 | +use [here](https://github.com/livekit/client-sdk-android/tree/main/examples/virtual-background). |
| 1 | +plugins { | ||
| 2 | + id "org.jetbrains.dokka" | ||
| 3 | + id 'com.android.library' | ||
| 4 | + id 'kotlin-android' | ||
| 5 | + id 'kotlin-kapt' | ||
| 6 | +} | ||
| 7 | + | ||
| 8 | +android { | ||
| 9 | + namespace 'io.livekit.android.track.processing' | ||
| 10 | + compileSdkVersion androidSdk.compileVersion | ||
| 11 | + | ||
| 12 | + defaultConfig { | ||
| 13 | + minSdkVersion androidSdk.minVersion | ||
| 14 | + targetSdkVersion androidSdk.targetVersion | ||
| 15 | + | ||
| 16 | + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" | ||
| 17 | + consumerProguardFiles "consumer-rules.pro" | ||
| 18 | + } | ||
| 19 | + | ||
| 20 | + buildTypes { | ||
| 21 | + release { | ||
| 22 | + minifyEnabled false | ||
| 23 | + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' | ||
| 24 | + } | ||
| 25 | + } | ||
| 26 | + compileOptions { | ||
| 27 | + sourceCompatibility java_version | ||
| 28 | + targetCompatibility java_version | ||
| 29 | + } | ||
| 30 | + kotlinOptions { | ||
| 31 | + freeCompilerArgs = ["-Xinline-classes", "-opt-in=kotlin.RequiresOptIn"] | ||
| 32 | + jvmTarget = java_version | ||
| 33 | + } | ||
| 34 | + testOptions { | ||
| 35 | + unitTests { | ||
| 36 | + includeAndroidResources = true | ||
| 37 | + } | ||
| 38 | + } | ||
| 39 | +} | ||
| 40 | + | ||
| 41 | +dokkaHtml { | ||
| 42 | + moduleName.set("livekit-android-track-processors") | ||
| 43 | + dokkaSourceSets { | ||
| 44 | + configureEach { | ||
| 45 | + skipEmptyPackages.set(true) | ||
| 46 | + includeNonPublic.set(false) | ||
| 47 | + includes.from("module.md") | ||
| 48 | + displayName.set("LiveKit Track Processors") | ||
| 49 | + sourceLink { | ||
| 50 | + localDirectory.set(file("src/main/java")) | ||
| 51 | + | ||
| 52 | + // URL showing where the source code can be accessed through the web browser | ||
| 53 | + remoteUrl.set(new URL( | ||
| 54 | + "https://github.com/livekit/client-sdk-android/tree/master/livekit-android-track-processors/src/main/java")) | ||
| 55 | + // Suffix which is used to append the line number to the URL. Use #L for GitHub | ||
| 56 | + remoteLineSuffix.set("#L") | ||
| 57 | + } | ||
| 58 | + | ||
| 59 | + perPackageOption { | ||
| 60 | + matchingRegex.set(".*\\.dagger.*") | ||
| 61 | + suppress.set(true) | ||
| 62 | + } | ||
| 63 | + | ||
| 64 | + perPackageOption { | ||
| 65 | + matchingRegex.set(".*\\.util.*") | ||
| 66 | + suppress.set(true) | ||
| 67 | + } | ||
| 68 | + } | ||
| 69 | + } | ||
| 70 | +} | ||
| 71 | + | ||
| 72 | +dependencies { | ||
| 73 | + | ||
| 74 | + implementation(project(":livekit-android-sdk")) | ||
| 75 | + implementation(project(":livekit-android-camerax")) | ||
| 76 | + implementation libs.timber | ||
| 77 | + implementation libs.coroutines.lib | ||
| 78 | + implementation libs.androidx.annotation | ||
| 79 | + | ||
| 80 | + implementation libs.webrtc | ||
| 81 | + implementation libs.segmentation.selfie | ||
| 82 | + | ||
| 83 | + testImplementation libs.junit | ||
| 84 | + testImplementation libs.robolectric | ||
| 85 | + | ||
| 86 | + androidTestImplementation libs.androidx.test.junit | ||
| 87 | + androidTestImplementation libs.espresso | ||
| 88 | +} | ||
| 89 | +tasks.withType(Test).configureEach { | ||
| 90 | + systemProperty "robolectric.logging.enabled", true | ||
| 91 | +} | ||
| 92 | + | ||
| 93 | +apply from: rootProject.file('gradle/gradle-mvn-push.gradle') | ||
| 94 | +apply from: rootProject.file('gradle/dokka-kotlin-dep-fix.gradle') | ||
| 95 | + | ||
| 96 | +afterEvaluate { | ||
| 97 | + publishing { | ||
| 98 | + publications { | ||
| 99 | + // Creates a Maven publication called "release". | ||
| 100 | + release(MavenPublication) { | ||
| 101 | + // Applies the component for the release build variant. | ||
| 102 | + from components.release | ||
| 103 | + | ||
| 104 | + // You can then customize attributes of the publication as shown below. | ||
| 105 | + groupId = GROUP | ||
| 106 | + artifactId = POM_ARTIFACT_ID | ||
| 107 | + version = VERSION_NAME | ||
| 108 | + } | ||
| 109 | + } | ||
| 110 | + } | ||
| 111 | +} |
livekit-android-track-processors/module.md
0 → 100644
| 1 | +# Add project specific ProGuard rules here. | ||
| 2 | +# You can control the set of applied configuration files using the | ||
| 3 | +# proguardFiles setting in build.gradle. | ||
| 4 | +# | ||
| 5 | +# For more details, see | ||
| 6 | +# http://developer.android.com/guide/developing/tools/proguard.html | ||
| 7 | + | ||
| 8 | +# If your project uses WebView with JS, uncomment the following | ||
| 9 | +# and specify the fully qualified class name to the JavaScript interface | ||
| 10 | +# class: | ||
| 11 | +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { | ||
| 12 | +# public *; | ||
| 13 | +#} | ||
| 14 | + | ||
| 15 | +# Uncomment this to preserve the line number information for | ||
| 16 | +# debugging stack traces. | ||
| 17 | +#-keepattributes SourceFile,LineNumberTable | ||
| 18 | + | ||
| 19 | +# If you keep the line number information, uncomment this to | ||
| 20 | +# hide the original source file name. | ||
| 21 | +#-renamesourcefileattribute SourceFile |
| 1 | +/* | ||
| 2 | + * Copyright 2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video | ||
| 18 | + | ||
| 19 | +import android.graphics.Bitmap | ||
| 20 | +import android.opengl.GLES20 | ||
| 21 | +import android.opengl.GLES30 | ||
| 22 | +import io.livekit.android.track.processing.video.opengl.LKGlTextureFrameBuffer | ||
| 23 | +import io.livekit.android.track.processing.video.shader.BlurShader | ||
| 24 | +import io.livekit.android.track.processing.video.shader.CompositeShader | ||
| 25 | +import io.livekit.android.track.processing.video.shader.ResamplerShader | ||
| 26 | +import io.livekit.android.track.processing.video.shader.createBlurShader | ||
| 27 | +import io.livekit.android.track.processing.video.shader.createBoxBlurShader | ||
| 28 | +import io.livekit.android.track.processing.video.shader.createCompsiteShader | ||
| 29 | +import io.livekit.android.track.processing.video.shader.createResampler | ||
| 30 | +import io.livekit.android.util.LKLog | ||
| 31 | +import livekit.org.webrtc.GlTextureFrameBuffer | ||
| 32 | +import livekit.org.webrtc.GlUtil | ||
| 33 | +import livekit.org.webrtc.RendererCommon | ||
| 34 | +import java.nio.ByteBuffer | ||
| 35 | + | ||
| 36 | +/** | ||
| 37 | + * Blurs the background of the camera video stream. | ||
| 38 | + */ | ||
| 39 | +class VirtualBackgroundTransformer( | ||
| 40 | + val blurRadius: Float = 16f, | ||
| 41 | + val downSampleFactor: Int = 2, | ||
| 42 | +) : RendererCommon.GlDrawer { | ||
| 43 | + | ||
| 44 | + data class MaskHolder(val width: Int, val height: Int, val buffer: ByteBuffer) | ||
| 45 | + | ||
| 46 | + private lateinit var compositeShader: CompositeShader | ||
| 47 | + private lateinit var blurShader: BlurShader | ||
| 48 | + private lateinit var boxBlurShader: BlurShader | ||
| 49 | + | ||
| 50 | + private var bgTexture = 0 | ||
| 51 | + private var frameTexture = 0 | ||
| 52 | + | ||
| 53 | + private lateinit var bgTextureFrameBuffers: Pair<GlTextureFrameBuffer, GlTextureFrameBuffer> | ||
| 54 | + | ||
| 55 | + private lateinit var downSampler: ResamplerShader | ||
| 56 | + | ||
| 57 | + var backgroundImageStateLock = Any() | ||
| 58 | + var backgroundImage: Bitmap? = null | ||
| 59 | + set(value) { | ||
| 60 | + if (value == field) { | ||
| 61 | + return | ||
| 62 | + } | ||
| 63 | + | ||
| 64 | + synchronized(backgroundImageStateLock) { | ||
| 65 | + field = value | ||
| 66 | + backgroundImageNeedsUploading = true | ||
| 67 | + } | ||
| 68 | + } | ||
| 69 | + var backgroundImageNeedsUploading = false | ||
| 70 | + | ||
| 71 | + // For double buffering the final mask | ||
| 72 | + private var readMaskIndex = 0 // Index for renderFrame to read from | ||
| 73 | + private var writeMaskIndex = 1 // Index for updateMask to write to | ||
| 74 | + | ||
| 75 | + private fun swapMaskIndexes() { | ||
| 76 | + if (readMaskIndex == 0) { | ||
| 77 | + readMaskIndex = 1 | ||
| 78 | + writeMaskIndex = 0 | ||
| 79 | + } else { | ||
| 80 | + readMaskIndex = 0 | ||
| 81 | + writeMaskIndex = 1 | ||
| 82 | + } | ||
| 83 | + } | ||
| 84 | + | ||
| 85 | + var newMask: MaskHolder? = null | ||
| 86 | + lateinit var anotherTempMaskFrameBuffer: LKGlTextureFrameBuffer | ||
| 87 | + lateinit var tempMaskTextureFrameBuffer: GlTextureFrameBuffer | ||
| 88 | + | ||
| 89 | + lateinit var finalMaskFrameBuffers: List<GlTextureFrameBuffer> | ||
| 90 | + | ||
| 91 | + var initialized = false | ||
| 92 | + | ||
| 93 | + fun initialize() { | ||
| 94 | + LKLog.e { "initialize shaders" } | ||
| 95 | + compositeShader = createCompsiteShader() | ||
| 96 | + blurShader = createBlurShader() | ||
| 97 | + boxBlurShader = createBoxBlurShader() | ||
| 98 | + | ||
| 99 | + bgTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D) | ||
| 100 | + frameTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D) | ||
| 101 | + | ||
| 102 | + bgTextureFrameBuffers = GlTextureFrameBuffer(GLES20.GL_RGBA) to GlTextureFrameBuffer(GLES20.GL_RGBA) | ||
| 103 | + | ||
| 104 | + downSampler = createResampler() | ||
| 105 | + | ||
| 106 | + // For double buffering the final mask | ||
| 107 | + anotherTempMaskFrameBuffer = LKGlTextureFrameBuffer(GLES30.GL_R32F, GLES30.GL_RED, GLES30.GL_FLOAT) | ||
| 108 | + tempMaskTextureFrameBuffer = GlTextureFrameBuffer(GLES20.GL_RGBA) | ||
| 109 | + | ||
| 110 | + finalMaskFrameBuffers = listOf(GlTextureFrameBuffer(GLES20.GL_RGBA), GlTextureFrameBuffer(GLES20.GL_RGBA)) | ||
| 111 | + | ||
| 112 | + GlUtil.checkNoGLES2Error("VirtualBackgroundTransformer.initialize") | ||
| 113 | + initialized = true | ||
| 114 | + } | ||
| 115 | + | ||
| 116 | + override fun drawOes( | ||
| 117 | + oesTextureId: Int, | ||
| 118 | + texMatrix: FloatArray, | ||
| 119 | + frameWidth: Int, | ||
| 120 | + frameHeight: Int, | ||
| 121 | + viewportX: Int, | ||
| 122 | + viewportY: Int, | ||
| 123 | + viewportWidth: Int, | ||
| 124 | + viewportHeight: Int, | ||
| 125 | + ) { | ||
| 126 | + LKLog.e { "drawOes" } | ||
| 127 | + if (!initialized) { | ||
| 128 | + initialize() | ||
| 129 | + } | ||
| 130 | + | ||
| 131 | + newMask?.let { | ||
| 132 | + updateMaskFrameBuffer(it) | ||
| 133 | + newMask = null | ||
| 134 | + } | ||
| 135 | + | ||
| 136 | + val backgroundTexture: Int | ||
| 137 | + | ||
| 138 | + synchronized(backgroundImageStateLock) { | ||
| 139 | + val backgroundImage = this.backgroundImage | ||
| 140 | + if (backgroundImage != null) { | ||
| 141 | + val bgTextureFrameBuffer = bgTextureFrameBuffers.first | ||
| 142 | + | ||
| 143 | + if (backgroundImageNeedsUploading || true) { | ||
| 144 | + val byteBuffer = ByteBuffer.allocateDirect(backgroundImage.byteCount) | ||
| 145 | + backgroundImage.copyPixelsToBuffer(byteBuffer) | ||
| 146 | + byteBuffer.rewind() | ||
| 147 | + | ||
| 148 | + // Upload the background into a texture | ||
| 149 | + bgTextureFrameBuffer.setSize(backgroundImage.width, backgroundImage.height) | ||
| 150 | + GLES20.glActiveTexture(GLES20.GL_TEXTURE0) | ||
| 151 | + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bgTextureFrameBuffer.textureId) | ||
| 152 | + checkNoError("bindBackgroundTexture") | ||
| 153 | + | ||
| 154 | + GLES20.glTexSubImage2D( | ||
| 155 | + /*target*/ | ||
| 156 | + GLES20.GL_TEXTURE_2D, | ||
| 157 | + 0, | ||
| 158 | + 0, | ||
| 159 | + 0, | ||
| 160 | + backgroundImage.width, | ||
| 161 | + backgroundImage.height, | ||
| 162 | + /*format*/ | ||
| 163 | + GLES20.GL_RGBA, | ||
| 164 | + /*type*/ | ||
| 165 | + GLES20.GL_UNSIGNED_BYTE, | ||
| 166 | + byteBuffer, | ||
| 167 | + ) | ||
| 168 | + checkNoError("updateBackgroundFrameBuffer") | ||
| 169 | + backgroundImageNeedsUploading = false | ||
| 170 | + } | ||
| 171 | + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0) | ||
| 172 | + backgroundTexture = bgTextureFrameBuffer.textureId | ||
| 173 | + } else { | ||
| 174 | + val downSampleWidth = frameWidth / downSampleFactor | ||
| 175 | + val downSampleHeight = frameHeight / downSampleFactor | ||
| 176 | + | ||
| 177 | + val downSampledFrameTexture = downSampler.resample(oesTextureId, downSampleWidth, downSampleHeight, IDENTITY) | ||
| 178 | + backgroundTexture = | ||
| 179 | + blurShader.applyBlur(downSampledFrameTexture, blurRadius, downSampleWidth, downSampleHeight, bgTextureFrameBuffers) | ||
| 180 | + } | ||
| 181 | + } | ||
| 182 | + | ||
| 183 | + compositeShader.renderComposite( | ||
| 184 | + backgroundTextureId = backgroundTexture, | ||
| 185 | + frameTextureId = oesTextureId, | ||
| 186 | + maskTextureId = finalMaskFrameBuffers[readMaskIndex].textureId, | ||
| 187 | + viewportX = viewportX, | ||
| 188 | + viewportY = viewportY, | ||
| 189 | + viewportWidth = viewportWidth, | ||
| 190 | + viewportHeight = viewportHeight, | ||
| 191 | + texMatrix = texMatrix, | ||
| 192 | + ) | ||
| 193 | + } | ||
| 194 | + | ||
| 195 | + /** | ||
| 196 | + * Thread-safe method to set the foreground mask. | ||
| 197 | + */ | ||
| 198 | + fun updateMask(segmentationMask: MaskHolder) { | ||
| 199 | + newMask = segmentationMask | ||
| 200 | + } | ||
| 201 | + | ||
| 202 | + private fun updateMaskFrameBuffer(segmentationMask: MaskHolder) { | ||
| 203 | + val width = segmentationMask.width | ||
| 204 | + val height = segmentationMask.height | ||
| 205 | + | ||
| 206 | + anotherTempMaskFrameBuffer.setSize(segmentationMask.width, segmentationMask.height) | ||
| 207 | + tempMaskTextureFrameBuffer.setSize(segmentationMask.width, segmentationMask.height) | ||
| 208 | + finalMaskFrameBuffers[0].setSize(segmentationMask.width, segmentationMask.height) | ||
| 209 | + finalMaskFrameBuffers[1].setSize(segmentationMask.width, segmentationMask.height) | ||
| 210 | + | ||
| 211 | + // Upload the mask into a texture | ||
| 212 | + GLES20.glActiveTexture(GLES20.GL_TEXTURE0) | ||
| 213 | + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, anotherTempMaskFrameBuffer.textureId) | ||
| 214 | + checkNoError("bindMaskTexture") | ||
| 215 | + | ||
| 216 | + GLES20.glTexSubImage2D( | ||
| 217 | + /*target*/ | ||
| 218 | + GLES20.GL_TEXTURE_2D, | ||
| 219 | + 0, | ||
| 220 | + 0, | ||
| 221 | + 0, | ||
| 222 | + width, | ||
| 223 | + height, | ||
| 224 | + /*format*/ | ||
| 225 | + GLES30.GL_RED, | ||
| 226 | + /*type*/ | ||
| 227 | + GLES20.GL_FLOAT, | ||
| 228 | + segmentationMask.buffer, | ||
| 229 | + ) | ||
| 230 | + | ||
| 231 | + checkNoError("updateMaskFrameBuffer") | ||
| 232 | + | ||
| 233 | + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0) | ||
| 234 | + | ||
| 235 | + val finalMaskBuffer = finalMaskFrameBuffers[writeMaskIndex] | ||
| 236 | + val frameBuffers = tempMaskTextureFrameBuffer to finalMaskBuffer | ||
| 237 | + | ||
| 238 | + boxBlurShader.applyBlur(anotherTempMaskFrameBuffer.textureId, 2f, width, height, frameBuffers) | ||
| 239 | + | ||
| 240 | + // Swap indices for next frame. | ||
| 241 | + swapMaskIndexes() | ||
| 242 | + } | ||
| 243 | + | ||
| 244 | + override fun drawRgb(p0: Int, p1: FloatArray?, p2: Int, p3: Int, p4: Int, p5: Int, p6: Int, p7: Int) { | ||
| 245 | + TODO("Not yet implemented") | ||
| 246 | + } | ||
| 247 | + | ||
| 248 | + override fun drawYuv(p0: IntArray?, p1: FloatArray?, p2: Int, p3: Int, p4: Int, p5: Int, p6: Int, p7: Int) { | ||
| 249 | + TODO("Not yet implemented") | ||
| 250 | + } | ||
| 251 | + | ||
| 252 | + override fun release() { | ||
| 253 | + compositeShader.release() | ||
| 254 | + blurShader.release() | ||
| 255 | + boxBlurShader.release() | ||
| 256 | + | ||
| 257 | + bgTextureFrameBuffers.first.release() | ||
| 258 | + bgTextureFrameBuffers.second.release() | ||
| 259 | + downSampler.release() | ||
| 260 | + | ||
| 261 | + anotherTempMaskFrameBuffer.release() | ||
| 262 | + tempMaskTextureFrameBuffer.release() | ||
| 263 | + finalMaskFrameBuffers.forEach { | ||
| 264 | + it.release() | ||
| 265 | + } | ||
| 266 | + } | ||
| 267 | + | ||
| 268 | + companion object { | ||
| 269 | + | ||
| 270 | + val TAG = VirtualBackgroundTransformer::class.java.simpleName | ||
| 271 | + val IDENTITY = | ||
| 272 | + floatArrayOf( | ||
| 273 | + 1f, 0f, 0f, 0f, | ||
| 274 | + 0f, 1f, 0f, 0f, | ||
| 275 | + 0f, 0f, 1f, 0f, | ||
| 276 | + 0f, 0f, 0f, 1f, | ||
| 277 | + ) | ||
| 278 | + } | ||
| 279 | + | ||
| 280 | + private fun checkNoError(message: String) { | ||
| 281 | + GlUtil.checkNoGLES2Error("$TAG.$message") | ||
| 282 | + } | ||
| 283 | +} |
| 1 | +/* | ||
| 2 | + * Copyright 2024-2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video | ||
| 18 | + | ||
| 19 | +import android.graphics.Bitmap | ||
| 20 | +import android.graphics.Matrix | ||
| 21 | +import android.view.Surface | ||
| 22 | +import androidx.annotation.OptIn | ||
| 23 | +import androidx.camera.core.ExperimentalGetImage | ||
| 24 | +import androidx.camera.core.ImageAnalysis | ||
| 25 | +import androidx.camera.core.ImageProxy | ||
| 26 | +import com.google.mlkit.vision.common.InputImage | ||
| 27 | +import com.google.mlkit.vision.segmentation.Segmentation | ||
| 28 | +import com.google.mlkit.vision.segmentation.Segmenter | ||
| 29 | +import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions | ||
| 30 | +import io.livekit.android.room.track.video.NoDropVideoProcessor | ||
| 31 | +import kotlinx.coroutines.CoroutineDispatcher | ||
| 32 | +import kotlinx.coroutines.CoroutineScope | ||
| 33 | +import kotlinx.coroutines.Dispatchers | ||
| 34 | +import kotlinx.coroutines.cancel | ||
| 35 | +import kotlinx.coroutines.channels.BufferOverflow | ||
| 36 | +import kotlinx.coroutines.flow.MutableSharedFlow | ||
| 37 | +import kotlinx.coroutines.launch | ||
| 38 | +import livekit.org.webrtc.EglBase | ||
| 39 | +import livekit.org.webrtc.EglRenderer | ||
| 40 | +import livekit.org.webrtc.GlUtil | ||
| 41 | +import livekit.org.webrtc.SurfaceTextureHelper | ||
| 42 | +import livekit.org.webrtc.VideoFrame | ||
| 43 | +import livekit.org.webrtc.VideoSink | ||
| 44 | +import java.util.concurrent.Semaphore | ||
| 45 | + | ||
| 46 | +/** | ||
| 47 | + * A virtual background video processor for the local camera video stream. | ||
| 48 | + * | ||
| 49 | + * By default, blurs the background of the video stream. | ||
| 50 | + * Setting [backgroundImage] will use the provided image instead. | ||
| 51 | + */ | ||
| 52 | +class VirtualBackgroundVideoProcessor(private val eglBase: EglBase, dispatcher: CoroutineDispatcher = Dispatchers.Default) : NoDropVideoProcessor() { | ||
| 53 | + | ||
| 54 | + private var targetSink: VideoSink? = null | ||
| 55 | + private val segmenter: Segmenter | ||
| 56 | + | ||
| 57 | + private var lastRotation = 0 | ||
| 58 | + private var lastWidth = 0 | ||
| 59 | + private var lastHeight = 0 | ||
| 60 | + private val surfaceTextureHelper = SurfaceTextureHelper.create("BitmapToYUV", eglBase.eglBaseContext) | ||
| 61 | + private val surface = Surface(surfaceTextureHelper.surfaceTexture) | ||
| 62 | + private val backgroundTransformer = VirtualBackgroundTransformer() | ||
| 63 | + private val eglRenderer = EglRenderer(VirtualBackgroundVideoProcessor::class.java.simpleName) | ||
| 64 | + .apply { | ||
| 65 | + init(eglBase.eglBaseContext, EglBase.CONFIG_PLAIN, backgroundTransformer) | ||
| 66 | + createEglSurface(surface) | ||
| 67 | + } | ||
| 68 | + | ||
| 69 | + private val scope = CoroutineScope(dispatcher) | ||
| 70 | + private val taskFlow = MutableSharedFlow<VideoFrame>( | ||
| 71 | + replay = 0, | ||
| 72 | + extraBufferCapacity = 1, | ||
| 73 | + onBufferOverflow = BufferOverflow.SUSPEND, | ||
| 74 | + ) | ||
| 75 | + | ||
| 76 | + /** | ||
| 77 | + * Enables or disables the virtual background. | ||
| 78 | + * | ||
| 79 | + * Defaults to true. | ||
| 80 | + */ | ||
| 81 | + var enabled: Boolean = true | ||
| 82 | + | ||
| 83 | + var backgroundImage: Bitmap? = null | ||
| 84 | + set(value) { | ||
| 85 | + field = value | ||
| 86 | + backgroundImageNeedsUpdating = true | ||
| 87 | + } | ||
| 88 | + private var backgroundImageNeedsUpdating = false | ||
| 89 | + | ||
| 90 | + init { | ||
| 91 | + val options = | ||
| 92 | + SelfieSegmenterOptions.Builder() | ||
| 93 | + .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE) | ||
| 94 | + .build() | ||
| 95 | + segmenter = Segmentation.getClient(options) | ||
| 96 | + | ||
| 97 | + // Funnel processing into a single flow that won't buffer, | ||
| 98 | + // since processing may be slower than video capture. | ||
| 99 | + scope.launch { | ||
| 100 | + taskFlow.collect { frame -> | ||
| 101 | + processFrame(frame) | ||
| 102 | + frame.release() | ||
| 103 | + } | ||
| 104 | + } | ||
| 105 | + } | ||
| 106 | + | ||
| 107 | + private var lastMask: VirtualBackgroundTransformer.MaskHolder? = null | ||
| 108 | + | ||
| 109 | + private inner class ImageAnalyser : ImageAnalysis.Analyzer { | ||
| 110 | + val latch = Semaphore(1, true) | ||
| 111 | + | ||
| 112 | + @OptIn(ExperimentalGetImage::class) | ||
| 113 | + override fun analyze(imageProxy: ImageProxy) { | ||
| 114 | + val image = imageProxy.image | ||
| 115 | + | ||
| 116 | + if (enabled && image != null) { | ||
| 117 | + // Put 0 for rotation degrees | ||
| 118 | + // We'll rotate it together with the original video frame in the shader. | ||
| 119 | + val inputImage = InputImage.fromMediaImage(image, 0) | ||
| 120 | + latch.acquire() | ||
| 121 | + val task = segmenter.process(inputImage) | ||
| 122 | + task.addOnSuccessListener { mask -> | ||
| 123 | + val holder = VirtualBackgroundTransformer.MaskHolder(mask.width, mask.height, mask.buffer) | ||
| 124 | + lastMask = holder | ||
| 125 | + latch.release() | ||
| 126 | + } | ||
| 127 | + latch.acquire() | ||
| 128 | + latch.release() | ||
| 129 | + } | ||
| 130 | + | ||
| 131 | + imageProxy.close() | ||
| 132 | + } | ||
| 133 | + } | ||
| 134 | + | ||
| 135 | + @Suppress("unused") | ||
| 136 | + val imageAnalyzer: ImageAnalysis.Analyzer = ImageAnalyser() | ||
| 137 | + | ||
| 138 | + override fun onCapturerStarted(started: Boolean) { | ||
| 139 | + if (started) { | ||
| 140 | + surfaceTextureHelper.startListening { frame -> | ||
| 141 | + targetSink?.onFrame(frame) | ||
| 142 | + } | ||
| 143 | + } | ||
| 144 | + } | ||
| 145 | + | ||
| 146 | + override fun onCapturerStopped() { | ||
| 147 | + surfaceTextureHelper.stopListening() | ||
| 148 | + } | ||
| 149 | + | ||
| 150 | + override fun onFrameCaptured(frame: VideoFrame) { | ||
| 151 | + // If disabled, just pass through to the sink. | ||
| 152 | + if (!enabled) { | ||
| 153 | + targetSink?.onFrame(frame) | ||
| 154 | + return | ||
| 155 | + } | ||
| 156 | + | ||
| 157 | + try { | ||
| 158 | + frame.retain() | ||
| 159 | + } catch (e: Exception) { | ||
| 160 | + return | ||
| 161 | + } | ||
| 162 | + | ||
| 163 | + // If the frame is successfully emitted, the process flow will own the frame. | ||
| 164 | + if (!taskFlow.tryEmit(frame)) { | ||
| 165 | + frame.release() | ||
| 166 | + } | ||
| 167 | + } | ||
| 168 | + | ||
| 169 | + fun processFrame(frame: VideoFrame) { | ||
| 170 | + if (lastRotation != frame.rotation) { | ||
| 171 | + lastRotation = frame.rotation | ||
| 172 | + backgroundImageNeedsUpdating = true | ||
| 173 | + } | ||
| 174 | + | ||
| 175 | + if (lastWidth != frame.rotatedWidth || lastHeight != frame.rotatedHeight) { | ||
| 176 | + surfaceTextureHelper.setTextureSize(frame.rotatedWidth, frame.rotatedHeight) | ||
| 177 | + lastWidth = frame.rotatedWidth | ||
| 178 | + lastHeight = frame.rotatedHeight | ||
| 179 | + backgroundImageNeedsUpdating = true | ||
| 180 | + } | ||
| 181 | + | ||
| 182 | + frame.retain() | ||
| 183 | + surfaceTextureHelper.handler.post { | ||
| 184 | + val backgroundImage = this.backgroundImage | ||
| 185 | + if (backgroundImageNeedsUpdating && backgroundImage != null) { | ||
| 186 | + val imageAspect = backgroundImage.width / backgroundImage.height.toFloat() | ||
| 187 | + val targetAspect = frame.rotatedWidth / frame.rotatedHeight.toFloat() | ||
| 188 | + var sx = 0 | ||
| 189 | + var sy = 0 | ||
| 190 | + var sWidth = backgroundImage.width | ||
| 191 | + var sHeight = backgroundImage.height | ||
| 192 | + | ||
| 193 | + if (imageAspect > targetAspect) { | ||
| 194 | + sWidth = Math.round(backgroundImage.height * targetAspect) | ||
| 195 | + sx = Math.round((backgroundImage.width - sWidth) / 2f) | ||
| 196 | + } else { | ||
| 197 | + sHeight = Math.round(backgroundImage.width / targetAspect) | ||
| 198 | + sy = Math.round((backgroundImage.height - sHeight) / 2f) | ||
| 199 | + } | ||
| 200 | + | ||
| 201 | + val diffAspect = targetAspect / imageAspect | ||
| 202 | + | ||
| 203 | + val matrix = Matrix() | ||
| 204 | + | ||
| 205 | + matrix.postRotate(-frame.rotation.toFloat()) | ||
| 206 | + | ||
| 207 | + val resizedImage = Bitmap.createBitmap( | ||
| 208 | + backgroundImage, | ||
| 209 | + sx, | ||
| 210 | + sy, | ||
| 211 | + sWidth, | ||
| 212 | + sHeight, | ||
| 213 | + matrix, | ||
| 214 | + true, | ||
| 215 | + ) | ||
| 216 | + backgroundTransformer.backgroundImage = resizedImage | ||
| 217 | + backgroundImageNeedsUpdating = false | ||
| 218 | + } | ||
| 219 | + | ||
| 220 | + lastMask?.let { | ||
| 221 | + backgroundTransformer.updateMask(it) | ||
| 222 | + } | ||
| 223 | + lastMask = null | ||
| 224 | + eglRenderer.onFrame(frame) | ||
| 225 | + frame.release() | ||
| 226 | + } | ||
| 227 | + } | ||
| 228 | + | ||
    /** Sets the sink that will receive the processed output frames. May be null to detach. */
    override fun setSink(sink: VideoSink?) {
        targetSink = sink
    }
| 232 | + | ||
    /**
     * Releases all resources held by this processor. The instance must not be used afterwards.
     *
     * Teardown order: cancel pending coroutine work and the segmenter first, then stop and
     * dispose the capture surface pipeline, and finally release the GL renderer and shaders.
     * NOTE(review): the GL releases appear to assume an appropriate thread/context — confirm
     * callers invoke dispose() from a safe thread.
     */
    fun dispose() {
        scope.cancel()
        segmenter.close()
        surfaceTextureHelper.stopListening()
        surfaceTextureHelper.dispose()
        surface.release()
        eglRenderer.release()
        backgroundTransformer.release()
        // Surface any GL errors produced during teardown.
        GlUtil.checkNoGLES2Error("VirtualBackgroundVideoProcessor.dispose")
    }
| 243 | +} |
| 1 | +/* | ||
| 2 | + * Copyright 2015 The WebRTC project authors. All Rights Reserved. | ||
| 3 | + * | ||
| 4 | + * Use of this source code is governed by a BSD-style license | ||
| 5 | + * that can be found in the LICENSE file in the root of the source | ||
| 6 | + * tree. An additional intellectual property rights grant can be found | ||
| 7 | + * in the file PATENTS. All contributing project authors may | ||
| 8 | + * be found in the AUTHORS file in the root of the source tree. | ||
| 9 | + */ | ||
| 10 | + | ||
| 11 | +package io.livekit.android.track.processing.video.opengl; | ||
| 12 | + | ||
| 13 | +import android.opengl.GLES20; | ||
| 14 | +import android.opengl.GLES30; | ||
| 15 | + | ||
| 16 | +import livekit.org.webrtc.GlUtil; | ||
| 17 | + | ||
| 18 | +/** | ||
| 19 | + * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil | ||
| 20 | + * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color | ||
| 21 | + * conversion. This class is not thread safe and must be used by a thread with an active GL context. | ||
| 22 | + */ | ||
| 23 | +// TODO(magjed): Add unittests for this class. | ||
| 24 | +public class LKGlTextureFrameBuffer { | ||
| 25 | + private final int internalFormat; | ||
| 26 | + private final int pixelFormat; | ||
| 27 | + private final int type; | ||
| 28 | + private int frameBufferId; | ||
| 29 | + private int textureId; | ||
| 30 | + private int width; | ||
| 31 | + private int height; | ||
| 32 | + | ||
| 33 | + /** | ||
| 34 | + * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread | ||
| 35 | + * when calling this function. The framebuffer is not complete until setSize() is called. | ||
| 36 | + */ | ||
| 37 | + public LKGlTextureFrameBuffer(int internalFormat, int pixelFormat, int type) { | ||
| 38 | + this.internalFormat = internalFormat; | ||
| 39 | + this.pixelFormat = pixelFormat; | ||
| 40 | + this.type = type; | ||
| 41 | + this.width = 0; | ||
| 42 | + this.height = 0; | ||
| 43 | + } | ||
| 44 | + | ||
| 45 | + /** | ||
| 46 | + * (Re)allocate texture. Will do nothing if the requested size equals the current size. An | ||
| 47 | + * EGLContext must be bound on the current thread when calling this function. Must be called at | ||
| 48 | + * least once before using the framebuffer. May be called multiple times to change size. | ||
| 49 | + */ | ||
| 50 | + public void setSize(int width, int height) { | ||
| 51 | + if (width <= 0 || height <= 0) { | ||
| 52 | + throw new IllegalArgumentException("Invalid size: " + width + "x" + height); | ||
| 53 | + } | ||
| 54 | + if (width == this.width && height == this.height) { | ||
| 55 | + return; | ||
| 56 | + } | ||
| 57 | + this.width = width; | ||
| 58 | + this.height = height; | ||
| 59 | + // Lazy allocation the first time setSize() is called. | ||
| 60 | + if (textureId == 0) { | ||
| 61 | + textureId = GlUtil.generateTexture(GLES30.GL_TEXTURE_2D); | ||
| 62 | + } | ||
| 63 | + if (frameBufferId == 0) { | ||
| 64 | + final int frameBuffers[] = new int[1]; | ||
| 65 | + GLES30.glGenFramebuffers(1, frameBuffers, 0); | ||
| 66 | + frameBufferId = frameBuffers[0]; | ||
| 67 | + } | ||
| 68 | + | ||
| 69 | + // Allocate texture. | ||
| 70 | + GLES30.glActiveTexture(GLES30.GL_TEXTURE0); | ||
| 71 | + GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureId); | ||
| 72 | + GLES30.glTexImage2D(GLES30.GL_TEXTURE_2D, 0, internalFormat, width, height, 0, pixelFormat, | ||
| 73 | + type, null); | ||
| 74 | + GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0); | ||
| 75 | + GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize"); | ||
| 76 | + | ||
| 77 | + // Attach the texture to the framebuffer as color attachment. | ||
| 78 | + GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBufferId); | ||
| 79 | + GLES30.glFramebufferTexture2D( | ||
| 80 | + GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, textureId, 0); | ||
| 81 | + | ||
| 82 | + // Check that the framebuffer is in a good state. | ||
| 83 | + final int status = GLES30.glCheckFramebufferStatus(GLES30.GL_FRAMEBUFFER); | ||
| 84 | + if (status != GLES30.GL_FRAMEBUFFER_COMPLETE) { | ||
| 85 | + throw new IllegalStateException("Framebuffer not complete, status: " + status); | ||
| 86 | + } | ||
| 87 | + | ||
| 88 | + GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0); | ||
| 89 | + } | ||
| 90 | + | ||
| 91 | + public int getWidth() { | ||
| 92 | + return width; | ||
| 93 | + } | ||
| 94 | + | ||
| 95 | + public int getHeight() { | ||
| 96 | + return height; | ||
| 97 | + } | ||
| 98 | + | ||
| 99 | + /** | ||
| 100 | + * Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. | ||
| 101 | + */ | ||
| 102 | + public int getFrameBufferId() { | ||
| 103 | + return frameBufferId; | ||
| 104 | + } | ||
| 105 | + | ||
| 106 | + /** | ||
| 107 | + * Gets the OpenGL texture id. This value is only valid after setSize() has been called. | ||
| 108 | + */ | ||
| 109 | + public int getTextureId() { | ||
| 110 | + return textureId; | ||
| 111 | + } | ||
| 112 | + | ||
| 113 | + /** | ||
| 114 | + * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling | ||
| 115 | + * this function. This object should not be used after this call. | ||
| 116 | + */ | ||
| 117 | + public void release() { | ||
| 118 | + GLES20.glDeleteTextures(1, new int[]{textureId}, 0); | ||
| 119 | + textureId = 0; | ||
| 120 | + GLES20.glDeleteFramebuffers(1, new int[]{frameBufferId}, 0); | ||
| 121 | + frameBufferId = 0; | ||
| 122 | + width = 0; | ||
| 123 | + height = 0; | ||
| 124 | + } | ||
| 125 | +} |
| 1 | +/* | ||
| 2 | + * Copyright 2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video.shader | ||
| 18 | + | ||
| 19 | +import android.opengl.GLES20 | ||
| 20 | +import livekit.org.webrtc.GlShader | ||
| 21 | +import livekit.org.webrtc.GlTextureFrameBuffer | ||
| 22 | +import livekit.org.webrtc.GlUtil | ||
| 23 | + | ||
// Separable Gaussian blur fragment shader (GLSL ES 3.00). Rendered twice per frame: once with
// u_direction = (1, 0) for the horizontal pass and once with (0, 1) for the vertical pass.
// u_radius doubles as the Gaussian sigma; the tap count is clamped to MAX_SAMPLES (16) per side
// so the loop cost stays bounded.
private const val BLUR_FRAGMENT_SHADER = """#version 300 es
precision mediump float;

in vec2 texCoords;

uniform sampler2D u_texture;
uniform vec2 u_texelSize;
uniform vec2 u_direction;
uniform float u_radius;

out vec4 fragColor;

void main() {
    float sigma = u_radius;
    float twoSigmaSq = 2.0 * sigma * sigma;
    float totalWeight = 0.0;
    vec3 result = vec3(0.0);
    const int MAX_SAMPLES = 16;
    int radius = int(min(float(MAX_SAMPLES), ceil(u_radius)));

    for (int i = -MAX_SAMPLES; i <= MAX_SAMPLES; ++i) {
        float offset = float(i);
        if (abs(offset) > float(radius)) continue;
        float weight = exp(-(offset * offset) / twoSigmaSq);
        vec2 sampleCoord = texCoords + u_direction * u_texelSize * offset;
        result += texture(u_texture, sampleCoord).rgb * weight;
        totalWeight += weight;
    }

    fragColor = vec4(result / totalWeight, 1.0);
}
"""
| 56 | + | ||
/**
 * Compiles the two-pass Gaussian blur program and resolves its attribute/uniform locations.
 *
 * The constant vertex shader derives texture coordinates from the vertex positions, so the
 * texture-coordinate attribute and texture-matrix uniform are unused (locations left at 0).
 */
internal fun createBlurShader(): BlurShader {
    val program = GlShader(CONSTANT_VERTEX_SHADER_SOURCE, BLUR_FRAGMENT_SHADER)
    val posLocation = program.getAttribLocation(VERTEX_SHADER_POS_COORD_NAME)

    return BlurShader(
        shader = program,
        inPosLocation = posLocation,
        inTcLocation = 0,
        texMatrixLocation = 0,
        texture = program.getUniformLocation("u_texture"),
        texelSize = program.getUniformLocation("u_texelSize"),
        direction = program.getUniformLocation("u_direction"),
        radius = program.getUniformLocation("u_radius"),
    )
}
| 71 | + | ||
/**
 * Holds the compiled two-pass (separable) blur program and its shader locations.
 *
 * When created via [createBlurShader]/[createBoxBlurShader], [inTcLocation] and
 * [texMatrixLocation] are 0 and unused: the constant vertex shader derives texture coordinates
 * from vertex positions.
 */
internal data class BlurShader(
    val shader: GlShader,
    val inPosLocation: Int,
    val inTcLocation: Int,
    val texMatrixLocation: Int,
    val texture: Int,
    val texelSize: Int,
    val direction: Int,
    val radius: Int,
) {
    /** Releases the underlying GL program. The object must not be used afterwards. */
    fun release() {
        shader.release()
    }

    /**
     * Applies a separable blur (horizontal pass, then vertical pass) to [inputTextureId].
     *
     * @param inputTextureId GL_TEXTURE_2D texture to blur.
     * @param blurRadius blur strength, forwarded to the shader's u_radius uniform.
     * @param viewportWidth width of the blur render targets, in pixels.
     * @param viewportHeight height of the blur render targets, in pixels.
     * @param processFrameBuffer pair of scratch framebuffers for the two passes; resized here.
     * @param texMatrix optional texture transform. NOTE(review): when non-null, texture
     *   coordinates are uploaded at location 0 ([inTcLocation]), which the constant vertex
     *   shader does not declare — confirm callers always pass null here.
     * @return the texture id holding the blurred result (the second framebuffer's texture),
     *   owned by [processFrameBuffer] and valid until its next use.
     */
    fun applyBlur(
        inputTextureId: Int,
        blurRadius: Float,
        viewportWidth: Int,
        viewportHeight: Int,
        processFrameBuffer: Pair<GlTextureFrameBuffer, GlTextureFrameBuffer>,
        texMatrix: FloatArray? = null,
    ): Int {
        shader.useProgram()

        // Upload the texture coordinates.
        ShaderUtil.loadCoordMatrix(
            inPosLocation = inPosLocation,
            inPosFloats = FULL_RECTANGLE_BUFFER,
            inTcLocation = inTcLocation,
            inTcFloats = if (texMatrix != null) FULL_RECTANGLE_TEXTURE_BUFFER else null,
            texMatrixLocation = texMatrixLocation,
            texMatrix = texMatrix,
        )

        GlUtil.checkNoGLES2Error("BlurShader.loadCoordMatrix")

        // Both scratch targets must match the output size before rendering into them.
        processFrameBuffer.first.setSize(viewportWidth, viewportHeight)
        processFrameBuffer.second.setSize(viewportWidth, viewportHeight)
        GlUtil.checkNoGLES2Error("BlurShader.updateFrameBufferSizes")

        val texelWidth = 1.0f / viewportWidth
        val texelHeight = 1.0f / viewportHeight

        // First pass - horizontal blur
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, processFrameBuffer.first.frameBufferId)
        GLES20.glViewport(0, 0, viewportWidth, viewportHeight)

        GlUtil.checkNoGLES2Error("BlurShader.glBindFramebuffer")
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureId)
        GlUtil.checkNoGLES2Error("BlurShader.bind oes")
        GLES20.glUniform1i(texture, 0)
        GLES20.glUniform2f(texelSize, texelWidth, texelHeight)
        GLES20.glUniform2f(direction, 1.0f, 0.0f) // Horizontal
        GLES20.glUniform1f(radius, blurRadius)

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        GlUtil.checkNoGLES2Error("BlurShader.GL_TRIANGLE_STRIP")

        // cleanup
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)

        // Second pass - vertical blur, sampling the first pass's output.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, processFrameBuffer.second.frameBufferId)
        GLES20.glViewport(0, 0, viewportWidth, viewportHeight)

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, processFrameBuffer.first.textureId)
        GLES20.glUniform1i(texture, 0)
        GLES20.glUniform2f(texelSize, texelWidth, texelHeight)
        GLES20.glUniform2f(direction, 0.0f, 1.0f) // Vertical
        GLES20.glUniform1f(radius, blurRadius)

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)

        GlUtil.checkNoGLES2Error("BlurShader.GL_TRIANGLE_STRIP2")
        // cleanup
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)

        GlUtil.checkNoGLES2Error("BlurShader.applyBlur")
        return processFrameBuffer.second.textureId
    }
}
| 1 | +/* | ||
| 2 | + * Copyright 2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video.shader | ||
| 18 | + | ||
| 19 | +import livekit.org.webrtc.GlShader | ||
| 20 | + | ||
// Separable box blur fragment shader (GLSL ES 3.00): a cheaper, uniform-weight alternative to
// the Gaussian blur in BlurShader.kt. Run twice per frame, horizontally then vertically, like
// the Gaussian version.
private const val BOX_BLUR_SHADER_SOURCE = """#version 300 es
precision mediump float;

in vec2 texCoords;

uniform sampler2D u_texture;
uniform vec2 u_texelSize; // 1.0 / texture size
uniform vec2 u_direction; // (1.0, 0.0) for horizontal, (0.0, 1.0) for vertical
uniform float u_radius; // blur radius in texels

out vec4 fragColor;

void main() {
    vec3 sum = vec3(0.0);
    float count = 0.0;

    // Limit radius to avoid excessive loop cost
    const int MAX_RADIUS = 16;
    int radius = int(min(float(MAX_RADIUS), u_radius));

    for (int i = -MAX_RADIUS; i <= MAX_RADIUS; ++i) {
        if (abs(i) > radius) continue;

        vec2 offset = u_direction * u_texelSize * float(i);
        sum += texture(u_texture, texCoords + offset).rgb;
        count += 1.0;
    }

    fragColor = vec4(sum / count, 1.0);
}
"""
| 52 | + | ||
/**
 * Compiles the two-pass box blur program and resolves its attribute/uniform locations.
 *
 * Reuses [BlurShader] as the holder because the box and Gaussian shaders share the same uniform
 * set. The constant vertex shader derives texture coordinates from positions, so the
 * texture-coordinate attribute and texture-matrix uniform are unused (locations left at 0).
 */
internal fun createBoxBlurShader(): BlurShader {
    val program = GlShader(CONSTANT_VERTEX_SHADER_SOURCE, BOX_BLUR_SHADER_SOURCE)
    val posLocation = program.getAttribLocation(VERTEX_SHADER_POS_COORD_NAME)

    return BlurShader(
        shader = program,
        inPosLocation = posLocation,
        inTcLocation = 0,
        texMatrixLocation = 0,
        texture = program.getUniformLocation("u_texture"),
        texelSize = program.getUniformLocation("u_texelSize"),
        direction = program.getUniformLocation("u_direction"),
        radius = program.getUniformLocation("u_radius"),
    )
}
| 1 | +/* | ||
| 2 | + * Copyright 2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video.shader | ||
| 18 | + | ||
| 19 | +import android.opengl.GLES11Ext | ||
| 20 | +import android.opengl.GLES20 | ||
| 21 | +import livekit.org.webrtc.GlShader | ||
| 22 | +import livekit.org.webrtc.GlUtil | ||
| 23 | + | ||
// Composite fragment shader (GLSL ES 3.00): blends the camera frame (external OES sampler) over
// the background texture using the segmentation mask's red channel. The mask edge is feathered
// with smoothstep, scaled by the mask's screen-space gradient so the softness adapts to how
// sharp the mask transition is on screen.
private const val COMPOSITE_FRAGMENT_SHADER_SOURCE = """#version 300 es
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
in vec2 texCoords;
uniform sampler2D background;
uniform samplerExternalOES frame;
uniform sampler2D mask;
out vec4 fragColor;

void main() {

    vec4 frameTex = texture(frame, texCoords);
    vec4 bgTex = texture(background, texCoords);

    float maskVal = texture(mask, texCoords).r;

    // Compute screen-space gradient to detect edge sharpness
    float grad = length(vec2(dFdx(maskVal), dFdy(maskVal)));

    float edgeSoftness = 6.0; // higher = softer

    // Create a smooth edge around binary transition
    float smoothAlpha = smoothstep(0.5 - grad * edgeSoftness, 0.5 + grad * edgeSoftness, maskVal);

    // Optional: preserve frame alpha, or override as fully opaque
    vec4 blended = mix(bgTex, vec4(frameTex.rgb, 1.0), 0.0 + smoothAlpha);

    fragColor = blended;

}
"""
| 55 | + | ||
/**
 * Compiles the background/frame/mask compositing program and resolves its shader locations.
 *
 * NOTE(review): the function name contains a typo ("Compsite"); it is kept as-is for source
 * compatibility with existing callers.
 */
internal fun createCompsiteShader(): CompositeShader {
    val program = GlShader(DEFAULT_VERTEX_SHADER_SOURCE, COMPOSITE_FRAGMENT_SHADER_SOURCE)
    val posLocation = program.getAttribLocation(VERTEX_SHADER_POS_COORD_NAME)
    val tcLocation = program.getAttribLocation(VERTEX_SHADER_TEX_COORD_NAME)
    val matrixLocation = program.getUniformLocation(VERTEX_SHADER_TEX_MAT_NAME)

    return CompositeShader(
        shader = program,
        texMatrixLocation = matrixLocation,
        inPosLocation = posLocation,
        inTcLocation = tcLocation,
        mask = program.getUniformLocation("mask"),
        frame = program.getUniformLocation("frame"),
        background = program.getUniformLocation("background"),
    )
}
| 69 | + | ||
/**
 * Shader that composites the camera frame over a background image using a segmentation mask.
 *
 * Texture units: 0 = background (GL_TEXTURE_2D), 1 = camera frame (GL_TEXTURE_EXTERNAL_OES),
 * 2 = segmentation mask (GL_TEXTURE_2D, red channel read as coverage).
 */
internal data class CompositeShader(
    val shader: GlShader,
    val inPosLocation: Int,
    val inTcLocation: Int,
    val texMatrixLocation: Int,
    val mask: Int,
    val frame: Int,
    val background: Int,
) {
    /**
     * Draws the composite of the three input textures into the currently bound framebuffer.
     *
     * @param backgroundTextureId GL_TEXTURE_2D background texture.
     * @param frameTextureId GL_TEXTURE_EXTERNAL_OES camera frame texture.
     * @param maskTextureId GL_TEXTURE_2D segmentation mask texture.
     * @param texMatrix texture transform applied to the sampling coordinates.
     */
    fun renderComposite(
        backgroundTextureId: Int,
        frameTextureId: Int,
        maskTextureId: Int,
        viewportX: Int,
        viewportY: Int,
        viewportWidth: Int,
        viewportHeight: Int,
        texMatrix: FloatArray,
    ) {
        GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight)
        GLES20.glClearColor(1f, 1f, 1f, 1f)
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)

        // Set up uniforms for the composite shader
        shader.useProgram()

        ShaderUtil.loadCoordMatrix(
            inPosLocation = inPosLocation,
            inPosFloats = FULL_RECTANGLE_BUFFER,
            inTcLocation = inTcLocation,
            inTcFloats = FULL_RECTANGLE_TEXTURE_BUFFER,
            texMatrixLocation = texMatrixLocation,
            texMatrix = texMatrix,
        )
        GlUtil.checkNoGLES2Error("loadCoordMatrix")

        // Texture unit 0: background (2D).
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, backgroundTextureId)
        GLES20.glUniform1i(background, 0)
        GlUtil.checkNoGLES2Error("GL_TEXTURE0")

        // Texture unit 1: camera frame (external OES).
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1)
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, frameTextureId)
        GLES20.glUniform1i(frame, 1)
        GlUtil.checkNoGLES2Error("GL_TEXTURE1")

        // Texture unit 2: segmentation mask (2D).
        GLES20.glActiveTexture(GLES20.GL_TEXTURE2)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, maskTextureId)
        GLES20.glUniform1i(mask, 2)
        GlUtil.checkNoGLES2Error("GL_TEXTURE2")

        // Draw composite
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        GlUtil.checkNoGLES2Error("GL_TRIANGLE_STRIP")

        // Cleanup: unbind each unit using the same target it was bound with. (Previously unit 1
        // unbound GL_TEXTURE_2D, leaving the external OES frame texture bound.) Finish with
        // TEXTURE0 active so no unexpected unit leaks to subsequent draws.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE2)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1)
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0)
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
        GlUtil.checkNoGLES2Error("renderComposite")
    }

    /** Releases the underlying GL program. The object must not be used afterwards. */
    fun release() {
        shader.release()
    }
}
| 1 | +/* | ||
| 2 | + * Copyright 2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video.shader | ||
| 18 | + | ||
// Vertex shader that forwards positions unchanged and computes texture coordinates by applying
// tex_mat to the in_tc attribute. Used by shaders that need a texture transform.
internal const val DEFAULT_VERTEX_SHADER_SOURCE = """#version 300 es
out vec2 texCoords;
in vec4 in_pos;
in vec4 in_tc;
uniform mat4 tex_mat;
void main() {
    gl_Position = in_pos;
    texCoords = (tex_mat * in_tc).xy;
}
"""

// Vertex shader that derives texture coordinates directly from the clip-space positions
// (mapping [-1, 1] to [0, 1]); no texture-coordinate attribute or matrix uniform is needed.
internal const val CONSTANT_VERTEX_SHADER_SOURCE = """#version 300 es
in vec2 in_pos;
out vec2 texCoords;

void main() {
    texCoords = (in_pos + 1.0) / 2.0;
    gl_Position = vec4(in_pos, 0, 1.0);
}
"""

// Attribute/uniform names shared by the vertex shaders above.
internal const val VERTEX_SHADER_TEX_MAT_NAME = "tex_mat"
internal const val VERTEX_SHADER_TEX_COORD_NAME = "in_tc"
internal const val VERTEX_SHADER_POS_COORD_NAME = "in_pos"
| 1 | +/* | ||
| 2 | + * Copyright 2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video.shader | ||
| 18 | + | ||
| 19 | +import android.opengl.GLES11Ext | ||
| 20 | +import android.opengl.GLES20 | ||
| 21 | +import livekit.org.webrtc.GlShader | ||
| 22 | +import livekit.org.webrtc.GlTextureFrameBuffer | ||
| 23 | +import livekit.org.webrtc.GlUtil | ||
| 24 | + | ||
// Passthrough vertex shader (GLSL ES 1.00) that applies the texture transform matrix to the
// texture coordinates. Written against ES 1.00 syntax (attribute/varying) to pair with the
// GL_OES_EGL_image_external fragment shader below.
private const val DOWNSAMPLER_VERTEX_SHADER_SOURCE = """
attribute vec4 in_pos;
attribute vec4 in_tc;
uniform mat4 tex_mat;
varying vec2 v_uv;
void main() {
    v_uv = (tex_mat * in_tc).xy;
    gl_Position = in_pos;
}
"""

// Fragment shader that samples the external (camera) texture; the resampling itself happens via
// the target framebuffer size and the sampler's filtering.
private const val DOWNSAMPLER_FRAGMENT_SHADER_SOURCE = """#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 v_uv;
uniform samplerExternalOES u_texture;

void main() {
    gl_FragColor = texture2D(u_texture, v_uv);
}
"""
| 45 | + | ||
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1)
// is top-right. Laid out as a triangle strip covering the full viewport.
internal val FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(
    floatArrayOf(
        -1.0f, -1.0f, // Bottom left.
        1.0f, -1.0f, // Bottom right.
        -1.0f, 1.0f, // Top left.
        1.0f, 1.0f, // Top right.
    ),
)

// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
internal val FULL_RECTANGLE_TEXTURE_BUFFER = GlUtil.createFloatBuffer(
    floatArrayOf(
        0.0f, 0.0f, // Bottom left.
        1.0f, 0.0f, // Bottom right.
        0.0f, 1.0f, // Top left.
        1.0f, 1.0f, // Top right.
    ),
)
| 74 | + | ||
/**
 * Compiles the resampling program and allocates the RGBA framebuffer it renders into.
 */
internal fun createResampler(): ResamplerShader {
    val program = GlShader(DOWNSAMPLER_VERTEX_SHADER_SOURCE, DOWNSAMPLER_FRAGMENT_SHADER_SOURCE)
    val scratchFrameBuffer = GlTextureFrameBuffer(GLES20.GL_RGBA)

    return ResamplerShader(
        shader = program,
        textureFrameBuffer = scratchFrameBuffer,
        inPosLocation = program.getAttribLocation(VERTEX_SHADER_POS_COORD_NAME),
        inTcLocation = program.getAttribLocation(VERTEX_SHADER_TEX_COORD_NAME),
        texMatrixLocation = program.getUniformLocation(VERTEX_SHADER_TEX_MAT_NAME),
        texture = program.getUniformLocation("u_texture"),
    )
}
| 88 | + | ||
/**
 * A shader that resamples a texture at a new size.
 *
 * Draws an external OES input texture (e.g. a camera frame) into an internal framebuffer of the
 * requested dimensions, producing a regular GL_TEXTURE_2D result.
 */
internal data class ResamplerShader(
    val shader: GlShader,
    val textureFrameBuffer: GlTextureFrameBuffer,
    val texMatrixLocation: Int,
    val inPosLocation: Int,
    val inTcLocation: Int,
    val texture: Int,
) {

    /** Releases the GL program and scratch framebuffer. The object must not be used afterwards. */
    fun release() {
        shader.release()
        textureFrameBuffer.release()
    }

    /**
     * Renders [inputTexture] (a GL_TEXTURE_EXTERNAL_OES texture) at [newWidth] x [newHeight].
     *
     * @param texMatrix texture transform applied to the sampling coordinates.
     * @return the id of the GL_TEXTURE_2D texture containing the resampled image; owned by this
     *   object's framebuffer and only valid until the next [resample] or [release] call.
     */
    fun resample(
        inputTexture: Int,
        newWidth: Int,
        newHeight: Int,
        texMatrix: FloatArray,
    ): Int {
        textureFrameBuffer.setSize(newWidth, newHeight)

        shader.useProgram()

        // Render into the scratch framebuffer at the requested size.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.frameBufferId)
        GLES20.glViewport(0, 0, newWidth, newHeight)
        ShaderUtil.loadCoordMatrix(
            inPosLocation = inPosLocation,
            inPosFloats = FULL_RECTANGLE_BUFFER,
            inTcLocation = inTcLocation,
            inTcFloats = FULL_RECTANGLE_TEXTURE_BUFFER,
            texMatrixLocation = texMatrixLocation,
            texMatrix = texMatrix,
        )

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, inputTexture)
        GlUtil.checkNoGLES2Error("ResamplerShader.glBindTexture")
        GLES20.glUniform1i(texture, 0)

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        GlUtil.checkNoGLES2Error("ResamplerShader.glDrawArrays")

        // cleanup
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0)
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0)

        GlUtil.checkNoGLES2Error("ResamplerShader.applyDownsampling")
        return textureFrameBuffer.textureId
    }
}
| 1 | +/* | ||
| 2 | + * Copyright 2025 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.track.processing.video.shader | ||
| 18 | + | ||
| 19 | +import android.opengl.GLES20 | ||
| 20 | +import java.nio.FloatBuffer | ||
| 21 | + | ||
/**
 * Shared helpers for uploading vertex/texture coordinate data to shader programs.
 */
internal object ShaderUtil {
    /**
     * Uploads the vertex positions, texture coordinates and texture transform matrix to the
     * currently bound program. Any data argument left null is skipped, which lets shaders that
     * do not use a particular input (e.g. the constant vertex shader) reuse this helper.
     *
     * @param inPosLocation attribute location for vertex positions.
     * @param inPosFloats xy vertex positions, or null to skip.
     * @param inTcLocation attribute location for texture coordinates.
     * @param inTcFloats xy texture coordinates, or null to skip.
     * @param texMatrixLocation uniform location of the 4x4 texture transform matrix.
     * @param texMatrix 4x4 texture transform matrix, or null to skip.
     */
    fun loadCoordMatrix(
        inPosLocation: Int,
        inPosFloats: FloatBuffer? = null,
        inTcLocation: Int,
        inTcFloats: FloatBuffer? = null,
        texMatrixLocation: Int,
        texMatrix: FloatArray? = null,
    ) {
        inPosFloats?.let { positions ->
            // Upload the vertex coordinates (2 floats per vertex, tightly packed).
            GLES20.glEnableVertexAttribArray(inPosLocation)
            GLES20.glVertexAttribPointer(inPosLocation, 2, GLES20.GL_FLOAT, false, 0, positions)
        }

        inTcFloats?.let { texCoords ->
            // Upload the texture coordinates (2 floats per vertex, tightly packed).
            GLES20.glEnableVertexAttribArray(inTcLocation)
            GLES20.glVertexAttribPointer(inTcLocation, 2, GLES20.GL_FLOAT, false, 0, texCoords)
        }

        texMatrix?.let { matrix ->
            // Upload the texture transformation matrix (single mat4, not transposed).
            GLES20.glUniformMatrix4fv(texMatrixLocation, 1, false, matrix, 0)
        }
    }
}
| @@ -96,7 +96,9 @@ class CallViewModel( | @@ -96,7 +96,9 @@ class CallViewModel( | ||
| 96 | appContext = application, | 96 | appContext = application, |
| 97 | options = getRoomOptions(), | 97 | options = getRoomOptions(), |
| 98 | overrides = LiveKitOverrides( | 98 | overrides = LiveKitOverrides( |
| 99 | - audioOptions = AudioOptions(audioProcessorOptions = audioProcessorOptions), | 99 | + audioOptions = AudioOptions( |
| 100 | + audioProcessorOptions = audioProcessorOptions, | ||
| 101 | + ), | ||
| 100 | ), | 102 | ), |
| 101 | ) | 103 | ) |
| 102 | 104 |
| @@ -26,7 +26,8 @@ include ':livekit-lint' | @@ -26,7 +26,8 @@ include ':livekit-lint' | ||
| 26 | include ':video-encode-decode-test' | 26 | include ':video-encode-decode-test' |
| 27 | include ':sample-app-basic' | 27 | include ':sample-app-basic' |
| 28 | include ':sample-app-record-local' | 28 | include ':sample-app-record-local' |
| 29 | -include ':examples:selfie-segmentation' | 29 | +include ':examples:virtual-background' |
| 30 | include ':livekit-android-test' | 30 | include ':livekit-android-test' |
| 31 | include ':livekit-android-camerax' | 31 | include ':livekit-android-camerax' |
| 32 | include ':examples:screenshare-audio' | 32 | include ':examples:screenshare-audio' |
| 33 | +include ':livekit-android-track-processors' |
-
请注册或登录后发表评论