Vlad Provalionok
Committed by GitHub

Improved VirtualBackgroundVideoProcessor and VirtualBackgroundTransformer (#731)

* Made blur radius dynamically changeable

Added lazy initialization of Segmenter

Removed blurring in the shader when the radius is less than or equal to 0

Configured the target aspect ratio for ImageAnalysis in the sample

* Make blurRadius a mutable constructor property

The original API is kept so existing callers need no changes (see the sketch after this commit list).

* Remove unneeded updateBlurRadius method

* Fix compile

* changeset

* fix compile
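To make the backward-compatibility point concrete, here is a minimal sketch (not code from this PR): because the new `initialBlurRadius` parameter has a default, pre-existing call sites keep compiling unchanged. The wrapper function and the import paths are assumptions for illustration.

```kotlin
// NOTE: import paths are assumptions; adjust them to your project's packaging.
import kotlinx.coroutines.Dispatchers
import livekit.org.webrtc.EglBase

// Both call sites compile: initialBlurRadius is optional and defaults to 16f.
fun createProcessors(eglBase: EglBase) {
    val defaultBlur = VirtualBackgroundVideoProcessor(eglBase, Dispatchers.IO) // original API, unchanged
    val customBlur = VirtualBackgroundVideoProcessor(eglBase, Dispatchers.IO, initialBlurRadius = 8f) // new optional parameter
}
```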

---------

Co-authored-by: davidliu <davidliu@deviange.net>
---
"client-sdk-android": patch
---
Make blurRadius in VirtualBackgroundTransformer mutable to allow dynamically changing the value.
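A rough usage sketch of the new knob, assuming a wrapper class of your own (the name BlurController, the 5f step, and the clamping are illustrative, not part of the SDK): the processor's `updateBlurRadius` forwards the value to the transformer's now-mutable `blurRadius`, and a radius of 0 or less makes the shader pass frames through unblurred.

```kotlin
// NOTE: import paths are assumptions; adjust them to your project's packaging.
import kotlinx.coroutines.Dispatchers
import livekit.org.webrtc.EglBase

// Illustrative wrapper mirroring the sample app's "Blur more" / "Blur less" buttons.
class BlurController(eglBase: EglBase) {
    private var blur = 16f

    val processor = VirtualBackgroundVideoProcessor(eglBase, Dispatchers.IO, initialBlurRadius = blur)

    fun increaseBlur(step: Float = 5f) {
        blur += step
        processor.updateBlurRadius(blur)
    }

    fun decreaseBlur(step: Float = 5f) {
        // Clamp at 0: a radius <= 0 now skips blurring entirely in the shader.
        blur = (blur - step).coerceAtLeast(0f)
        processor.updateBlurRadius(blur)
    }
}
```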
... ...
@@ -51,6 +51,13 @@ class MainActivity : AppCompatActivity() {
             track?.addRenderer(renderer)
         }
+        findViewById<Button>(R.id.buttonIncreaseBlur).setOnClickListener {
+            viewModel.increaseBlur()
+        }
+        findViewById<Button>(R.id.buttonDecreaseBlur).setOnClickListener {
+            viewModel.decreaseBlur()
+        }
         requestNeededPermissions {
             viewModel.startCapture()
         }
... ...
@@ -22,6 +22,8 @@ import androidx.annotation.OptIn
 import androidx.appcompat.content.res.AppCompatResources
 import androidx.camera.camera2.interop.ExperimentalCamera2Interop
 import androidx.camera.core.ImageAnalysis
+import androidx.camera.core.resolutionselector.AspectRatioStrategy
+import androidx.camera.core.resolutionselector.ResolutionSelector
 import androidx.lifecycle.AndroidViewModel
 import androidx.lifecycle.MutableLiveData
 import androidx.lifecycle.ProcessLifecycleOwner
@@ -52,15 +54,23 @@ class MainViewModel(application: Application) : AndroidViewModel(application) {
             eglBase = eglBase,
         ),
     )
-    val processor = VirtualBackgroundVideoProcessor(eglBase, Dispatchers.IO).apply {
+    private var blur = 16f
+    private val processor = VirtualBackgroundVideoProcessor(eglBase, Dispatchers.IO, initialBlurRadius = blur).apply {
         val drawable = AppCompatResources.getDrawable(application, R.drawable.background) as BitmapDrawable
         backgroundImage = drawable.bitmap
     }
     private var cameraProvider: CameraCapturerUtils.CameraProvider? = null
-    private var imageAnalysis = ImageAnalysis.Builder().build()
+    private var imageAnalysis = ImageAnalysis.Builder()
+        .setResolutionSelector(
+            ResolutionSelector.Builder()
+                // LocalVideoTrack has default aspect ratio 16:9 VideoPreset169.H720
+                // ImageAnalysis of CameraX has default aspect ratio 4:3
+                .setAspectRatioStrategy(AspectRatioStrategy.RATIO_16_9_FALLBACK_AUTO_STRATEGY)
+                .build(),
+        )
+        .build()
         .apply { setAnalyzer(Dispatchers.IO.asExecutor(), processor.imageAnalyzer) }

     init {
@@ -99,4 +109,14 @@ class MainViewModel(application: Application) : AndroidViewModel(application) {
         processor.enabled = newState
         return newState
     }
+
+    fun decreaseBlur() {
+        blur -= 5
+        processor.updateBlurRadius(blur)
+    }
+
+    fun increaseBlur() {
+        blur += 5
+        processor.updateBlurRadius(blur)
+    }
 }
... ...
@@ -17,4 +17,21 @@
         android:layout_margin="10dp"
         android:text="Disable" />
+
+    <Button
+        android:id="@+id/buttonIncreaseBlur"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_gravity="end"
+        android:layout_margin="10dp"
+        android:text="Blur more" />
+
+    <Button
+        android:id="@+id/buttonDecreaseBlur"
+        android:layout_width="wrap_content"
+        android:layout_gravity="end"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="64dp"
+        android:layout_marginEnd="10dp"
+        android:text="Blur less" />
 </FrameLayout>
... ...
@@ -37,8 +37,8 @@ import java.nio.ByteBuffer
  * Blurs the background of the camera video stream.
  */
 class VirtualBackgroundTransformer(
-    val blurRadius: Float = 16f,
-    val downSampleFactor: Int = 2,
+    var blurRadius: Float = 16f,
+    var downSampleFactor: Int = 2,
 ) : RendererCommon.GlDrawer {

     data class MaskHolder(val width: Int, val height: Int, val buffer: ByteBuffer)
@@ -54,7 +54,7 @@ class VirtualBackgroundTransformer(
     private lateinit var downSampler: ResamplerShader

-    var backgroundImageStateLock = Any()
+    private var backgroundImageStateLock = Any()
     var backgroundImage: Bitmap? = null
         set(value) {
             if (value == field) {
@@ -66,7 +66,7 @@ class VirtualBackgroundTransformer(
                 backgroundImageNeedsUploading = true
             }
         }
-    var backgroundImageNeedsUploading = false
+    private var backgroundImageNeedsUploading = false

     // For double buffering the final mask
     private var readMaskIndex = 0 // Index for renderFrame to read from
@@ -250,6 +250,7 @@ class VirtualBackgroundTransformer(
     }

     override fun release() {
+        if (!initialized) return
         compositeShader.release()
         blurShader.release()
         boxBlurShader.release()
... ...
@@ -49,17 +49,27 @@ import java.util.concurrent.Semaphore
  * By default, blurs the background of the video stream.
  * Setting [backgroundImage] will use the provided image instead.
  */
-class VirtualBackgroundVideoProcessor(private val eglBase: EglBase, dispatcher: CoroutineDispatcher = Dispatchers.Default) : NoDropVideoProcessor() {
+class VirtualBackgroundVideoProcessor(
+    private val eglBase: EglBase,
+    dispatcher: CoroutineDispatcher = Dispatchers.Default,
+    initialBlurRadius: Float = 16f,
+) : NoDropVideoProcessor() {
     private var targetSink: VideoSink? = null
-    private val segmenter: Segmenter
+    private val segmenter: Segmenter by lazy {
+        val options =
+            SelfieSegmenterOptions.Builder()
+                .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE)
+                .build()
+        Segmentation.getClient(options)
+    }
     private var lastRotation = 0
     private var lastWidth = 0
     private var lastHeight = 0

     private val surfaceTextureHelper = SurfaceTextureHelper.create("BitmapToYUV", eglBase.eglBaseContext)
     private val surface = Surface(surfaceTextureHelper.surfaceTexture)
-    private val backgroundTransformer = VirtualBackgroundTransformer()
+    private val backgroundTransformer = VirtualBackgroundTransformer(blurRadius = initialBlurRadius)
     private val eglRenderer = EglRenderer(VirtualBackgroundVideoProcessor::class.java.simpleName)
         .apply {
             init(eglBase.eglBaseContext, EglBase.CONFIG_PLAIN, backgroundTransformer)
@@ -88,12 +98,6 @@ class VirtualBackgroundVideoProcessor(private val eglBase: EglBase, dispatcher:
     private var backgroundImageNeedsUpdating = false

     init {
-        val options =
-            SelfieSegmenterOptions.Builder()
-                .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE)
-                .build()
-        segmenter = Segmentation.getClient(options)
-
         // Funnel processing into a single flow that won't buffer,
         // since processing may be slower than video capture.
         scope.launch {
@@ -167,7 +171,11 @@ class VirtualBackgroundVideoProcessor(private val eglBase: EglBase, dispatcher:
         }
     }

-    fun processFrame(frame: VideoFrame) {
+    override fun setSink(sink: VideoSink?) {
+        targetSink = sink
+    }
+
+    private fun processFrame(frame: VideoFrame) {
         if (lastRotation != frame.rotation) {
             lastRotation = frame.rotation
             backgroundImageNeedsUpdating = true
@@ -227,8 +235,8 @@ class VirtualBackgroundVideoProcessor(private val eglBase: EglBase, dispatcher:
         }
     }

-    override fun setSink(sink: VideoSink?) {
-        targetSink = sink
+    fun updateBlurRadius(blurRadius: Float) {
+        backgroundTransformer.blurRadius = blurRadius
     }

     fun dispose() {
... ...
@@ -34,6 +34,11 @@ uniform float u_radius;
 out vec4 fragColor;

 void main() {
+    if (u_radius <= 0.0) {
+        fragColor = texture(u_texture, texCoords);
+        return;
+    }
+
     float sigma = u_radius;
     float twoSigmaSq = 2.0 * sigma * sigma;
     float totalWeight = 0.0;
... ...