davidliu
Committed by GitHub

Fixes for video processing and examples (#495)

* Add utility class NoDropVideoProcessor to force video processing while not connected

* Fix local video tracks not rendering processed frames
---
"client-sdk-android": patch
---
Fix local video tracks not rendering processed frames
... ...
---
"client-sdk-android": patch
---
Add utility class NoDropVideoProcessor to force video processing while not connected
... ...
... ... @@ -31,6 +31,7 @@ import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.Segmentation
import com.google.mlkit.vision.segmentation.Segmenter
import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions
import io.livekit.android.room.track.video.NoDropVideoProcessor
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.cancel
... ... @@ -41,13 +42,12 @@ import kotlinx.coroutines.sync.Mutex
import livekit.org.webrtc.EglBase
import livekit.org.webrtc.SurfaceTextureHelper
import livekit.org.webrtc.VideoFrame
import livekit.org.webrtc.VideoProcessor
import livekit.org.webrtc.VideoSink
import livekit.org.webrtc.YuvHelper
import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
class SelfieBitmapVideoProcessor(eglBase: EglBase, dispatcher: CoroutineDispatcher) : VideoProcessor {
class SelfieBitmapVideoProcessor(eglBase: EglBase, dispatcher: CoroutineDispatcher) : NoDropVideoProcessor() {
private var targetSink: VideoSink? = null
private val segmenter: Segmenter
... ... @@ -138,6 +138,7 @@ class SelfieBitmapVideoProcessor(eglBase: EglBase, dispatcher: CoroutineDispatch
frameBuffer.release()
frame.release()
// Ready for segmentation processing.
val inputImage = InputImage.fromBitmap(bitmap, 0)
val task = segmenter.process(inputImage)
... ... @@ -156,6 +157,7 @@ class SelfieBitmapVideoProcessor(eglBase: EglBase, dispatcher: CoroutineDispatch
}
}
// Prepare for creating the processed video frame.
if (lastRotation != rotationDegrees) {
surfaceTextureHelper?.setFrameRotation(rotationDegrees)
lastRotation = rotationDegrees
... ... @@ -175,6 +177,7 @@ class SelfieBitmapVideoProcessor(eglBase: EglBase, dispatcher: CoroutineDispatch
}
if (canvas != null) {
// Create the video frame.
canvas.drawBitmap(bitmap, Matrix(), Paint())
surface.unlockCanvasAndPost(canvas)
}
... ...
... ... @@ -21,17 +21,17 @@ import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.Segmentation
import com.google.mlkit.vision.segmentation.Segmenter
import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions
import io.livekit.android.room.track.video.NoDropVideoProcessor
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.channels.BufferOverflow
import kotlinx.coroutines.flow.MutableSharedFlow
import kotlinx.coroutines.launch
import livekit.org.webrtc.VideoFrame
import livekit.org.webrtc.VideoProcessor
import livekit.org.webrtc.VideoSink
import java.nio.ByteBuffer
class SelfieVideoProcessor(dispatcher: CoroutineDispatcher) : VideoProcessor {
class SelfieVideoProcessor(dispatcher: CoroutineDispatcher) : NoDropVideoProcessor() {
private var targetSink: VideoSink? = null
private val segmenter: Segmenter
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.annotations
/**
 * Marks a declaration as depending on WebRTC internals.
 *
 * Annotated targets rely on implementation details of the bundled WebRTC
 * library and must be explicitly retested after every WebRTC version upgrade.
 */
@Retention(AnnotationRetention.SOURCE)
annotation class WebRTCSensitive
... ...
... ... @@ -476,13 +476,20 @@ constructor(
source.setVideoProcessor(videoProcessor)
val surfaceTextureHelper = SurfaceTextureHelper.create("VideoCaptureThread", rootEglBase.eglBaseContext)
val dispatchObserver = CaptureDispatchObserver()
dispatchObserver.registerObserver(source.capturerObserver)
// Dispatch raw frames to local renderer only if not using a VideoProcessor.
val dispatchObserver = if (videoProcessor == null) {
CaptureDispatchObserver().apply {
registerObserver(source.capturerObserver)
}
} else {
null
}
capturer.initialize(
surfaceTextureHelper,
context,
dispatchObserver,
dispatchObserver ?: source.capturerObserver,
)
val rtcTrack = peerConnectionFactory.createVideoTrack(UUID.randomUUID().toString(), source)
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.room.track.video
import io.livekit.android.annotations.WebRTCSensitive
import livekit.org.webrtc.VideoFrame
import livekit.org.webrtc.VideoProcessor
/**
* When not connected to a room, the base [VideoProcessor] implementation will refuse
* to process frames as they will all be dropped (i.e. not sent).
*
* This implementation by default forces all frames to be processed regardless of publish status.
*
* Change [allowDropping] to true if you want to allow dropping of frames.
*/
/**
 * A [VideoProcessor] base class that keeps processing frames even while the
 * track is unpublished.
 *
 * The stock [VideoProcessor.onFrameCaptured] overload drops frames when not
 * connected to a room (they would never be sent), so processors never see
 * them. This class bypasses that drop logic by default; set [allowDropping]
 * to true to restore the stock behavior.
 */
abstract class NoDropVideoProcessor : VideoProcessor {
    /**
     * When false (the default), every captured frame is processed regardless
     * of publish status. When true, frames are only processed while the
     * associated video track is published.
     */
    @Suppress("MemberVisibilityCanBePrivate")
    var allowDropping = false

    @WebRTCSensitive
    override fun onFrameCaptured(frame: VideoFrame, parameters: VideoProcessor.FrameAdaptationParameters) {
        if (allowDropping) {
            // Defer to the stock implementation, which may drop the frame.
            super.onFrameCaptured(frame, parameters)
            return
        }

        // Altered from VideoProcessor: always forward the frame for processing.
        val adapted = VideoProcessor.applyFrameAdaptationParameters(frame, parameters)
        if (adapted == null) {
            onFrameCaptured(frame)
        } else {
            onFrameCaptured(adapted)
            // Release the adapted copy once the processor has consumed it.
            adapted.release()
        }
    }
}
... ...